Skip to content

Commit

Permalink
fix(propagation/doc): Don't fail in doc propagation if origin is missing (#161)

Browse files Browse the repository at this point in the history
  • Loading branch information
treff7es authored Jan 31, 2025
1 parent 649f1d4 commit 2f78ab1
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 40 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build-and-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ jobs:
- name: Gradle build (and test)
run: |
./gradlew build
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
if: always()
with:
name: Test Results (build)
Expand All @@ -44,7 +44,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: Event File
path: ${{ github.event_path }}
7 changes: 4 additions & 3 deletions .github/workflows/datahub-actions-docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ jobs:
run: docker image save -o image.tar ${{ steps.docker_meta_slim.outputs.tags }}
- name: Upload artifact
if: needs.setup.outputs.publish != 'true'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: docker-image
path: image.tar
Expand Down Expand Up @@ -201,7 +201,7 @@ jobs:
uses: actions/checkout@v4
- name: Download artifact (if not publishing)
if: needs.setup.outputs.publish != 'true'
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: docker-image
- name: Load Docker image (if not publishing)
Expand All @@ -214,6 +214,7 @@ jobs:
uses: aquasecurity/trivy-action@master
env:
TRIVY_OFFLINE_SCAN: true
TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
with:
image-ref: acryldata/datahub-actions-slim:${{ needs.setup.outputs.unique_tag }}
format: "template"
Expand Down Expand Up @@ -252,7 +253,7 @@ jobs:
cache: "pip"
- name: Download artifact (if not publishing)
if: needs.setup.outputs.publish != 'true'
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: docker-image
- name: Load Docker image (if not publishing)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -305,7 +305,11 @@ def process_schema_field_documentation(
propagation_relationships = self.get_propagation_relationships(
entity_type="schemaField", source_details=source_details_parsed
)
origin_entity = source_details_parsed.origin
origin_entity = (
source_details_parsed.origin
if source_details_parsed.origin
else entity_urn
)
if old_docs is None or not old_docs.documentations:
return DocPropagationDirective(
propagate=True,
Expand Down
39 changes: 5 additions & 34 deletions smoke-test/tests/actions/doc_propagation/test_propagation.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
import uuid
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Dict, Iterable, List, Tuple
from typing import Any, Dict, Iterator, List, Tuple

import datahub.metadata.schema_classes as models
from pydantic import BaseModel
import pytest
import tenacity
from datahub.api.entities.dataset.dataset import Dataset
from datahub.emitter.mce_builder import make_schema_field_urn
from datahub.emitter.mcp import MetadataChangeProposalWrapper
Expand All @@ -19,11 +19,7 @@
from datahub.ingestion.sink.file import FileSink, FileSinkConfig
from datahub.utilities.urns.urn import Urn
from jinja2 import Template
import tenacity
from datahub_actions.plugin.action.propagation.docs.propagation_action import (
DocPropagationConfig,
)

from pydantic import BaseModel

from tests.utils import (
delete_urns_from_file,
Expand Down Expand Up @@ -111,7 +107,7 @@ def action_env_vars(pytestconfig) -> ActionTestEnv:
key, value = line.split("=", 1)
env_vars[key] = value

return ActionTestEnv(**env_vars)
return ActionTestEnv.parse_obj(env_vars)


@pytest.fixture(scope="function")
Expand Down Expand Up @@ -164,31 +160,6 @@ def test_resources_dir(root_dir):
return Path(root_dir) / "tests" / "actions" / "doc_propagation" / "resources"


@pytest.fixture(scope="function")
def ingest_cleanup_data_function(request, test_resources_dir, graph, test_id):
    """Fixture factory for ingest-then-cleanup of templated test datasets.

    Returns a context manager that renders ``template_file`` with ``test_id``,
    ingests the resulting metadata file via REST, yields the created URNs, and
    on exit removes the entities (hard delete) when ``DELETE_AFTER_TEST`` is
    enabled. The temporary file is always removed.
    """

    @contextmanager
    def _ingest_cleanup_data(template_file="datasets_template.yaml"):
        # mkstemp returns an *open* OS-level file descriptor plus a path.
        # Close the fd right away — we only need the path — so it isn't leaked.
        fd, filename = tempfile.mkstemp(suffix=f"_{test_id}.json")
        os.close(fd)
        # Bind before the try so the finally block can't raise NameError and
        # shadow the real exception if create_test_data() fails.
        all_urns = []
        try:
            template_path = Path(test_resources_dir) / template_file
            all_urns = create_test_data(filename, template_path, test_id)
            print(
                f"Ingesting datasets test data for test_id: {test_id} using template: {template_file}"
            )
            ingest_file_via_rest(filename)
            yield all_urns
        finally:
            if DELETE_AFTER_TEST:
                print(f"Removing test data for test_id: {test_id}")
                delete_urns_from_file(filename)
                for urn in all_urns:
                    graph.delete_entity(urn, hard=True)
                wait_for_writes_to_sync()
            # Always remove the temp file, even when entity cleanup is skipped,
            # so disabled-cleanup runs don't accumulate files in /tmp.
            os.remove(filename)

    return _ingest_cleanup_data


@pytest.fixture(scope="function")
def ingest_cleanup_data(ingest_cleanup_data_function):
"""
Expand Down Expand Up @@ -261,7 +232,7 @@ def large_fanout_graph_function(graph: DataHubGraph):
@contextmanager
def _large_fanout_graph(
test_id: str, max_fanout: int
) -> Iterable[Tuple[str, List[str]]]:
) -> Iterator[Tuple[str, List[str]]]:
max_index = max_fanout + 1
all_urns = []
dataset_base_name = f"large_fanout_dataset_{test_id}"
Expand Down

0 comments on commit 2f78ab1

Please sign in to comment.