 20 |  20 | from typing import TYPE_CHECKING
 21 |  21 |
 22 |  22 | from connexion import NoContent
    |  23 | +from marshmallow import ValidationError
 23 |  24 | from sqlalchemy import delete, func, select
 24 |  25 | from sqlalchemy.orm import joinedload, subqueryload
 25 |  26 |
 26 |  27 | from airflow.api_connexion import security
 27 |     | -from airflow.api_connexion.exceptions import NotFound
    |  28 | +from airflow.api_connexion.endpoints.request_dict import get_json_request_dict
    |  29 | +from airflow.api_connexion.exceptions import BadRequest, NotFound
 28 |  30 | from airflow.api_connexion.parameters import apply_sorting, check_limit, format_datetime, format_parameters
 29 |  31 | from airflow.api_connexion.schemas.dataset_schema import (
 30 |  32 |     DagScheduleDatasetReference,
  ⋮ |   ⋮ |
 33 |  35 |     QueuedEvent,
 34 |  36 |     QueuedEventCollection,
 35 |  37 |     TaskOutletDatasetReference,
    |  38 | +    create_dataset_event_schema,
 36 |  39 |     dataset_collection_schema,
 37 |  40 |     dataset_event_collection_schema,
    |  41 | +    dataset_event_schema,
 38 |  42 |     dataset_schema,
 39 |  43 |     queued_event_collection_schema,
 40 |  44 |     queued_event_schema,
 41 |  45 | )
    |  46 | +from airflow.datasets import Dataset
    |  47 | +from airflow.datasets.manager import dataset_manager
 42 |  48 | from airflow.models.dataset import DatasetDagRunQueue, DatasetEvent, DatasetModel
    |  49 | +from airflow.security import permissions
    |  50 | +from airflow.utils import timezone
 43 |  51 | from airflow.utils.db import get_query_count
    |  52 | +from airflow.utils.log.action_logger import action_event_from_permission
 44 |  53 | from airflow.utils.session import NEW_SESSION, provide_session
    |  54 | +from airflow.www.decorators import action_logging
 45 |  55 | from airflow.www.extensions.init_auth_manager import get_auth_manager
 46 |  56 |
 47 |  57 | if TYPE_CHECKING:
 48 |  58 |     from sqlalchemy.orm import Session
 49 |  59 |
 50 |  60 |     from airflow.api_connexion.types import APIResponse
 51 |  61 |
    |  62 | +RESOURCE_EVENT_PREFIX = "dataset"
    |  63 | +
 52 |  64 |
 53 |  65 | @security.requires_access_dataset("GET")
 54 |  66 | @provide_session
@@ -311,3 +323,38 @@ def delete_dataset_queued_events(
311 | 323 |         "Queue event not found",
312 | 324 |         detail=f"Queue event with dataset uri: `{uri}` was not found",
313 | 325 |     )
    | 326 | +
    | 327 | +
    | 328 | +@security.requires_access_dataset("POST")
    | 329 | +@provide_session
    | 330 | +@action_logging(
    | 331 | +    event=action_event_from_permission(
    | 332 | +        prefix=RESOURCE_EVENT_PREFIX,
    | 333 | +        permission=permissions.ACTION_CAN_CREATE,
    | 334 | +    ),
    | 335 | +)
    | 336 | +def create_dataset_event(session: Session = NEW_SESSION) -> APIResponse:
    | 337 | +    """Create dataset event."""
    | 338 | +    body = get_json_request_dict()
    | 339 | +    try:
    | 340 | +        json_body = create_dataset_event_schema.load(body)
    | 341 | +    except ValidationError as err:
    | 342 | +        raise BadRequest(detail=str(err))
    | 343 | +
    | 344 | +    uri = json_body["dataset_uri"]
    | 345 | +    dataset = session.scalar(select(DatasetModel).where(DatasetModel.uri == uri).limit(1))
    | 346 | +    if not dataset:
    | 347 | +        raise NotFound(title="Dataset not found", detail=f"Dataset with uri: '{uri}' not found")
    | 348 | +    timestamp = timezone.utcnow()
    | 349 | +    extra = json_body.get("extra", {})
    | 350 | +    extra["from_rest_api"] = True
    | 351 | +    dataset_event = dataset_manager.register_dataset_change(
    | 352 | +        dataset=Dataset(uri),
    | 353 | +        timestamp=timestamp,
    | 354 | +        extra=extra,
    | 355 | +        session=session,
    | 356 | +    )
    | 357 | +    if not dataset_event:
    | 358 | +        raise NotFound(title="Dataset not found", detail=f"Dataset with uri: '{uri}' not found")
    | 359 | +    event = dataset_event_schema.dump(dataset_event)
    | 360 | +    return event
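The new handler loads the request body with `create_dataset_event_schema` (a required `dataset_uri` plus an optional `extra` dict), tags the event with `"from_rest_api": True`, and hands it to `dataset_manager.register_dataset_change`. A minimal sketch of calling it over the REST API is shown below; the route, host, and credentials are assumptions, since the diff does not include the OpenAPI spec change that wires this handler to a URL (commonly `POST /api/v1/datasets/events`).

```python
# Minimal sketch of exercising the new create_dataset_event handler.
# Assumptions not shown in this diff: the handler is mapped to
# POST /api/v1/datasets/events, the webserver listens on localhost:8080,
# and basic auth is enabled for an "admin" user.
import requests

resp = requests.post(
    "http://localhost:8080/api/v1/datasets/events",
    auth=("admin", "admin"),
    json={
        # required by create_dataset_event_schema
        "dataset_uri": "s3://bucket/key/raw_orders.csv",
        # optional; the handler also injects "from_rest_api": True
        "extra": {"triggered_by": "external-system"},
    },
)
resp.raise_for_status()
print(resp.json())  # the DatasetEvent serialized via dataset_event_schema
```

Marking externally created events with `from_rest_api` in `extra` makes them distinguishable from events emitted by task outlets when inspecting the event's metadata later.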