
Commit e2134cb

removed ids_moved, commented out some tests, and updated Dockerfile to 7200 hours
1 parent 32d19af commit e2134cb

File tree

3 files changed: +2 -49 lines changed

.github/workflows/test_and_build_cache.yml

+1
@@ -3,6 +3,7 @@ name: cache dependencies, test, and build
 on:
   push:
     branches:
+      - main
     tags:
       - v*
   pull_request:
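
Note: the added "- main" entry restricts the push trigger to the main branch (alongside v* tags); pull_request events carry no filter and still fire for pull requests against any branch.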

Dockerfile

+1 -5

@@ -25,12 +25,8 @@ ENV FROMREPO "tests_docker/temp_from"
 ENV TOREPO "tests_docker/temp_to"
 ENV INSTRUMENT "LATISS"
 ENV NOW "2020-03-01 23:59:59.999999"
-ENV EMBARGO_HRS "1063.08018813861"
+ENV EMBARGO_HRS "7200"
 ENV DATAQUERIES '{ "datasettype": "raw", "collections": "LATISS/raw/all"}'
-# ENV DATASETTYPE "raw"
-#'["raw", "calexp"]'
-# ENV COLLECTIONS "LATISS/raw/all"
-#'["LATISS/raw/all", "LATISS/runs/AUXTEL_DRP_IMAGING_2022-11A/w_2022_46/PREOPS-1616"]'
 ENV LOG "True"
 ENV PASTEMBARGO "1.0"
 
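
Note: the new EMBARGO_HRS of 7200 hours works out to 7200 / 24 = 300 days, versus roughly 44.3 days for the old value of 1063.08018813861 hours. The deleted commented-out DATASETTYPE and COLLECTIONS lines appear to be superseded by the DATAQUERIES JSON, which already names the dataset type and collection.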

src/move_embargo_args.py

-44
@@ -467,23 +467,6 @@ def parse_args():
         register_dataset_types=True,
         transfer_dimensions=True,
     )
-    ids_moved = []
-    for dt in dest_registry.queryDatasets(
-        datasetType=datalist_exposure, collections=collections_exposure
-    ):
-        try:
-            ids_moved.append(dt.dataId.mapping["exposure"])
-        except KeyError:
-            continue
-    """
-    ids_moved = [
-        dt.dataId.mapping["exposure"]
-        for dt in dest_registry.queryDatasets(
-            datasetType=datalist_exposure, collections=collections_exposure
-        )
-    ]
-    """
-    logger.info("exposure ids moved: %s", ids_moved)
     if datalist_visit:  # if there is anything in the list
         # first, run all of the exposure types through
         logger.info("datalist_visit exists: %s", datalist_visit)
@@ -537,24 +520,6 @@ def parse_args():
         register_dataset_types=True,
         transfer_dimensions=True,
     )
-    # its breaking here because not everything is a visit in the registry
-    ids_moved = []
-    for dt in dest_registry.queryDatasets(datasetType=..., collections=...):
-        try:
-            ids_moved.append(dt.dataId.mapping["visit"])
-        except KeyError:
-            continue
-    """
-    ids_moved = [
-        dt.dataId.mapping["visit"]
-        for dt in dest_registry.queryDatasets(
-            datasetType=..., collections=...)
-    ]
-    """
-    logger.info("datalist_visit: %s", datalist_visit)
-    logger.info("collections_visit: %s", collections_visit)
-    logger.info("visit ids moved: %s", ids_moved)
-
     if datalist_no_exposure:
         # this is for datatypes that don't have an exposure
         # or visit dimension
@@ -576,15 +541,6 @@ def parse_args():
         register_dataset_types=True,
         transfer_dimensions=True,
     )
-    ids_moved = [
-        dt.id
-        for dt in dest_registry.queryDatasets(
-            datasetType=datalist_no_exposure,
-            collections=collections_no_exposure,
-        )
-    ]
-    logger.info("ids in to butler: %s", ids_moved)
-
     if move == "True":
         # concatenate both dataset types
         combined_datalist = datalist_exposure + datalist_visit + datalist_no_exposure
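
Note: the three deleted blocks all followed the same pattern: after the transfer, query the destination registry and log which exposure IDs, visit IDs, or dataset UUIDs had arrived. A minimal sketch of that pattern folded into one helper is below; it only assumes the lsst.daf.butler Registry calls the file already uses (queryDatasets, dataId.mapping, DatasetRef.id), and the helper name log_moved_ids is hypothetical, not something this commit adds back.

def log_moved_ids(dest_registry, dataset_types, collections, dimension, logger):
    """Log which datasets are now present in the destination registry.

    If ``dimension`` is e.g. "exposure" or "visit", log that dimension's value
    for every dataset that has it; otherwise fall back to the dataset UUID.
    """
    ids_moved = []
    for ref in dest_registry.queryDatasets(
        datasetType=dataset_types, collections=collections
    ):
        if dimension is None:
            ids_moved.append(ref.id)
        else:
            # Not every dataset carries the requested dimension (the comment in
            # the old code noted this for "visit"), so skip those that do not
            # instead of raising KeyError.
            value = ref.dataId.mapping.get(dimension)
            if value is not None:
                ids_moved.append(value)
    logger.info("%s ids moved: %s", dimension or "dataset", ids_moved)

Called as, e.g., log_moved_ids(dest_registry, datalist_exposure, collections_exposure, "exposure", logger), this would cover the exposure, visit, and no-dimension cases that the deleted blocks handled separately.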

0 commit comments
