Skip to content

Commit 67dd718

Browse files
move history workflow extraction from API controller to service layer
1 parent aa9ad87 commit 67dd718

2 files changed

Lines changed: 105 additions & 96 deletions

File tree

lib/galaxy/webapps/galaxy/api/histories.py

Lines changed: 2 additions & 96 deletions
Original file line numberDiff line numberDiff line change
@@ -69,8 +69,6 @@
6969
)
7070
from galaxy.schema.types import LatestLiteral
7171
from galaxy.schema.workflows import (
72-
WorkflowExtractionJob,
73-
WorkflowExtractionOutput,
7472
WorkflowExtractionPayload,
7573
WorkflowExtractionResult,
7674
WorkflowExtractionSummary,
@@ -92,10 +90,7 @@
9290
query_serialization_params,
9391
)
9492
from galaxy.webapps.galaxy.services.histories import HistoriesService
95-
from galaxy.workflow.extract import (
96-
extract_workflow,
97-
summarize,
98-
)
93+
from galaxy.workflow.extract import extract_workflow
9994
from .common import HistoryIDPathParam
10095

10196
log = logging.getLogger(__name__)
@@ -801,96 +796,7 @@ def extraction_summary(
801796
history_id: HistoryIDPathParam,
802797
trans: ProvidesHistoryContext = DependsOnTrans,
803798
) -> WorkflowExtractionSummary:
804-
history = self.service.manager.get_accessible(history_id, trans.user, current_history=trans.history)
805-
jobs, warnings = summarize(trans, history)
806-
807-
def serialize_output(content) -> WorkflowExtractionOutput:
808-
return WorkflowExtractionOutput.model_validate(
809-
{
810-
"id": content.id,
811-
"hid": content.hid,
812-
"name": content.name,
813-
"state": content.state,
814-
"deleted": content.deleted,
815-
"history_content_type": content.history_content_type,
816-
}
817-
)
818-
819-
def input_step_type(outputs: list[WorkflowExtractionOutput]) -> Literal["input_dataset", "input_collection"]:
820-
if outputs and outputs[0].history_content_type == "dataset_collection":
821-
return "input_collection"
822-
return "input_dataset"
823-
824-
jobs_list = []
825-
for job, datasets in jobs.items():
826-
is_fake = getattr(job, "is_fake", False)
827-
outputs = [serialize_output(data) for _, data in datasets]
828-
checked = any(not data.deleted for _, data in datasets)
829-
830-
if is_fake:
831-
# FakeJob / DatasetCollectionCreationJob: input with no creating tool.
832-
jobs_list.append(
833-
WorkflowExtractionJob(
834-
id=None,
835-
step_type=input_step_type(outputs),
836-
tool_name=getattr(job, "name", None),
837-
tool_id=None,
838-
tool_version=None,
839-
checked=checked,
840-
tool_version_warning=None,
841-
outputs=outputs,
842-
)
843-
)
844-
else:
845-
tool = trans.app.toolbox.get_tool(job.tool_id, tool_version=job.tool_version)
846-
if tool is None:
847-
# Tool missing
848-
continue
849-
if not tool.is_workflow_compatible:
850-
# Not a workflow step (e.g. upload, data fetch) — treat as input.
851-
jobs_list.append(
852-
WorkflowExtractionJob(
853-
id=None,
854-
step_type=input_step_type(outputs),
855-
tool_name=tool.name,
856-
tool_id=None,
857-
tool_version=None,
858-
checked=checked,
859-
tool_version_warning=None,
860-
outputs=outputs,
861-
)
862-
)
863-
else:
864-
tool_version_warning = (
865-
(
866-
f'Dataset was created with tool version "{job.tool_version}", '
867-
f'but workflow extraction will use version "{tool.version}".'
868-
)
869-
if tool.version != job.tool_version
870-
else None
871-
)
872-
jobs_list.append(
873-
WorkflowExtractionJob.model_validate(
874-
{
875-
"id": job.id,
876-
"step_type": "tool",
877-
"tool_name": tool.name,
878-
"tool_id": job.tool_id,
879-
"tool_version": job.tool_version,
880-
"checked": checked,
881-
"tool_version_warning": tool_version_warning,
882-
"outputs": outputs,
883-
}
884-
)
885-
)
886-
887-
return WorkflowExtractionSummary.model_validate(
888-
{
889-
"history_id": history.id,
890-
"warnings": list(warnings),
891-
"jobs": jobs_list,
892-
}
893-
)
799+
return self.service.create_workflow_extraction_summary(history_id, trans)
894800

895801
@router.post(
896802
"/api/histories/{history_id}/extract_workflow",

lib/galaxy/webapps/galaxy/services/histories.py

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
)
1010
from typing import (
1111
cast,
12+
Literal,
1213
Optional,
1314
Union,
1415
)
@@ -77,6 +78,11 @@
7778
WriteHistoryTo,
7879
)
7980
from galaxy.schema.types import LatestLiteral
81+
from galaxy.schema.workflows import (
82+
WorkflowExtractionJob,
83+
WorkflowExtractionOutput,
84+
WorkflowExtractionSummary,
85+
)
8086
from galaxy.security.idencoding import IdEncodingHelper
8187
from galaxy.short_term_storage import ShortTermStorageAllocator
8288
from galaxy.util import restore_text
@@ -90,6 +96,7 @@
9096
)
9197
from galaxy.webapps.galaxy.services.notifications import NotificationService
9298
from galaxy.webapps.galaxy.services.sharable import ShareableService
99+
from galaxy.workflow.extract import summarize
93100

94101
log = logging.getLogger(__name__)
95102

@@ -751,6 +758,102 @@ def archive_history(
751758
history = self.manager.archive_history(history, archive_export_id=archive_export_id)
752759
return self._serialize_archived_history(trans, history)
753760

761+
def create_workflow_extraction_summary(
762+
self,
763+
history_id: DecodedDatabaseIdField,
764+
trans: ProvidesHistoryContext,
765+
) -> WorkflowExtractionSummary:
766+
history = self.manager.get_accessible(history_id, trans.user, current_history=trans.history)
767+
jobs, warnings = summarize(trans, history)
768+
769+
def serialize_output(content) -> WorkflowExtractionOutput:
770+
return WorkflowExtractionOutput.model_validate(
771+
{
772+
"id": content.id,
773+
"hid": content.hid,
774+
"name": content.name,
775+
"state": content.state,
776+
"deleted": content.deleted,
777+
"history_content_type": content.history_content_type,
778+
}
779+
)
780+
781+
def input_step_type(outputs: list[WorkflowExtractionOutput]) -> Literal["input_dataset", "input_collection"]:
782+
if outputs and outputs[0].history_content_type == "dataset_collection":
783+
return "input_collection"
784+
return "input_dataset"
785+
786+
jobs_list = []
787+
for job, datasets in jobs.items():
788+
is_fake = getattr(job, "is_fake", False)
789+
outputs = [serialize_output(data) for _, data in datasets]
790+
checked = any(not data.deleted for _, data in datasets)
791+
792+
if is_fake:
793+
# FakeJob / DatasetCollectionCreationJob: input with no creating tool.
794+
jobs_list.append(
795+
WorkflowExtractionJob(
796+
id=None,
797+
step_type=input_step_type(outputs),
798+
tool_name=getattr(job, "name", None),
799+
tool_id=None,
800+
tool_version=None,
801+
checked=checked,
802+
tool_version_warning=None,
803+
outputs=outputs,
804+
)
805+
)
806+
else:
807+
tool = trans.app.toolbox.get_tool(job.tool_id, tool_version=job.tool_version)
808+
if tool is None:
809+
# Tool missing
810+
continue
811+
if not tool.is_workflow_compatible:
812+
# Not a workflow step (e.g. upload, data fetch) — treat as input.
813+
jobs_list.append(
814+
WorkflowExtractionJob(
815+
id=None,
816+
step_type=input_step_type(outputs),
817+
tool_name=tool.name,
818+
tool_id=None,
819+
tool_version=None,
820+
checked=checked,
821+
tool_version_warning=None,
822+
outputs=outputs,
823+
)
824+
)
825+
else:
826+
tool_version_warning = (
827+
(
828+
f'Dataset was created with tool version "{job.tool_version}", '
829+
f'but workflow extraction will use version "{tool.version}".'
830+
)
831+
if tool.version != job.tool_version
832+
else None
833+
)
834+
jobs_list.append(
835+
WorkflowExtractionJob.model_validate(
836+
{
837+
"id": job.id,
838+
"step_type": "tool",
839+
"tool_name": tool.name,
840+
"tool_id": job.tool_id,
841+
"tool_version": job.tool_version,
842+
"checked": checked,
843+
"tool_version_warning": tool_version_warning,
844+
"outputs": outputs,
845+
}
846+
)
847+
)
848+
849+
return WorkflowExtractionSummary.model_validate(
850+
{
851+
"history_id": history.id,
852+
"warnings": list(warnings),
853+
"jobs": jobs_list,
854+
}
855+
)
856+
754857
def _ensure_export_record_can_be_associated_with_history_archival(
755858
self, history_id: int, export_record: model.StoreExportAssociation
756859
):

0 commit comments

Comments (0)