@@ -916,8 +916,8 @@ class User(Base, Dictifiable, RepresentById):
916916 data_manager_histories: Mapped[list["DataManagerHistoryAssociation"]] = relationship(back_populates="user")
917917 roles: Mapped[list["UserRoleAssociation"]] = relationship(back_populates="user")
918918 stored_workflows: Mapped[list["StoredWorkflow"]] = relationship(
919- back_populates="user",
920919 primaryjoin=(lambda: User.id == StoredWorkflow.user_id),
920+ viewonly=True,
921921 )
922922 all_notifications: Mapped[list["UserNotificationAssociation"]] = relationship(back_populates="user")
923923
@@ -3522,7 +3522,9 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
35223522 archive_export_id: Mapped[Optional[int]] = mapped_column(ForeignKey("store_export_association.id"), default=None)
35233523
35243524 datasets: Mapped[list["HistoryDatasetAssociation"]] = relationship(
3525- back_populates="history", order_by=lambda: asc(HistoryDatasetAssociation.hid)
3525+ primaryjoin=(lambda: HistoryDatasetAssociation.history_id == History.id),
3526+ order_by=lambda: asc(HistoryDatasetAssociation.hid),
3527+ viewonly=True,
35263528 )
35273529 exports: Mapped[list["JobExportHistoryArchive"]] = relationship(
35283530 back_populates="history",
@@ -3635,7 +3637,7 @@ def __init__(self, id=None, name=None, user=None):
36353637 self.user = user
36363638 # Objects to eventually add to history
36373639 self._pending_additions = []
3638- self._item_by_hid_cache = None
3640+ self._copied_from_object_id_cache = None
36393641
36403642 @reconstructor
36413643 def init_on_load(self):
@@ -3828,15 +3830,21 @@ def copy(self, name=None, target_user=None, activatable=False, all_datasets=Fals
38283830 hdas = self.datasets
38293831 else:
38303832 hdas = self.active_datasets
3833+ copied_from_object_id_map = {}
38313834 for hda in hdas:
38323835 # Copy HDA.
38333836 new_hda = hda.copy(flush=False)
38343837 new_history.add_dataset(new_hda, set_hid=False, quota=applies_to_quota)
3838+ copied_from_object_id_map[hda.id] = new_hda
38353839
38363840 if target_user:
38373841 new_hda.copy_item_annotation(db_session, self.user, hda, target_user, new_hda)
38383842 new_hda.copy_tags_from(target_user, hda)
38393843
3844+ # Pre-populate cache so HDCA copy's minimize_copies can find
3845+ # the just-created HDAs (viewonly self.datasets won't see unflushed rows).
3846+ new_history._copied_from_object_id_cache = copied_from_object_id_map
3847+
38403848 # Copy history dataset collections
38413849 if all_datasets:
38423850 hdcas = self.dataset_collections
@@ -3861,10 +3869,10 @@ def copy(self, name=None, target_user=None, activatable=False, all_datasets=Fals
38613869
38623870 return new_history
38633871
3864- def get_dataset_by_hid(self, hid):
3865- if self._item_by_hid_cache is None:
3866- self._item_by_hid_cache = {dataset.hid: dataset for dataset in self.datasets}
3867- return self._item_by_hid_cache.get(hid)
3872+ def get_copied_dataset(self, id):
3873+ if self._copied_from_object_id_cache is None:
3874+ return None
3875+ return self._copied_from_object_id_cache.get(id)
38683876
38693877 @property
38703878 def has_possible_members(self):
@@ -7955,7 +7963,7 @@ def build_statement():
79557963 subq = subq1.union(subq2)
79567964
79577965 # Build and return final query
7958- stm = select().select_from(subq)
7966+ stm = select().select_from(subq.subquery())
79597967 # Add aggregate columns for each job state
79607968 for state in enum_values(Job.states):
79617969 col = func.sum(case((column(state_label) == state, 1), else_=0)).label(state)
@@ -8434,12 +8442,8 @@ def copy_to_collection(
84348442 elif isinstance(element_object, HistoryDatasetAssociation):
84358443 new_element_object = None
84368444 if minimize_copies:
8437- new_element_object = element_destination.get_dataset_by_hid(element_object.hid)
8438- if (
8439- new_element_object
8440- and new_element_object.dataset
8441- and new_element_object.dataset.id == element_object.dataset_id
8442- ):
8445+ new_element_object = element_destination.get_copied_dataset(element_object.id)
8446+ if new_element_object:
84438447 element_object = new_element_object
84448448 else:
84458449 new_element_object = element_object.copy(
@@ -8593,7 +8597,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpd
85938597 published: Mapped[Optional[bool]] = mapped_column(index=True, default=False)
85948598
85958599 user: Mapped["User"] = relationship(
8596- primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows"
8600+ primaryjoin=(lambda: User.id == StoredWorkflow.user_id),
85978601 )
85988602 workflows: Mapped[list["Workflow"]] = relationship(
85998603 back_populates="stored_workflow",
@@ -8794,7 +8798,7 @@ class Workflow(Base, Dictifiable, RepresentById):
87948798 parent_workflow_steps = relationship(
87958799 "WorkflowStep",
87968800 primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id),
8797- back_populates="subworkflow",
8801+ viewonly=True,
87988802 )
87998803 stored_workflow = relationship(
88008804 "StoredWorkflow",
@@ -9007,7 +9011,6 @@ class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime):
90079011
90089012 subworkflow: Mapped[Optional["Workflow"]] = relationship(
90099013 primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id),
9010- back_populates="parent_workflow_steps",
90119014 )
90129015 dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship(
90139016 primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id)
@@ -11432,7 +11435,7 @@ def user_exists(cls, *args, **kwargs):
1143211435 (Required by social_core.storage.UserMixin interface)
1143311436 """
1143411437 stmt_user = select(User).filter_by(*args, **kwargs)
11435- stmt_count = select(func.count()).select_from(stmt_user)
11438+ stmt_count = select(func.count()).select_from(stmt_user.subquery())
1143611439 return cls.sa_session.scalar(stmt_count) > 0
1143711440
1143811441 @classmethod
@@ -12922,7 +12925,7 @@ class Credential(Base):
1292212925 _metadata=deferred(HistoryDatasetAssociation.table.c._metadata),
1292312926 dependent_jobs=relationship(JobToInputDatasetAssociation, back_populates="dataset"),
1292412927 creating_job_associations=relationship(JobToOutputDatasetAssociation, back_populates="dataset"),
12925- history=relationship(History, back_populates="datasets"),
12928+ history=relationship(History),
1292612929 implicitly_converted_datasets=relationship(
1292712930 ImplicitlyConvertedDatasetAssociation,
1292812931 primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.hda_parent_id == HistoryDatasetAssociation.id),
0 commit comments