pass modules in a few more places where possible
I think that covers everywhere in the frontend.

The backend is a bit odd: in places we are still passing dataset.modules when it is None, and so creating children that would each require an individual init of ModuleCollector. There could be more to look at there.
dale-wahl committed Nov 8, 2023
1 parent 74ea8fd commit ea9de6b
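
To make the oddity described in the commit message concrete, here is a minimal hypothetical sketch. The classes and the on-demand fallback are placeholders, not the real DataSet and ModuleCollector implementations:

# Hypothetical sketch of the situation described above; not 4CAT's actual code.
class ModuleCollector:
    def __init__(self):
        # In real 4CAT this discovers and loads the processor and datasource
        # modules, which is the expensive step worth doing once per process.
        self.processors = {}


class DataSet:
    def __init__(self, key, modules=None):
        self.key = key
        self.modules = modules      # may stay None if the caller had nothing to pass

    def get_own_processor(self):
        # Assumed fallback: build a collector on demand if none was handed down.
        if self.modules is None:
            self.modules = ModuleCollector()
        return self.modules.processors.get(self.key)


# Frontend case: the webtool holds one collector and passes it along, so the
# fallback never runs and every DataSet shares the same instance.
fourcat_modules = ModuleCollector()
parent = DataSet("parent-key", modules=fourcat_modules)
child = DataSet("child-key", modules=parent.modules)    # reuses the collector

# Backend oddity: if parent.modules is still None, modules=parent.modules just
# passes None along, and each child that later touches modules runs the
# fallback and builds its own ModuleCollector anyway.
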
Showing 3 changed files with 5 additions and 5 deletions.

6 changes: 3 additions & 3 deletions common/lib/dataset.py

@@ -161,7 +161,7 @@ def __init__(self, parameters=None, key=None, job=None, data=None, db=None, pare
    if extension is None:
        own_processor = self.get_own_processor()
        if own_processor:
-           extension = own_processor.get_extension(parent_dataset=DataSet(key=parent, db=db) if parent else None)
+           extension = own_processor.get_extension(parent_dataset=DataSet(key=parent, db=db, modules=self.modules) if parent else None)
    # Still no extension, default to 'csv'
    if not extension:
        extension = "csv"
@@ -171,7 +171,7 @@ def __init__(self, parameters=None, key=None, job=None, data=None, db=None, pare

    # retrieve analyses and processors that may be run for this dataset
    analyses = self.db.fetchall("SELECT * FROM datasets WHERE key_parent = %s ORDER BY timestamp ASC", (self.key,))
-   self.children = sorted([DataSet(data=analysis, db=self.db) for analysis in analyses],
+   self.children = sorted([DataSet(data=analysis, db=self.db, modules=self.modules) for analysis in analyses],
                           key=lambda dataset: dataset.is_finished(), reverse=True)

    self.refresh_owners()
@@ -1274,7 +1274,7 @@ def get_all_children(self, recursive=True):
    :return list: List of DataSets
    """
-   children = [DataSet(data=record, db=self.db) for record in self.db.fetchall("SELECT * FROM datasets WHERE key_parent = %s", (self.key,))]
+   children = [DataSet(data=record, db=self.db, modules=self.modules) for record in self.db.fetchall("SELECT * FROM datasets WHERE key_parent = %s", (self.key,))]
    results = children.copy()
    if recursive:
        for child in children:
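
All three hunks above thread self.modules into the child DataSet constructors. A self-contained toy model (again placeholder classes, not the real ones) of why that matters when a dataset tree is walked:

# Toy model of the get_all_children change; not 4CAT's real classes.
class ModuleCollector:
    instances = 0

    def __init__(self):
        # Stand-in for 4CAT's module discovery; count how often it runs.
        ModuleCollector.instances += 1


class DataSet:
    def __init__(self, key, children_keys=(), modules=None):
        self.key = key
        self.children_keys = children_keys
        # Assumed fallback when nothing is handed down.
        self.modules = modules if modules is not None else ModuleCollector()

    def get_all_children(self):
        # Mirrors the hunk above: children receive modules=self.modules,
        # so the whole tree ends up sharing a single collector.
        children = [DataSet(key=k, modules=self.modules) for k in self.children_keys]
        results = children.copy()
        for child in children:
            results += child.get_all_children()
        return results


root = DataSet("root", children_keys=("child-a", "child-b"), modules=ModuleCollector())
root.get_all_children()
print(ModuleCollector.instances)    # 1; without modules=self.modules it would be 3
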
2 changes: 1 addition & 1 deletion webtool/views/views_admin.py

@@ -900,7 +900,7 @@ def dataset_bulk():
    flash(f"{len(bulk_owner):,} new owner(s) were added to the datasets.")

    if not incomplete:
-       datasets = [DataSet(data=dataset, db=db) for dataset in datasets_meta]
+       datasets = [DataSet(data=dataset, db=db, modules=fourcat_modules) for dataset in datasets_meta]
        flash(f"{len(datasets):,} dataset(s) updated.")

    if action == "export":

2 changes: 1 addition & 1 deletion webtool/views/views_dataset.py

@@ -140,7 +140,7 @@ def show_results(page):

    # some housekeeping to prepare data for the template
    pagination = Pagination(page, page_size, num_datasets)
-   filtered = [DataSet(key=dataset["key"], db=db) for dataset in datasets]
+   filtered = [DataSet(key=dataset["key"], db=db, modules=fourcat_modules) for dataset in datasets]

    favourites = [row["key"] for row in
                  db.fetchall("SELECT key FROM users_favourites WHERE name = %s", (current_user.get_id(),))]
