Commit 691994d
Fix logging in worker processes
mobiusklein committed May 21, 2024
1 parent ddd5d58 commit 691994d
Showing 2 changed files with 8 additions and 11 deletions.
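
The changes below replace printf-style format strings (and, in one call, a message passed alongside a separate positional exception argument) with fully formatted f-strings, so each worker hands its log handler one finished message. As a minimal sketch of that pattern, not taken from the repository: the handler name and signature below are assumptions for illustration only.

# Hypothetical sketch of the pattern this commit moves toward: the worker builds
# the complete message itself and passes a single string to its log handler.
import multiprocessing
from typing import Callable

LogHandler = Callable[[str], None]

def print_log_handler(message: str) -> None:
    # Stand-in for the real handler, which forwards messages out of the worker.
    print(message)

def fetch_scans(log_handler: LogHandler = print_log_handler) -> None:
    try:
        raise KeyError("scan_0001")  # stand-in for a failure while fetching scans
    except Exception as e:
        # Old style: log_handler("An error occurred while fetching scans: %r" % e)
        # New style: one f-string, nothing left for the handler to interpolate.
        log_handler(
            f"An error occurred while fetching scans: {e!r} "
            f"in {multiprocessing.current_process().name}")

fetch_scans()
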
17 changes: 7 additions & 10 deletions src/ms_deisotope/tools/deisotoper/process.py
@@ -182,7 +182,7 @@ def _make_scan_batch(self) -> Tuple[
             except StopIteration:
                 break
             except Exception as e:
-                self.log_handler("An error occurred in _make_scan_batch: %r" % e)
+                self.log_handler(f"An error occurred in _make_scan_batch: {e}")
                 break
             if not self.ignore_tandem_scans:
                 batch.append((scan_id, product_scan_ids, True))
@@ -198,12 +198,12 @@ def _initialize_iterator(self):
                 self.start_scan, require_ms1=self.loader.has_ms1_scans(), grouped=True)
         except IndexError as e:
             self.log_handler(
-                "An error occurred while locating start scan", e)
+                f"An error occurred while locating start scan {e}", e)
             self.loader.reset()
             self.loader.make_iterator(grouped=True)
         except AttributeError as e:
             self.log_handler(
-                "The reader does not support random access, start time will be ignored", e)
+                f"The reader does not support random access, start time will be ignored {e}")
             self.loader.reset()
             self.loader.make_iterator(grouped=True)
         else:
@@ -254,12 +254,12 @@ def run(self):
             except StopIteration:
                 break
             except Exception as e:
-                self.log_handler("An error occurred while fetching scans: %r" % e)
+                self.log_handler(f"An error occurred while fetching scans: {e}")
                 break

         if self.no_more_event is not None:
             self.no_more_event.set()
-            self.log_handler("All Scan IDs have been dealt. %d scan bunches." % (count,))
+            self.log_handler(f"All Scan IDs have been dealt. {count} scan bunches.")
         else:
             self.scan_id_queue.put(DONE)

@@ -272,9 +272,7 @@ class _ScanTransformMixin(object):
     def log_error(self, error: Exception, scan_id: str, scan: Scan, product_scan_ids: List[str]):
         tb = traceback.format_exc()
         self.log_handler(
-            "An %r occurred for %s (index %r) in Process %r\n%s" % (
-                error, scan_id, scan.index, multiprocessing.current_process(),
-                tb))
+            f"An {error} occurred for {scan_id} (index {scan.index}) in Process {multiprocessing.current_process()}\n{tb}")

     def _init_batch_store(self):
         self._batch_store = deque()
@@ -290,8 +288,7 @@ def get_work(self, block: bool=True, timeout: float=30) -> Tuple[str, List[str],
             return result

     def log_message(self, message):
-        self.log_handler(message + ", %r" %
-                         (multiprocessing.current_process().name))
+        self.log_handler(message + f", {multiprocessing.current_process().name}")

     def all_work_done(self) -> bool:
         return self._work_complete.is_set()
2 changes: 1 addition & 1 deletion src/ms_deisotope/tools/deisotoper/scan_generator.py
@@ -299,7 +299,7 @@ def _make_scan_id_yielder(self, start_scan: str, end_scan: str, max_scans: int)

     def _make_scan_packer(self):
         """
-        This method initializes a helper object that will be shared with workersto pre-serialize
+        Initialize a helper object that will be shared with workersto pre-serialize
         scans before they are sent over IPC to save space, and avoid needing to reconstitute whole
         objects on the other side. If the serializing type changes, then the scan packer may also
         need to change.
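
For context, the docstring in this hunk describes the scan-packer idea: serialize scans in the producer once so only bytes cross the IPC boundary, rather than reconstituting whole objects on the other side. A minimal, hypothetical sketch of that idea using pickle and a multiprocessing queue; the ScanPacker class and its methods are illustrative and not the project's actual API.

import pickle
from multiprocessing import SimpleQueue

class ScanPacker:
    """Illustrative pre-serializer: turn a scan into bytes before it crosses IPC."""

    def pack(self, scan) -> bytes:
        # Serialize once in the producer so consumers receive a compact payload.
        return pickle.dumps(scan, protocol=pickle.HIGHEST_PROTOCOL)

    def unpack(self, payload: bytes):
        # The consumer reconstitutes the object only when it actually needs it.
        return pickle.loads(payload)

# Usage: the producer packs, the worker unpacks.
queue: "SimpleQueue[bytes]" = SimpleQueue()
packer = ScanPacker()
queue.put(packer.pack({"scan_id": "scan_0001", "peaks": [(101.07, 1200.0)]}))
scan = packer.unpack(queue.get())
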
