
Merge branch 'edge' into pd_thermocycler-form-pt1
ncdiehl11 committed Oct 2, 2024
2 parents ca91df0 + d0561ee commit 8f0c373
Showing 161 changed files with 5,115 additions and 811 deletions.
2 changes: 1 addition & 1 deletion .eslintignore
@@ -6,7 +6,7 @@
**/venv/**
.opentrons_config
**/tsconfig*.json
-**/vite.config.ts
+**/vite.config.mts
# prettier
**/package.json
**/CHANGELOG.md
2 changes: 1 addition & 1 deletion abr-testing/Pipfile
@@ -18,7 +18,7 @@ slackclient = "*"
slack-sdk = "*"
pandas = "*"
pandas-stubs = "*"

numpy = "==1.8.3"

[dev-packages]
atomicwrites = "==1.4.1"
115 changes: 60 additions & 55 deletions abr-testing/Pipfile.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -225,8 +225,8 @@ def get_sheet_by_name(self, title: str) -> None:

def token_check(self) -> None:
"""Check if credentials are still valid and refresh if expired."""
-    if self.credentials.expired:
-        self.credentials.refresh()  # Refresh the credentials
+    if self.credentials.access_token_expired:
+        self.gc.login()  # Refresh the credentials

def get_row_index_with_value(self, some_string: str, col_num: int) -> Any:
"""Find row index of string by looking in specific column."""
20 changes: 18 additions & 2 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -7,7 +7,7 @@
from abr_testing.data_collection import read_robot_logs
from typing import Set, Dict, Any, Tuple, List, Union
from abr_testing.automation import google_drive_tool, google_sheets_tool
-from abr_testing.tools import sync_abr_sheet
+from abr_testing.tools import sync_abr_sheet, plate_reader


def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
@@ -17,6 +17,7 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
"temperatureModuleV2",
"magneticBlockV1",
"thermocyclerModuleV2",
"absorbanceReaderV1",
)
all_modules = {key: "" for key in modList}
for module in file_results.get("modules", []):
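
For orientation, a hypothetical completion of the truncated get_modules() shown above; the serial-number lookup and the "model"/"serialNumber" keys are assumptions, not part of this diff.

from typing import Any, Dict

# Hypothetical sketch: map each tracked module model to the serial number
# reported in the run file, defaulting to "" when the module was not used.
def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
    modList = (
        "temperatureModuleV2",
        "magneticBlockV1",
        "thermocyclerModuleV2",
        "absorbanceReaderV1",
    )
    all_modules = {key: "" for key in modList}
    for module in file_results.get("modules", []):
        model = module.get("model", "")  # assumed key name
        if model in all_modules:
            all_modules[model] = module.get("serialNumber", "")  # assumed key name
    return all_modules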
@@ -35,6 +36,7 @@ def create_data_dictionary(
issue_url: str,
plate: str,
accuracy: Any,
+hellma_plate_standards: List[Dict[str, Any]],
) -> Tuple[List[List[Any]], List[str], List[List[Any]], List[str]]:
"""Pull data from run files and format into a dictionary."""
runs_and_robots: List[Any] = []
Expand Down Expand Up @@ -113,6 +115,9 @@ def create_data_dictionary(
hs_dict = read_robot_logs.hs_commands(file_results)
tm_dict = read_robot_logs.temperature_module_commands(file_results)
pipette_dict = read_robot_logs.instrument_commands(file_results)
+plate_reader_dict = read_robot_logs.plate_reader_commands(
+    file_results, hellma_plate_standards
+)
notes = {"Note1": "", "Jira Link": issue_url}
plate_measure = {
"Plate Measured": plate,
@@ -132,6 +137,7 @@
**hs_dict,
**tm_dict,
**tc_dict,
+**plate_reader_dict,
**pipette_dict,
**plate_measure,
}
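
A small reading aid for the row merge above, with invented keys: dict unpacking combines the sub-dictionaries left to right, and a later dict would override any duplicate key, so the per-module summaries are assumed to use distinct column names.

# Illustrative only; the keys and values below are made up.
notes = {"Note1": "", "Jira Link": ""}
plate_reader_dict = {"Plate Reader Measurements": 96}  # hypothetical column name
row = {**notes, **plate_reader_dict}
print(row)  # {'Note1': '', 'Jira Link': '', 'Plate Reader Measurements': 96}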
@@ -181,6 +187,7 @@ def create_data_dictionary(
storage_directory = args.storage_directory[0]
google_sheet_name = args.google_sheet_name[0]
email = args.email[0]

try:
credentials_path = os.path.join(storage_directory, "credentials.json")
except FileNotFoundError:
@@ -203,13 +210,22 @@ def create_data_dictionary(
missing_runs_from_gs = read_robot_logs.get_unseen_run_ids(
run_ids_on_gd, run_ids_on_gs
)
+# Read Hellma Files
+file_values = plate_reader.read_hellma_plate_files(storage_directory, 101934)
# Add missing runs to google sheet
(
transposed_runs_and_robots,
headers,
transposed_runs_and_lpc,
headers_lpc,
-) = create_data_dictionary(missing_runs_from_gs, storage_directory, "", "", "")
+) = create_data_dictionary(
+    missing_runs_from_gs,
+    storage_directory,
+    "",
+    "",
+    "",
+    hellma_plate_standards=file_values,
+)
start_row = google_sheet.get_index_row() + 1
print(start_row)
google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0")
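
For readability, the new call restated with comments mapping the empty-string placeholders to the parameter names visible in the signature hunk above (issue_url, plate, accuracy); the first two parameters are not named in this diff, so they are left unannotated.

file_values = plate_reader.read_hellma_plate_files(storage_directory, 101934)
(
    transposed_runs_and_robots,
    headers,
    transposed_runs_and_lpc,
    headers_lpc,
) = create_data_dictionary(
    missing_runs_from_gs,
    storage_directory,
    "",  # issue_url
    "",  # plate
    "",  # accuracy
    hellma_plate_standards=file_values,
)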
11 changes: 10 additions & 1 deletion abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -13,6 +13,7 @@
import re
import pandas as pd
from statistics import mean, StatisticsError
+from abr_testing.tools import plate_reader


def compare_current_trh_to_average(
@@ -590,13 +591,21 @@ def get_run_error_info_from_robot(
except FileNotFoundError:
print("Run file not uploaded.")
run_id = os.path.basename(error_run_log).split("_")[1].split(".")[0]
+# Get hellma readings
+file_values = plate_reader.read_hellma_plate_files(storage_directory, 101934)

(
runs_and_robots,
headers,
runs_and_lpc,
headers_lpc,
) = abr_google_drive.create_data_dictionary(
-    run_id, error_folder_path, issue_url, "", ""
+    run_id,
+    error_folder_path,
+    issue_url,
+    "",
+    "",
+    hellma_plate_standards=file_values,
)

start_row = google_sheet.get_index_row() + 1
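
One detail from the hunk above worth an example: run_id is parsed from the error log's filename with string splits, which assumes a "<robot>_<run_id>.<ext>" naming pattern. A hedged illustration with an invented path:

import os

# Illustrative only; the path and run id below are made up.
error_run_log = "/storage/error_logs/DVT1ABR4_ab1c2d3e.json"
run_id = os.path.basename(error_run_log).split("_")[1].split(".")[0]
print(run_id)  # -> "ab1c2d3e"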
(Diffs for the remaining changed files are not rendered here.)
