Skip to content

Commit

Permalink
Resolve merge conflicts
Browse files Browse the repository at this point in the history
  • Loading branch information
sadielbartholomew committed Jul 2, 2018
1 parent 54f36ab commit 9ef7258
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 33 deletions.
71 changes: 70 additions & 1 deletion lib/cylc/rundb.py
Original file line number Diff line number Diff line change
Expand Up @@ -866,7 +866,7 @@ def _upgrade_with_state_file_states(self, state_file_path):
# run mode, time stamp, initial cycle, final cycle
location = self._upgrade_with_state_file_header(line)
elif location == "broadcast":
# Ignore broadcast pickle in state file.
# Ignore broadcast json in state file.
# The "broadcast_states" table should already be populated.
if line == "Begin task states":
location = "task states"
Expand Down Expand Up @@ -957,6 +957,75 @@ def _upgrade_with_state_file_extras(self):
conn.execute(r"DROP TABLE " + t_name + "_old")
conn.commit()

def upgrade_pickle_to_json(self):
    """Upgrade the database tables if containing pickled objects.

    Back compat for <=7.6.X: rewrite the task action timers table so
    that the "ctx_key", "ctx" and "delays" columns hold JSON instead of
    pickle data. The old table is renamed aside, the new schema is
    created, rows are copied across one at a time (bad/unreadable
    pickle rows are skipped and counted), then the old table is
    dropped. No-op if the schema no longer has "*_pickle" columns.
    """
    conn = self.connect()
    t_name = self.TABLE_TASK_ACTION_TIMERS
    # Already upgraded (no "*_pickle" columns in the schema): nothing to do.
    if "_pickle" not in self.select_table_schema("table", t_name):
        return

    # Rename old tables
    conn.execute(r"ALTER TABLE %(table)s RENAME TO %(table)s_old" % {
        "table": t_name})
    conn.commit()

    # Create tables with new columns
    self.create_tables()

    # Populate new tables using old column data
    # Codacy: Pickle library appears to be in use, possible security issue.
    # Use of "pickle" module is for loading data written by <=7.6.X of Cylc
    # in users' own spaces.
    import pickle
    sys.stdout.write(r"Upgrading %s table " % (t_name))
    # Build the SELECT column list for the old table: the columns being
    # converted carry a "_pickle" suffix there; others keep their names.
    cols = []
    for col in self.tables[t_name].columns:
        if col.name in ['ctx_key', 'ctx', 'delays']:
            cols.append(col.name + '_pickle')
        else:
            cols.append(col.name)
    n_skips = 0
    # Codacy: Possible SQL injection vector through string-based query
    # construction.
    # This is highly unlikely - all strings in the construct are from
    # constants in this module.
    for i, row in enumerate(conn.execute(
            r"SELECT " + ",".join(cols) + " FROM " + t_name + "_old")):
        args = []
        try:
            for col, cell in zip(cols, row):
                if col == "ctx_pickle":
                    # Upgrade pickled namedtuple objects
                    orig = pickle.loads(str(cell))
                    if orig is not None:
                        # Serialised as [type name, constructor args] so
                        # the reader can rebuild the namedtuple.
                        args.append(json.dumps(
                            [type(orig).__name__, orig.__getnewargs__()]))
                    else:
                        args.append(json.dumps(orig))
                elif col.endswith("_pickle"):
                    # Upgrade pickled lists
                    args.append(json.dumps(pickle.loads(str(cell))))
                else:
                    args.append(cell)
        except (EOFError, TypeError, LookupError, ValueError):
            n_skips += 1  # skip bad rows
        else:
            # These tables can be big, so we don't want to queue the items
            # in memory.
            conn.execute(self.tables[t_name].get_insert_stmt(), args)
        # Progress display: erase the previous "<i> rows" and rewrite it.
        if i:
            sys.stdout.write("\b" * len("%d rows" % (i)))
        sys.stdout.write("%d rows" % (i + 1))
    sys.stdout.write(" done, %d skipped\n" % n_skips)
    conn.commit()

    # Drop old tables
    conn.execute(r"DROP TABLE %(table)s_old" % {"table": t_name})
    conn.commit()

def vacuum(self):
    """Run SQLite VACUUM on the database to reclaim unused space."""
    connection = self.connect()
    return connection.execute("VACUUM")
Expand Down
1 change: 0 additions & 1 deletion tests/cli/test_header

This file was deleted.

31 changes: 0 additions & 31 deletions tests/cyclers/49-365_calendar.t

This file was deleted.

5 changes: 5 additions & 0 deletions tests/lib/bash/test_header
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,11 @@
# array index. It can also be a dict {attr_key: attr_value, ...}. In
# which case, the expected data item is under a list of dicts, where a
# unique dict in the list contains all elements attr_key: attr_value.
# set_test_remote_host
# set CYLC_TEST_HOST from global config, for remote job tests.
# (Remote job tests should really use set_test_remote, below, however).
# set_test_remote
# set CYLC_TEST_HOST and CYLC_TEST_OWNER for remote job tests.
#-------------------------------------------------------------------------------
set -eu

Expand Down

0 comments on commit 9ef7258

Please sign in to comment.