feat(set_all_data_external) : new check_data option (#966)
* feat(set_all_data_external): the new check_data option gives the user the option to turn off data checking for improved performance
spaulins-usgs authored Aug 17, 2020
1 parent 992d788 commit 46e041b
Showing 7 changed files with 102 additions and 59 deletions.
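For orientation, here is a minimal, hypothetical usage sketch of the new option; the simulation name and workspace path below are placeholders and are not part of this commit.

```python
import flopy

# Load an existing MODFLOW 6 simulation (placeholder name and workspace).
sim = flopy.mf6.MFSimulation.load(sim_name="mfsim", sim_ws="path/to/model_ws")

# Store all list and array data in external files. check_data=True (the
# default) keeps the previous behavior; check_data=False skips the data
# verification step, which is the performance option added by this commit.
sim.set_all_data_external(check_data=False)
sim.write_simulation()
```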
34 changes: 21 additions & 13 deletions flopy/mf6/coordinates/modeldimensions.py
@@ -6,7 +6,7 @@

from .simulationtime import SimulationTime
from .modelgrid import UnstructuredModelGrid, ModelGrid
-from ..mfbase import StructException, FlopyException
+from ..mfbase import StructException, FlopyException, VerbosityLevel
from ..data.mfstructure import DatumType
from ..utils.mfenums import DiscretizationType
from ...utils.datautil import DatumUtil, NameIter
@@ -591,13 +591,17 @@ def _resolve_data_item_shape(
if result[0] is not None:
data = result[0].get_data()
if data is None:
-    print(
-        "WARNING: Unable to resolve dimension of "
-        "{} based on shape "
-        '"{}".'.format(
-            data_item_struct.path, item[0]
-        )
-    )
+    if (
+        self.simulation_data.verbosity_level.value
+        >= VerbosityLevel.normal.value
+    ):
+        print(
+            "WARNING: Unable to resolve "
+            "dimension of {} based on shape "
+            '"{}".'.format(
+                data_item_struct.path, item[0]
+            )
+        )
shape_dimensions.append(-9999)
consistent_shape = False
elif result[1] is not None:
@@ -623,13 +627,17 @@
self.resolve_exp(item, len(data))
)
else:
-    print(
-        "WARNING: Unable to resolve dimension of {} "
-        "based on shape "
-        '"{}".'.format(
-            data_item_struct.path, item[0]
-        )
-    )
+    if (
+        self.simulation_data.verbosity_level.value
+        >= VerbosityLevel.normal.value
+    ):
+        print(
+            "WARNING: Unable to resolve "
+            "dimension of {} based on shape "
+            '"{}".'.format(
+                data_item_struct.path, item[0]
+            )
+        )
shape_dimensions.append(-9999)
consistent_shape = False
else:
18 changes: 14 additions & 4 deletions flopy/mf6/data/mfdataarray.py
@@ -496,6 +496,7 @@ def store_as_external_file(
layer=None,
binary=False,
replace_existing_external=True,
check_data=True,
):
storage = self._get_storage_obj()
if storage is None:
@@ -542,6 +543,7 @@
current_layer = (current_layer,)
# get the layer's data
data = self._get_data(current_layer, True)

if data is None:
# do not write empty data to an external file
continue
@@ -572,7 +574,10 @@
"factor": factor,
"binary": binary,
}
-self._set_data(external_data, layer=current_layer)
+self._set_data(
+    external_data, layer=current_layer, check_data=False
+)

except Exception as ex:
type_, value_, traceback_ = sys.exc_info()
raise MFDataException(
@@ -660,7 +665,7 @@ def _get_data(self, layer=None, apply_mult=False, **kwargs):
def set_data(self, data, multiplier=None, layer=None):
self._set_data(data, multiplier, layer)

-def _set_data(self, data, multiplier=None, layer=None):
+def _set_data(self, data, multiplier=None, layer=None, check_data=True):
self._resync()
if self._get_storage_obj() is None:
self._data_storage = self._new_storage(False)
@@ -674,7 +679,7 @@ def _set_data(self, data, multiplier=None, layer=None):
if tas_name is not None:
# verify and save as time series array
self._get_storage_obj().set_tas(
-    tas_name, tas_label, self._current_key
+    tas_name, tas_label, self._current_key, check_data
)
return

@@ -1378,6 +1383,7 @@ def store_as_external_file(
layer=None,
binary=False,
replace_existing_external=True,
check_data=True,
):
sim_time = self._data_dimensions.package_dim.model_dim[
0
@@ -1398,7 +1404,11 @@
fname, ext = os.path.splitext(external_file_path)
full_name = "{}_{}{}".format(fname, sp + 1, ext)
super(MFTransientArray, self).store_as_external_file(
-    full_name, layer, binary, replace_existing_external
+    full_name,
+    layer,
+    binary,
+    replace_existing_external,
+    check_data,
)

def get_data(self, layer=None, apply_mult=True, **kwargs):
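The array-level changes above thread check_data through MFArray.store_as_external_file and _set_data. A minimal sketch of calling it directly on one dataset, assuming an existing flopy.mf6 groundwater-flow model gwf with a DIS package (names and file paths are illustrative only):

```python
# Write a single array dataset to an external text file without data checks.
dis = gwf.get_package("dis")
dis.botm.store_as_external_file(
    "botm.txt",                   # external file to create
    binary=False,                 # plain text rather than binary
    replace_existing_external=True,
    check_data=False,             # new option from this commit
)
```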
28 changes: 20 additions & 8 deletions flopy/mf6/data/mfdatalist.py
@@ -270,7 +270,11 @@ def new_simulation(self, sim_data):
self._data_line = None

def store_as_external_file(
-    self, external_file_path, binary=False, replace_existing_external=True
+    self,
+    external_file_path,
+    binary=False,
+    replace_existing_external=True,
+    check_data=True,
):
# only store data externally (do not subpackage info)
if self.structure.construct_package is None:
@@ -300,7 +304,7 @@ def store_as_external_file(
"data": data,
"binary": binary,
}
-self._set_data(external_data)
+self._set_data(external_data, check_data=check_data)

def has_data(self):
try:
@@ -349,15 +353,15 @@ def _get_data(self, apply_mult=False, **kwargs):
def get_data(self, apply_mult=False, **kwargs):
return self._get_data(apply_mult, **kwargs)

-def _set_data(self, data, autofill=False):
+def _set_data(self, data, autofill=False, check_data=True):
if isinstance(data, dict):
if "data" in data:
data_check = data["data"]
else:
data_check = None
else:
data_check = data
-if iterable(data_check):
+if iterable(data_check) and check_data:
# verify data length
min_line_size = self.structure.get_min_record_entries()
if isinstance(data_check[0], np.record) or (
@@ -392,8 +396,9 @@ def _set_data(self, data, autofill=False):
self._simulation_data.debug,
ex,
)
-# verify cellids
-self._check_valid_cellids()
+if check_data:
+    # verify cellids
+    self._check_valid_cellids()

def _check_valid_cellids(self):
# only check packages that are a part of a model
@@ -1413,7 +1418,11 @@ def data(self):
return self.get_data()

def store_as_external_file(
-    self, external_file_path, binary=False, replace_existing_external=True
+    self,
+    external_file_path,
+    binary=False,
+    replace_existing_external=True,
+    check_data=True,
):
sim_time = self._data_dimensions.package_dim.model_dim[
0
@@ -1433,7 +1442,10 @@
fname, ext = os.path.splitext(external_file_path)
full_name = "{}_{}{}".format(fname, sp + 1, ext)
super(MFTransientList, self).store_as_external_file(
-    full_name, binary, replace_existing_external
+    full_name,
+    binary,
+    replace_existing_external,
+    check_data,
)

def get_data(self, key=None, apply_mult=False, **kwargs):
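For list data, MFTransientList.store_as_external_file above writes one external file per stress period (the "_{sp + 1}" suffix) and now forwards check_data so the record-length and cellid verification in _set_data can be skipped. A minimal sketch, assuming an existing model gwf with a WEL package (names and file paths are illustrative only):

```python
# Store a package's stress period data externally without data checks.
wel = gwf.get_package("wel")
wel.stress_period_data.store_as_external_file(
    "wel_spd.txt",               # per-period files get a _<sp+1> suffix
    binary=False,
    replace_existing_external=True,
    check_data=False,            # skip record-length and cellid checks
)
```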
28 changes: 15 additions & 13 deletions flopy/mf6/data/mfdatastorage.py
@@ -1329,9 +1329,11 @@ def store_external(
False,
print_format,
)

ext_file_entry = self._get_file_entry()
fd.write(ext_file_entry)
fd.close()

# set as external data
self.layer_storage.first_item().internal_data = None
else:
@@ -2227,19 +2229,19 @@ def _fill_dimensions(self, data_iter, dimensions):
data_array[index] = data_iter.next()
return data_array

-    def set_tas(self, tas_name, tas_label, current_key):
-        # move to storage
-        package_dim = self.data_dimensions.package_dim
-        tas_names = package_dim.get_tasnames()
-        if (
-            tas_name.lower() not in tas_names
-            and self._simulation_data.verbosity_level.value
-            >= VerbosityLevel.normal.value
-        ):
-            print(
-                "WARNING: Time array series name {} not found in any "
-                "time series file".format(tas_name)
-            )
+    def set_tas(self, tas_name, tas_label, current_key, check_name=True):
+        if check_name:
+            package_dim = self.data_dimensions.package_dim
+            tas_names = package_dim.get_tasnames()
+            if (
+                tas_name.lower() not in tas_names
+                and self._simulation_data.verbosity_level.value
+                >= VerbosityLevel.normal.value
+            ):
+                print(
+                    "WARNING: Time array series name {} not found in any "
+                    "time series file".format(tas_name)
+                )
# this is a time series array with a valid tas variable
self.data_structure_type = DataStructureType.scalar
try:
13 changes: 8 additions & 5 deletions flopy/mf6/mfmodel.py
@@ -78,13 +78,16 @@ class MFModel(PackageContainer, ModelInterface):
package's name, type, or package object to be removed from
the model
set_model_relative_path : (path : string)
-    sets the file path to the model folder and updates all model file paths
+    sets the file path to the model folder and updates all model file
+    paths
is_valid : () : boolean
checks the validity of the model and all of its packages
rename_all_packages : (name : string)
renames all packages in the model
-set_all_data_external
-    sets the model's list and array data to be stored externally
+set_all_data_external : (check_data : boolean)
+    sets the model's list and array data to be stored externally,
+    check_data determines if data error checking is enabled during this
+    process
See Also
--------
@@ -1106,9 +1109,9 @@ def rename_all_packages(self, name):
package.package_type,
)

-def set_all_data_external(self):
+def set_all_data_external(self, check_data=True):
for package in self.packagelist:
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)

def register_package(
self,
22 changes: 14 additions & 8 deletions flopy/mf6/mfpackage.py
@@ -56,8 +56,6 @@ class MFBlockHeader(object):
writes block header to file object 'fd'
write_footer : (fd : file object)
writes block footer to file object 'fd'
set_all_data_external
sets the block's list and array data to be stored externally
"""

@@ -278,6 +276,10 @@ class MFBlock(object):
writes block to a file object
is_valid : ()
returns true of the block is valid
set_all_data_external : (base_name : string, check_data : boolean)
sets the block's list and array data to be stored externally,
base_name is external file name's prefix, check_data determines
if data error checking is enabled during this process
See Also
--------
@@ -1137,7 +1139,7 @@ def _header_exists(self, key):
return True
return False

-def set_all_data_external(self, base_name):
+def set_all_data_external(self, base_name, check_data=True):
for key, dataset in self.datasets.items():
if (
isinstance(dataset, mfdataarray.MFArray)
@@ -1150,6 +1152,7 @@ def set_all_data_external(self, base_name):
dataset.store_as_external_file(
"{}_{}.txt".format(base_name, dataset.structure.name),
replace_existing_external=False,
check_data=check_data,
)

def _find_repeating_datasets(self):
Expand Down Expand Up @@ -1364,8 +1367,6 @@ class MFPackage(PackageContainer, PackageInterface):
describes the blocks and data contain in this package
dimensions : PackageDimension
resolves data dimensions for data within this package
set_all_data_external
sets the package's list and array data to be stored externally
Methods
-------
@@ -1384,6 +1385,11 @@
Returns the package file's path
remove
Removes package from the simulation/model it is currently a part of
set_all_data_external : (check_data : boolean)
sets the package's list and array data to be stored externally,
check_data determines if data error checking is enabled during this
process
See Also
--------
@@ -1885,14 +1891,14 @@ def set_model_relative_path(self, model_ws):
for package in self._packagelist:
package.set_model_relative_path(model_ws)

-def set_all_data_external(self, check_data=True):
+def set_all_data_external(self, check_data=True):
# set blocks
for key, block in self.blocks.items():
file_name = os.path.split(self.filename)[1]
-    block.set_all_data_external(file_name)
+    block.set_all_data_external(file_name, check_data=check_data)
# set sub-packages
for package in self._packagelist:
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)

def load(self, strict=True):
# open file
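The model- and package-level wrappers above simply forward check_data down to each block and dataset. A minimal sketch, assuming a loaded simulation sim containing one model with an NPF package (names are illustrative only):

```python
# Apply the option per model or per package instead of simulation-wide.
gwf = sim.get_model()                          # first/only model in the simulation
gwf.set_all_data_external(check_data=False)    # every package in the model

npf = gwf.get_package("npf")
npf.set_all_data_external(check_data=False)    # a single package
```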
18 changes: 10 additions & 8 deletions flopy/mf6/modflow/mfsimulation.py
@@ -400,8 +400,10 @@ class MFSimulation(PackageContainer):
the model
is_valid : () : boolean
checks the validity of the solution and all of its models and packages
-set_all_data_external
-    sets the simulation's list and array data to be stored externally
+set_all_data_external : (check_data : boolean)
+    sets the simulation's list and array data to be stored externally,
+    check_data determines if data error checking is enabled during this
+    process
Examples
--------
@@ -1206,23 +1208,23 @@ def rename_all_packages(self, name):
for model in self._models.values():
model.rename_all_packages(name)

-def set_all_data_external(self):
+def set_all_data_external(self, check_data=True):
# copy any files whose paths have changed
self.simulation_data.mfpath.copy_files()
# set data external for all packages in all models
for model in self._models.values():
-    model.set_all_data_external()
+    model.set_all_data_external(check_data)
# set data external for ims packages
for package in self._ims_files.values():
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)
# set data external for ghost node packages
for package in self._ghost_node_files.values():
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)
# set data external for mover packages
for package in self._mover_files.values():
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)
for package in self._exchange_files.values():
-    package.set_all_data_external()
+    package.set_all_data_external(check_data)

def write_simulation(
self, ext_file_action=ExtFileAction.copy_relative_paths, silent=False
