Machine Learning & Artificial Intelligence Plugin #995
@@ -45,9 +45,12 @@
     import tensorflow as tf

     load = tf.keras.models.load_model
-except:
-    pass  # errors will be thrown if tensorflow is called but not installed,
-    # otherwise no error should be thrown so passing is fine
+except ImportError:
+    # if TensorFlow is not available, create a proxy function that will raise
+    # an exception whenever code tries to use `load()` at runtime
+    def load(*args, **kwargs):
+        raise RuntimeError(f"`load()` was called with args={args}, "
+                           f"kwargs={kwargs} but `tensorflow` is not available")
 # pylint: enable=import-error
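Under this pattern the module always imports cleanly, and the failure only surfaces when something actually tries to load a model. A short usage sketch (illustrative only; it assumes the fallback `load()` defined in the hunk above, and the file name is made up):

    # hypothetical caller: works normally when TensorFlow is installed,
    # otherwise raises the descriptive RuntimeError from the fallback above
    try:
        model = load("my_model.h5")
    except RuntimeError as err:
        print(f"ML plugin disabled: {err}")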
@@ -106,15 +109,33 @@ def __init__(self, model):
         for i in range(np.shape(self.model.inputs[0])[1]):
             try:
                 input_label = self.model.layers[1].input_labels[i]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute input_label, using default x"
+                    + str(i + 1) + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 input_label = "x" + str(i + 1)
             try:
                 input_min = self.model.layers[1].input_bounds[input_label][0]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute input_min, using default 0"
+                    + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 input_min = 0  # not necessarily a good default
             try:
                 input_max = self.model.layers[1].input_bounds[input_label][1]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute input_max, using default 1E5"
+                    + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 input_max = 1e5  # not necessarily a good default

             self.inputs[input_label] = NodeVars(

Reviewer comment on the new log messages: I think these extended messages are great, they add lots of useful context both for users and other developers. One minor suggestion: instead of repeating the …
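The truncated suggestion above seems to point at factoring out the repeated explanation text. One possible shape for that, as a sketch only (the helper name `_missing_attr_msg` is made up and not part of the PR):

    def _missing_attr_msg(attribute, default):
        # hypothetical helper: builds the shared explanation so each except
        # block only has to state the attribute name and the default used
        return (
            f"Model has no attribute {attribute}, using default {default}. "
            "If attribute should exist, check that Tensorflow Keras model was "
            "correctly saved with CustomLayer. Otherwise, this is not an error "
            "and model will load as expected using default attributes."
        )

    # e.g. inside an except block:
    #     logging.getLogger("foqus." + __name__).info(
    #         _missing_attr_msg("input_min", 0))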
@@ -132,15 +153,33 @@ def __init__(self, model):
         for j in range(np.shape(self.model.outputs[0])[1]):
             try:
                 output_label = self.model.layers[1].output_labels[j]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute output_label, using default z"
+                    + str(j + 1) + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 output_label = "z" + str(j + 1)
             try:
                 output_min = self.model.layers[1].output_bounds[output_label][0]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute output_min, using default 0"
+                    + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 output_min = 0  # not necessarily a good default
             try:
                 output_max = self.model.layers[1].output_bounds[output_label][1]
-            except:
+            except AttributeError:
+                logging.getLogger("foqus." + __name__).info(
+                    "Model has no attribute output_max, using default 1E5"
+                    + ". If attribute should exist, check that "
+                    + "Tensorflow Keras model was correctly saved with "
+                    + "CustomLayer. Otherwise, this is not an error and model "
+                    + "will load as expected using default attributes.")
                 output_max = 1e5  # not necessarily a good default

             self.outputs[output_label] = NodeVars(
@@ -158,7 +197,13 @@ def __init__(self, model):
         # check if user passed a model for normalized data - FOQUS will automatically scale/un-scale
         try:  # if attribute exists, user has specified a model form
             self.normalized = self.model.layers[1].normalized
-        except:  # otherwise user did not pass a normalized model
+        except AttributeError:  # otherwise user did not pass a normalized model
+            logging.getLogger("foqus." + __name__).info(
+                "Model has no attribute normalized, using default False"
+                + ". If attribute should exist, check that "
+                + "Tensorflow Keras model was correctly saved with "
+                + "CustomLayer. Otherwise, this is not an error and model "
+                + "will load as expected using default attributes.")
             self.normalized = False

     def run(self):
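For context, the `normalized` flag tells FOQUS to scale inputs into the model and un-scale its outputs using the stored bounds. A minimal sketch of the kind of min-max scaling this usually implies (an assumption about the convention, not the exact FOQUS implementation):

    def scale(value, vmin, vmax):
        # map a raw value into [0, 1] using the stored bounds
        return (value - vmin) / (vmax - vmin)

    def unscale(value, vmin, vmax):
        # map a normalized model output back to its original range
        return value * (vmax - vmin) + vmin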
@@ -604,9 +649,13 @@ def setSim(self, newType=None, newModel=None, force=False, ids=None):
                         str(self.modelName): getattr(module, str(self.modelName))
                     },
                 )
-            except:  # try to load model without custom layer
+            except ImportError:  # try to load model without custom layer
+                logging.getLogger("foqus." + __name__).info(
+                    "Cannot detect CustomLayer object to import, FOQUS "
+                    + "will import model without custom attributes.")
                 self.model = load(str(self.modelName) + ".h5")
-            os.chdir(cwd)  # reset to original working directory
+            finally:
+                os.chdir(cwd)  # reset to original working directory
             inst = pymodel_ml_ai(self.model)
             for vkey, v in inst.inputs.items():
                 self.gr.input[self.name][vkey] = v
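Moving `os.chdir(cwd)` into a `finally:` clause guarantees the working directory is restored even when loading raises. The same guarantee can be packaged as a reusable context manager; a sketch only, not part of the PR (the name `working_directory` is made up):

    import os
    from contextlib import contextmanager

    @contextmanager
    def working_directory(path):
        # temporarily change into `path`, always restoring the original cwd
        cwd = os.getcwd()
        os.chdir(path)
        try:
            yield
        finally:
            os.chdir(cwd)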
@@ -1106,9 +1155,13 @@ def runPymodelMLAI(self):
                         str(self.modelName): getattr(module, str(self.modelName))
                     },
                 )
-            except:  # try to load model without custom layer
+            except ImportError:  # try to load model without custom layer
+                logging.getLogger("foqus." + __name__).info(
+                    "Cannot detect CustomLayer object to import, FOQUS "
+                    + "will import model without custom attributes.")
                 self.model = load(str(self.modelName) + ".h5")
-            os.chdir(cwd)  # reset to original working directory
+            finally:
+                os.chdir(cwd)  # reset to original working directory
             self.pyModel = pymodel_ml_ai(self.model)
             # set the instance inputs
             for vkey, v in self.gr.input[self.name].items():
Reviewer comment on the fallback `load()`: Minor suggestion, but in retrospect `ModuleNotFoundError` would be a better fit than `RuntimeError` here.
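A sketch of what that suggestion could look like applied to the fallback `load()` (illustrative only; the message text is carried over from the diff):

    try:
        import tensorflow as tf

        load = tf.keras.models.load_model
    except ImportError:
        def load(*args, **kwargs):
            # ModuleNotFoundError subclasses ImportError, so it still signals
            # a missing dependency rather than a generic runtime failure
            raise ModuleNotFoundError(
                f"`load()` was called with args={args}, kwargs={kwargs} "
                "but `tensorflow` is not available"
            )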