Skip to content

Commit

Permalink
Fix some bugs of PySpark integration test & Disable Spark UI (ucbrise#665)
Browse files Browse the repository at this point in the history

* Fix some bugs of PySpark integration test

* Remove useless variables

* Disable Spark UI
  • Loading branch information
Sungjun.Kim authored and rkooo567 committed Apr 27, 2019
1 parent 944b8f2 commit beda867
Show file tree
Hide file tree
Showing 5 changed files with 26 additions and 15 deletions.
1 change: 1 addition & 0 deletions containers/python/pyspark_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ def __init__(self, path, input_type):
self.spark = SparkSession\
.builder\
.appName("clipper-pyspark")\
.config("spark.ui.enabled", "false")\
.getOrCreate()
metadata_path = os.path.join(path, "metadata.json")
spark_model_path = os.path.join(path, "pyspark_model_data")
Expand Down
12 changes: 7 additions & 5 deletions integration-tests/deploy_pyspark_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,11 +121,13 @@ def get_test_point():

import random
cluster_name = "spark-{}".format(random.randint(0, 5000))
clipper_conn = None

try:
spark = SparkSession\
.builder\
.appName("clipper-pyspark")\
.config("spark.ui.enabled", "false")\
.getOrCreate()
sc = spark.sparkContext
clipper_conn = create_docker_connection(
Expand Down Expand Up @@ -182,16 +184,16 @@ def get_test_point():
log_docker(clipper_conn)
log_clipper_state(clipper_conn)
logger.exception("BenchmarkException")
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)
else:
spark.stop()
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
except Exception:
logger.exception("Exception")
except Exception as e:
log_docker(clipper_conn)
clipper_conn = create_docker_connection(
logger.exception("Exception: {}".format(e))
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)
12 changes: 7 additions & 5 deletions integration-tests/deploy_pyspark_pipeline_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ def run_test():
spark = SparkSession\
.builder\
.appName("clipper-pyspark")\
.config("spark.ui.enabled", "false")\
.getOrCreate()

training = spark.createDataFrame(
Expand Down Expand Up @@ -94,6 +95,7 @@ def run_test():
# test predict function
print(predict(spark, model,
[json.dumps((np.random.randint(1000), "spark abcd"))]))
clipper_conn = None

try:
clipper_conn = create_docker_connection(
Expand Down Expand Up @@ -167,22 +169,22 @@ def run_test():
if num_defaults > num_preds / 2:
raise BenchmarkException("Error querying APP %s, MODEL %s:%d" %
(app_name, model_name, version))
except BenchmarkException as e:
except BenchmarkException:
log_docker(clipper_conn)
log_clipper_state(clipper_conn)
logger.exception("BenchmarkException")
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)
else:
spark.stop()
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
logger.info("ALL TESTS PASSED")
except Exception as e:
log_docker(clipper_conn)
logger.exception("Exception")
clipper_conn = create_docker_connection(
logger.exception("Exception: {}".format(e))
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)

Expand Down
13 changes: 8 additions & 5 deletions integration-tests/deploy_pyspark_sparkml_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,10 +116,13 @@ def get_test_point():

import random
cluster_name = "sparkml-{}".format(random.randint(0, 5000))
clipper_conn = None

try:
spark = SparkSession\
.builder\
.appName("clipper-pyspark-ml")\
.config("spark.ui.enabled", "false")\
.getOrCreate()
sc = spark.sparkContext
clipper_conn = create_docker_connection(
Expand Down Expand Up @@ -155,16 +158,16 @@ def get_test_point():
log_docker(clipper_conn)
log_clipper_state(clipper_conn)
logger.exception("BenchmarkException")
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)
else:
spark.stop()
clipper_conn = create_docker_connection(
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
except Exception:
except Exception as e:
log_docker(clipper_conn)
logger.exception("Exception")
clipper_conn = create_docker_connection(
logger.exception("Exception: {}".format(e))
create_docker_connection(
cleanup=True, start_clipper=False, cleanup_name=cluster_name)
sys.exit(1)
3 changes: 3 additions & 0 deletions integration-tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,9 @@ def log_clipper_state(cl):


def log_docker(clipper_conn):
if clipper_conn is None:
return

"""Retrieve status and log for last ten containers"""
container_runing = clipper_conn.cm.docker_client.containers.list(limit=10)
logger.info('----------------------')
Expand Down

0 comments on commit beda867

Please sign in to comment.