run_test.py: Implement --no-test-index needed for deployment
Needed for: elastic/elasticsearch#30112 (comment)

> I already have a workaround in place for this which consists of automatically deploying as many watches as I need to send different mails. Those watches are derived from my watch definition. For this, I extended [run_test.py](https://github.com/elastic/examples/blob/master/Alerting/Sample%20Watches/run_test.py) to inject Python code after the watch definition is read. Not ideal, but it is maintainable.
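
The injection mechanism itself is not part of this diff, but the help text of `--modify-watch-by-eval` ("Python code to modify the watch before loading it into Elastic") and the quoted comment suggest the supplied string is executed against the watch after it is read. A minimal sketch of that pattern, assuming the watch is held in a dict named `watch` (the field names below are hypothetical, not taken from the script):

```python
# Sketch only, not the script's actual code: run user-supplied Python
# against the loaded watch so one watch definition can be deployed many
# times with small variations (e.g. different mail recipients).
import json

watch = json.loads('{"actions": {"email_admin": {"email": {"to": "ops@example.com"}}}}')

# Value a user might pass via --modify-watch-by-eval (hypothetical fields):
modify_watch_by_eval = 'watch["actions"]["email_admin"]["email"]["to"] = "team2@example.com"'

exec(modify_watch_by_eval, {"watch": watch})  # injected code mutates the watch in place
print(watch["actions"]["email_admin"]["email"]["to"])  # -> team2@example.com
```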
ypid-geberit committed Oct 12, 2018
1 parent dd26f1e commit 18e4377
Showing 1 changed file with 31 additions and 25 deletions.
56 changes: 31 additions & 25 deletions Alerting/Sample Watches/run_test.py
@@ -34,6 +34,11 @@ def load_file(serialized_file):
 parser.add_argument('--test_file', help='test file')
 parser.add_argument('--keep-index', help='Keep the index where test documents have been loaded to after the test', action='store_true')
 parser.add_argument('--modify-watch-by-eval', help='Python code to modify the watch before loading it into Elastic')
+parser.add_argument(
+    '--no-test-index',
+    help='Don’t put the test data into an index.',
+    action='store_false',
+    dest='test_index')
 parser.add_argument(
     '--no-execute-watch',
     help='Do not force watch execution. This can be useful when you use this script to deploy the watch.',
@@ -46,31 +51,32 @@ def load_file(serialized_file):
 
 test = load_file(args.test_file)
 
-# Load Mapping
-try:
-    es.indices.delete(test['index'])
-except Exception as err:
-    print("Unable to delete current dataset")
-    pass
-es.indices.create(index=test["index"], body=load_file(test['mapping_file']))
-
-# Load pipeline if its declared
-params = {}
-if "ingest_pipeline_file" in test:
-    es.index(index="_ingest", doc_type="pipeline", id=test["watch_name"], body=load_file(test['ingest_pipeline_file']))
-    params["pipeline"] = test["watch_name"]
-
-# Index data
-current_data = last_time = datetime.datetime.utcnow()
-i = 0
-time_field = test["time_field"] if "time_field" in test else "@timestamp"
-for event in test['events']:
-    # All offsets are in seconds.
-    event_time = current_data+datetime.timedelta(seconds=int(event['offset'] if 'offset' in event else 0))
-    event[time_field] = event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') if time_field not in event else event[time_field]
-    es.index(index=test['index'], doc_type=test['type'], body=event, id=event['id'] if "id" in event else i, params=params)
-    i += 1
-es.indices.refresh(index=test["index"])
+if args.test_index:
+    # Load Mapping
+    try:
+        es.indices.delete(test['index'])
+    except Exception as err:
+        print("Unable to delete current dataset")
+        pass
+    es.indices.create(index=test["index"], body=load_file(test['mapping_file']))
+
+    # Load pipeline if it's declared
+    params = {}
+    if "ingest_pipeline_file" in test:
+        es.index(index="_ingest", doc_type="pipeline", id=test["watch_name"], body=load_file(test['ingest_pipeline_file']))
+        params["pipeline"] = test["watch_name"]
+
+    # Index data
+    current_data = last_time = datetime.datetime.utcnow()
+    i = 0
+    time_field = test["time_field"] if "time_field" in test else "@timestamp"
+    for event in test['events']:
+        # All offsets are in seconds.
+        event_time = current_data+datetime.timedelta(seconds=int(event['offset'] if 'offset' in event else 0))
+        event[time_field] = event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') if time_field not in event else event[time_field]
+        es.index(index=test['index'], doc_type=test['type'], body=event, id=event['id'] if "id" in event else i, params=params)
+        i += 1
+    es.indices.refresh(index=test["index"])
 
 # Load Scripts
 if 'scripts' in test:
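
The new flag uses argparse's `store_false`/`dest` pattern: the option is spelled `--no-test-index` on the command line, but the parsed attribute is `args.test_index`, which defaults to True, so existing test runs keep indexing data unless the flag is passed. A self-contained illustration:

```python
# Standalone demonstration of the store_false/dest pattern used by the new flag.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    '--no-test-index',
    help="Don't put the test data into an index.",
    action='store_false',
    dest='test_index')

print(parser.parse_args([]).test_index)                   # True: test data is indexed by default
print(parser.parse_args(['--no-test-index']).test_index)  # False: the whole indexing block is skipped
```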
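Inside the now-guarded indexing block, each test event's timestamp is derived from a single "now" captured before the loop plus an optional per-event `offset` in seconds, unless the event already carries its own timestamp. A minimal sketch of that logic, with hypothetical events:

```python
# Sketch of the offset-based timestamping used in the indexing loop above.
import datetime

events = [{"message": "a"}, {"message": "b", "offset": 30}]  # hypothetical test events
now = datetime.datetime.utcnow()

for event in events:
    event_time = now + datetime.timedelta(seconds=int(event.get('offset', 0)))
    # Only set @timestamp if the event does not already provide one.
    event.setdefault('@timestamp', event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))

print([e['@timestamp'] for e in events])
```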
