forked from cisagov/Malcolm
-
Notifications
You must be signed in to change notification settings - Fork 61
/
create-arkime-sessions-index.sh
executable file
·340 lines (280 loc) · 19.5 KB
/
create-arkime-sessions-index.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
#!/bin/bash
# Copyright (c) 2024 Battelle Energy Alliance, LLC. All rights reserved.

# One-shot initialization of the Malcolm data store(s) and Dashboards/Kibana:
# imports index templates, index patterns, dashboards, anomaly detectors, and
# alerting objects. All knobs below are overridable via environment variables.

# fail fast: exit on error (-e), on unset variables (-u), and on any failed pipeline stage
set -euo pipefail
# case-insensitive pattern matching inside [[ ]]
shopt -s nocasematch

# Dashboards (or Kibana) base URL
DASHB_URL=${DASHBOARDS_URL:-"http://dashboards:5601/dashboards"}
# network-traffic index pattern and its time field
INDEX_PATTERN=${MALCOLM_NETWORK_INDEX_PATTERN:-"arkime_sessions3-*"}
INDEX_TIME_FIELD=${MALCOLM_NETWORK_INDEX_TIME_FIELD:-"firstPacket"}
# non-network ("other", e.g. beats) index pattern and its time field
OTHER_INDEX_PATTERN=${MALCOLM_OTHER_INDEX_PATTERN:-"malcolm_beats_*"}
OTHER_INDEX_TIME_FIELD=${MALCOLM_OTHER_INDEX_TIME_FIELD:-"@timestamp"}
# name of the throwaway anomaly detector started/stopped once to force-create
# the anomaly detection state indices (see comments near its use below)
DUMMY_DETECTOR_NAME=${DUMMY_DETECTOR_NAME:-"malcolm_init_dummy"}
# whether to enable the Dashboards dark theme
DARK_MODE=${DASHBOARDS_DARKMODE:-"true"}
# source location for malcolm_template.json and the working copy actually imported
MALCOLM_TEMPLATES_DIR="/opt/templates"
MALCOLM_TEMPLATE_FILE_ORIG="$MALCOLM_TEMPLATES_DIR/malcolm_template.json"
MALCOLM_TEMPLATE_FILE="/data/init/malcolm_template.json"
# saved-object ID of the dashboard used as the Dashboards default route
DEFAULT_DASHBOARD=${OPENSEARCH_DEFAULT_DASHBOARD:-"0ad3d7c2-3441-485e-9dfe-dbb22e84e576"}
# snapshot repository name/location and compression setting registered with _snapshot
ISM_SNAPSHOT_REPO=${ISM_SNAPSHOT_REPO:-"logs"}
ISM_SNAPSHOT_COMPRESSED=${ISM_SNAPSHOT_COMPRESSED:-"false"}
# data store flavors, e.g. opensearch-local, opensearch-remote, elasticsearch-remote
OPENSEARCH_PRIMARY=${OPENSEARCH_PRIMARY:-"opensearch-local"}
OPENSEARCH_SECONDARY=${OPENSEARCH_SECONDARY:-""}
function DoReplacersInFile() {
  # Index pattern and time field name may be specified via environment variable, but need
  # to be reflected in dashboards, templates, anomaly detectors, etc.
  # This function takes a file and replaces the *_REPLACER placeholder tokens in-place.
  #   $1 - file to process (silently ignored if empty or not a regular file)
  local REPLFILE="$1"
  if [[ -n "$REPLFILE" ]] && [[ -f "$REPLFILE" ]]; then
    # Escape characters that are special in a sed replacement string (\, the / delimiter,
    # and &) so values like "foo/bar-*" or "a&b" can't corrupt the sed expression;
    # previously a value containing '/' made sed fail and left the placeholder behind.
    _SedEscapeReplacement() { printf '%s' "$1" | sed -e 's/[&\\/]/\\&/g'; }
    sed -i \
      -e "s/MALCOLM_NETWORK_INDEX_PATTERN_REPLACER/$(_SedEscapeReplacement "$INDEX_PATTERN")/g" \
      -e "s/MALCOLM_NETWORK_INDEX_TIME_FIELD_REPLACER/$(_SedEscapeReplacement "$INDEX_TIME_FIELD")/g" \
      -e "s/MALCOLM_OTHER_INDEX_PATTERN_REPLACER/$(_SedEscapeReplacement "$OTHER_INDEX_PATTERN")/g" \
      -e "s/MALCOLM_OTHER_INDEX_TIME_FIELD_REPLACER/$(_SedEscapeReplacement "$OTHER_INDEX_TIME_FIELD")/g" \
      "$REPLFILE" || true
    unset -f _SedEscapeReplacement
  fi
}
function DoReplacersForDir() {
  # Run DoReplacersInFile on every regular file beneath a directory.
  #   $1 - directory to process (silently ignored if empty or not a directory)
  local REPLDIR="$1"
  if [[ -n "$REPLDIR" ]] && [[ -d "$REPLDIR" ]]; then
    # NUL-delimited traversal so filenames containing whitespace (or even
    # newlines, which the previous line-based read mishandled) are processed safely
    while IFS= read -r -d '' fname; do
      DoReplacersInFile "$fname"
    done < <( find "$REPLDIR"/ -type f -print0 2>/dev/null )
  fi
}
# is the argument to automatically create this index enabled?
# (the :- default keeps 'set -u' from aborting the whole script when the variable is unset;
# unset now simply behaves as "false")
if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-}" == "true" ]] ; then

  # give OpenSearch time to start and Arkime to get its own template created before configuring dashboards
  /data/opensearch_status.sh -l arkime_sessions3_template >/dev/null 2>&1

  # configure the primary data store first, then (if one is defined) a remote secondary
  for LOOP in primary secondary; do

    # per-iteration setup: URL, curl credentials file, locality, and data store flavor
    if [[ "$LOOP" == "primary" ]]; then
      OPENSEARCH_URL_TO_USE=${OPENSEARCH_URL:-"http://opensearch:9200"}
      OPENSEARCH_CREDS_CONFIG_FILE_TO_USE=${OPENSEARCH_CREDS_CONFIG_FILE:-"/var/local/curlrc/.opensearch.primary.curlrc"}
      if ( [[ "$OPENSEARCH_PRIMARY" == "opensearch-remote" ]] || [[ "$OPENSEARCH_PRIMARY" == "elasticsearch-remote" ]] ) && [[ -r "$OPENSEARCH_CREDS_CONFIG_FILE_TO_USE" ]]; then
        OPENSEARCH_LOCAL=false
        CURL_CONFIG_PARAMS=(
          --config
          "$OPENSEARCH_CREDS_CONFIG_FILE_TO_USE"
        )
      else
        OPENSEARCH_LOCAL=true
        CURL_CONFIG_PARAMS=()
      fi
      DATASTORE_TYPE="$(echo "$OPENSEARCH_PRIMARY" | cut -d- -f1)"

    elif [[ "$LOOP" == "secondary" ]] && ( [[ "$OPENSEARCH_SECONDARY" == "opensearch-remote" ]] || [[ "$OPENSEARCH_SECONDARY" == "elasticsearch-remote" ]] ) && [[ -n "${OPENSEARCH_SECONDARY_URL:-""}" ]]; then
      OPENSEARCH_URL_TO_USE=$OPENSEARCH_SECONDARY_URL
      OPENSEARCH_LOCAL=false
      OPENSEARCH_CREDS_CONFIG_FILE_TO_USE=${OPENSEARCH_SECONDARY_CREDS_CONFIG_FILE:-"/var/local/curlrc/.opensearch.secondary.curlrc"}
      if [[ -r "$OPENSEARCH_CREDS_CONFIG_FILE_TO_USE" ]]; then
        CURL_CONFIG_PARAMS=(
          --config
          "$OPENSEARCH_CREDS_CONFIG_FILE_TO_USE"
        )
      else
        CURL_CONFIG_PARAMS=()
      fi
      DATASTORE_TYPE="$(echo "$OPENSEARCH_SECONDARY" | cut -d- -f1)"

    else
      # no (or incompletely configured) secondary data store; nothing to do this iteration
      continue
    fi
    [[ -z "$DATASTORE_TYPE" ]] && DATASTORE_TYPE="opensearch"

    # Elasticsearch/Kibana vs. OpenSearch/Dashboards differ in URI path, XSRF header
    # name, and which flavor of the ECS templates to load
    if [[ "$DATASTORE_TYPE" == "elasticsearch" ]]; then
      DASHBOARDS_URI_PATH="kibana"
      XSRF_HEADER="kbn-xsrf"
      ECS_TEMPLATES_DIR=/opt/ecs-templates
    else
      DASHBOARDS_URI_PATH="opensearch-dashboards"
      XSRF_HEADER="osd-xsrf"
      ECS_TEMPLATES_DIR=/opt/ecs-templates-os
    fi

    # is the Dashboards process server up and responding to requests?
    if [[ "$LOOP" != "primary" ]] || curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --fail -XGET "$DASHB_URL/api/status" ; then

      # have we not already created the index pattern?
      if [[ "$LOOP" != "primary" ]] || ! curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --fail -XGET "$DASHB_URL/api/saved_objects/index-pattern/$INDEX_PATTERN" ; then
        echo "$DATASTORE_TYPE ($LOOP) is running at \"${OPENSEARCH_URL_TO_USE}\"!"

        # register the repo name/path for opensearch snapshots (but don't count this an unrecoverable failure)
        if [[ "$LOOP" == "primary" ]] && [[ "$OPENSEARCH_LOCAL" == "true" ]]; then
          echo "Registering index snapshot repository..."
          curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -H "Accept: application/json" \
            -H "Content-type: application/json" \
            -XPUT -fsSL "$OPENSEARCH_URL_TO_USE/_snapshot/$ISM_SNAPSHOT_REPO" \
            -d "{ \"type\": \"fs\", \"settings\": { \"location\": \"$ISM_SNAPSHOT_REPO\", \"compress\": $ISM_SNAPSHOT_COMPRESSED } }" \
            || true
        fi

        # work on a private copy of the templates with the *_REPLACER placeholders expanded
        TEMPLATES_IMPORT_DIR="$(mktemp -d -t templates-XXXXXX)"
        rsync -a "$MALCOLM_TEMPLATES_DIR"/ "$TEMPLATES_IMPORT_DIR"/
        DoReplacersForDir "$TEMPLATES_IMPORT_DIR"
        MALCOLM_TEMPLATE_FILE_ORIG_TMP="$(echo "$MALCOLM_TEMPLATE_FILE_ORIG" | sed "s@$MALCOLM_TEMPLATES_DIR@$TEMPLATES_IMPORT_DIR@")"

        # calculate combined SHA sum of all templates to save as _meta.hash to determine if
        # we need to do this import (mostly useful for the secondary loop)
        TEMPLATE_HASH="$(find "$ECS_TEMPLATES_DIR"/composable "$TEMPLATES_IMPORT_DIR" -type f -name "*.json" -size +2c 2>/dev/null | sort | xargs -r cat | sha256sum | awk '{print $1}')"

        # get the previous stored template hash (if any) to avoid importing if it's already been imported
        set +e
        TEMPLATE_HASH_OLD="$(curl "${CURL_CONFIG_PARAMS[@]}" -sSL --fail -XGET -H "Content-Type: application/json" "$OPENSEARCH_URL_TO_USE/_index_template/malcolm_template" 2>/dev/null | jq --raw-output '.index_templates[]|select(.name=="malcolm_template")|.index_template._meta.hash' 2>/dev/null)"
        set -e

        # information about other index patterns will be obtained during template import
        OTHER_INDEX_PATTERNS=()

        # proceed only if the current template HASH doesn't match the previously imported one, or if there
        # was an error calculating or storing either
        if [[ "$TEMPLATE_HASH" != "$TEMPLATE_HASH_OLD" ]] || [[ -z "$TEMPLATE_HASH_OLD" ]] || [[ -z "$TEMPLATE_HASH" ]]; then

          if [[ -d "$ECS_TEMPLATES_DIR"/composable/component ]]; then
            echo "Importing ECS composable templates..."
            for i in "$ECS_TEMPLATES_DIR"/composable/component/*.json; do
              TEMP_BASENAME="$(basename "$i")"
              TEMP_FILENAME="${TEMP_BASENAME%.*}"
              echo "Importing ECS composable template $TEMP_FILENAME ..."
              curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" "$OPENSEARCH_URL_TO_USE/_component_template/ecs_$TEMP_FILENAME" -d "@$i" 2>&1 || true
            done
          fi

          if [[ -d "$TEMPLATES_IMPORT_DIR"/composable/component ]]; then
            echo "Importing custom ECS composable templates..."
            for i in "$TEMPLATES_IMPORT_DIR"/composable/component/*.json; do
              TEMP_BASENAME="$(basename "$i")"
              TEMP_FILENAME="${TEMP_BASENAME%.*}"
              echo "Importing custom ECS composable template $TEMP_FILENAME ..."
              curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" "$OPENSEARCH_URL_TO_USE/_component_template/custom_$TEMP_FILENAME" -d "@$i" 2>&1 || true
            done
          fi

          echo "Importing malcolm_template ($TEMPLATE_HASH)..."
          if [[ -f "$MALCOLM_TEMPLATE_FILE_ORIG_TMP" ]] && [[ ! -f "$MALCOLM_TEMPLATE_FILE" ]]; then
            cp "$MALCOLM_TEMPLATE_FILE_ORIG_TMP" "$MALCOLM_TEMPLATE_FILE"
          fi

          # store the TEMPLATE_HASH we calculated earlier as the _meta.hash for the malcolm template
          MALCOLM_TEMPLATE_FILE_TEMP="$(mktemp)"
          ( jq "._meta.hash=\"$TEMPLATE_HASH\"" "$MALCOLM_TEMPLATE_FILE" >"$MALCOLM_TEMPLATE_FILE_TEMP" 2>/dev/null ) && \
            [[ -s "$MALCOLM_TEMPLATE_FILE_TEMP" ]] && \
            cp -f "$MALCOLM_TEMPLATE_FILE_TEMP" "$MALCOLM_TEMPLATE_FILE" && \
            rm -f "$MALCOLM_TEMPLATE_FILE_TEMP"

          # load malcolm_template containing malcolm data source field type mappings (merged from /opt/templates/malcolm_template.json to /data/init/malcolm_template.json in dashboard-helpers on startup)
          curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" \
            "$OPENSEARCH_URL_TO_USE/_index_template/malcolm_template" -d "@$MALCOLM_TEMPLATE_FILE" 2>&1

          # import other templates as well (and get info for creating their index patterns)
          for i in "$TEMPLATES_IMPORT_DIR"/*.json; do
            TEMP_BASENAME="$(basename "$i")"
            TEMP_FILENAME="${TEMP_BASENAME%.*}"
            if [[ "$TEMP_FILENAME" != "malcolm_template" ]]; then
              echo "Importing template \"$TEMP_FILENAME\"..."
              if curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" "$OPENSEARCH_URL_TO_USE/_index_template/$TEMP_FILENAME" -d "@$i" 2>&1; then
                for TEMPLATE_INDEX_PATTERN in $(jq '.index_patterns[]' "$i" | tr -d '"'); do
                  # each entry is "id;title;time field" for index pattern creation below
                  OTHER_INDEX_PATTERNS+=("$TEMPLATE_INDEX_PATTERN;$TEMPLATE_INDEX_PATTERN;@timestamp")
                done
              fi
            fi
          done

        else
          echo "malcolm_template ($TEMPLATE_HASH) already exists ($LOOP) at \"${OPENSEARCH_URL_TO_USE}\""
        fi # TEMPLATE_HASH check
        rm -rf "${TEMPLATES_IMPORT_DIR}"

        # Dashboards saved objects (index patterns, dashboards, settings, detectors,
        # alerting) only need to be set up against the primary data store
        if [[ "$LOOP" == "primary" ]]; then

          echo "Importing index pattern..."
          # From https://github.com/elastic/kibana/issues/3709
          # Create index pattern
          curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" -H "$XSRF_HEADER: anything" \
            "$DASHB_URL/api/saved_objects/index-pattern/$INDEX_PATTERN" \
            -d"{\"attributes\":{\"title\":\"$INDEX_PATTERN\",\"timeFieldName\":\"$INDEX_TIME_FIELD\"}}" 2>&1 || true

          echo "Setting default index pattern..."
          # Make it the default index
          curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL -XPOST -H "Content-Type: application/json" -H "$XSRF_HEADER: anything" \
            "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings/defaultIndex" \
            -d"{\"value\":\"$INDEX_PATTERN\"}" || true

          # create index patterns for the other templates imported above; quote the expansion
          # (entries contain globs like "*" which an unquoted expansion would pathname-expand)
          # and use the ${arr[@]+...} idiom so an empty array doesn't trip 'set -u' on older bash
          for i in ${OTHER_INDEX_PATTERNS[@]+"${OTHER_INDEX_PATTERNS[@]}"}; do
            IDX_ID="$(echo "$i" | cut -d';' -f1)"
            IDX_NAME="$(echo "$i" | cut -d';' -f2)"
            IDX_TIME_FIELD="$(echo "$i" | cut -d';' -f3)"
            echo "Creating index pattern \"$IDX_NAME\"..."
            curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" -H "$XSRF_HEADER: anything" \
              "$DASHB_URL/api/saved_objects/index-pattern/$IDX_ID" \
              -d"{\"attributes\":{\"title\":\"$IDX_NAME\",\"timeFieldName\":\"$IDX_TIME_FIELD\"}}" 2>&1 || true
          done

          echo "Importing $DATASTORE_TYPE Dashboards saved objects..."
          # install default dashboards
          DASHBOARDS_IMPORT_DIR="$(mktemp -d -t dashboards-XXXXXX)"
          rsync -a /opt/dashboards/ "$DASHBOARDS_IMPORT_DIR"/
          DoReplacersForDir "$DASHBOARDS_IMPORT_DIR"/
          for i in "${DASHBOARDS_IMPORT_DIR}"/*.json; do
            if [[ "$DATASTORE_TYPE" == "elasticsearch" ]]; then
              # strip out Arkime and NetBox links from dashboards' navigation pane when doing Kibana import (idaholab/Malcolm#286)
              sed -i 's/ \\\\n\[↪ NetBox\](\/netbox\/) \\\\n\[↪ Arkime\](\/arkime)//' "$i"
              # take care of a few other substitutions
              sed -i 's/opensearchDashboardsAddFilter/kibanaAddFilter/g' "$i"
            fi
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/dashboards/import?force=true" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "@$i"
          done
          rm -rf "${DASHBOARDS_IMPORT_DIR}"

          # beats will no longer import its dashboards into OpenSearch
          # (see opensearch-project/OpenSearch-Dashboards#656 and
          # opensearch-project/OpenSearch-Dashboards#831). As such, we're going to
          # manually add load our dashboards in /opt/dashboards/beats as well.
          BEATS_DASHBOARDS_IMPORT_DIR="$(mktemp -d -t beats-XXXXXX)"
          rsync -a /opt/dashboards/beats/ "$BEATS_DASHBOARDS_IMPORT_DIR"/
          DoReplacersForDir "$BEATS_DASHBOARDS_IMPORT_DIR"
          for i in "${BEATS_DASHBOARDS_IMPORT_DIR}"/*.json; do
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/dashboards/import?force=true" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "@$i"
          done
          rm -rf "${BEATS_DASHBOARDS_IMPORT_DIR}"
          echo "$DATASTORE_TYPE Dashboards saved objects import complete!"

          if [[ "$DATASTORE_TYPE" == "opensearch" ]]; then
            # some features and tweaks like anomaly detection, alerting, etc. only exist in opensearch

            # set dark theme (or not)
            [[ "$DARK_MODE" == "true" ]] && DARK_MODE_ARG='{"value":true}' || DARK_MODE_ARG='{"value":false}'
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings/theme:darkMode" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "$DARK_MODE_ARG"

            # set default dashboard
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings/defaultRoute" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "{\"value\":\"/app/dashboards#/view/${DEFAULT_DASHBOARD}\"}"

            # set default query time range
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d \
              '{"changes":{"timepicker:timeDefaults":"{\n \"from\": \"now-24h\",\n \"to\": \"now\",\n \"mode\": \"quick\"}"}}'

            # turn off telemetry
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/telemetry/v2/optIn" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d '{"enabled":false}'

            # pin filters by default
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings/filters:pinnedByDefault" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d '{"value":true}'

            # enable in-session storage
            curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$DASHB_URL/api/$DASHBOARDS_URI_PATH/settings/state:storeInSessionStorage" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d '{"value":true}'

            # before we go on to create the anomaly detectors, we need to wait for actual network log documents
            /data/opensearch_status.sh -w >/dev/null 2>&1
            sleep 60

            echo "Creating $DATASTORE_TYPE anomaly detectors..."
            # Create anomaly detectors here
            ANOMALY_IMPORT_DIR="$(mktemp -d -t anomaly-XXXXXX)"
            rsync -a /opt/anomaly_detectors/ "$ANOMALY_IMPORT_DIR"/
            DoReplacersForDir "$ANOMALY_IMPORT_DIR"
            for i in "${ANOMALY_IMPORT_DIR}"/*.json; do
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_anomaly_detection/detectors" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "@$i"
            done
            rm -rf "${ANOMALY_IMPORT_DIR}"

            # trigger a start/stop for the dummy detector to make sure the .opendistro-anomaly-detection-state index gets created
            # see:
            # - https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/issues/109
            # - https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/issues/155
            # - https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/issues/156
            # - https://discuss.opendistrocommunity.dev/t/errors-opening-anomaly-detection-plugin-for-dashboards-after-creation-via-api/7711
            set +e
            DUMMY_DETECTOR_ID=""
            until [[ -n "$DUMMY_DETECTOR_ID" ]]; do
              sleep 5
              DUMMY_DETECTOR_ID="$(curl "${CURL_CONFIG_PARAMS[@]}" -L --fail --silent --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_anomaly_detection/detectors/_search" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "{ \"query\": { \"match\": { \"name\": \"$DUMMY_DETECTOR_NAME\" } } }" | jq '.. | ._id? // empty' 2>/dev/null | head -n 1 | tr -d '"')"
            done
            set -e
            if [[ -n "$DUMMY_DETECTOR_ID" ]]; then
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_anomaly_detection/detectors/$DUMMY_DETECTOR_ID/_start" -H "$XSRF_HEADER:true" -H 'Content-type:application/json'
              sleep 10
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_anomaly_detection/detectors/$DUMMY_DETECTOR_ID/_stop" -H "$XSRF_HEADER:true" -H 'Content-type:application/json'
              sleep 10
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XDELETE "$OPENSEARCH_URL_TO_USE/_plugins/_anomaly_detection/detectors/$DUMMY_DETECTOR_ID" -H "$XSRF_HEADER:true" -H 'Content-type:application/json'
            fi
            echo "$DATASTORE_TYPE anomaly detectors creation complete!"

            echo "Creating $DATASTORE_TYPE alerting objects..."
            # Create notification/alerting objects here
            # notification channels
            for i in /opt/notifications/channels/*.json; do
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_notifications/configs" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "@$i"
            done
            # monitors
            ALERTING_IMPORT_DIR="$(mktemp -d -t alerting-XXXXXX)"
            rsync -a /opt/alerting/monitors/ "$ALERTING_IMPORT_DIR"/
            DoReplacersForDir "$ALERTING_IMPORT_DIR"
            for i in "${ALERTING_IMPORT_DIR}"/*.json; do
              curl "${CURL_CONFIG_PARAMS[@]}" -L --silent --output /dev/null --show-error -XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_alerting/monitors" -H "$XSRF_HEADER:true" -H 'Content-type:application/json' -d "@$i"
            done
            rm -rf "${ALERTING_IMPORT_DIR}"
            echo "$DATASTORE_TYPE alerting objects creation complete!"

          fi # DATASTORE_TYPE == opensearch
        fi # stuff to only do for primary
      fi # index pattern not already created check
    fi # dashboards is running
  done # primary vs. secondary
fi # CREATE_OS_ARKIME_SESSION_INDEX is true