Skip to content
This repository has been archived by the owner on Dec 17, 2021. It is now read-only.

Commit

Permalink
fix: mongo real time data check (#107)
Browse files Browse the repository at this point in the history
* fix: fix host _id creation for a database check

* fix: add upsert to STATIC data

* fix: add db cleanup after inventory or config change

* test: add unit tests
  • Loading branch information
omrozowicz-splunk authored Aug 25, 2021
1 parent c01fb4d commit 4526b0c
Show file tree
Hide file tree
Showing 4 changed files with 69 additions and 9 deletions.
4 changes: 4 additions & 0 deletions splunk_connect_for_snmp_poller/manager/poller.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
automatic_realtime_task,
create_poller_scheduler_entry_key,
parse_inventory_file,
return_database_id,
)
from splunk_connect_for_snmp_poller.manager.tasks import snmp_polling
from splunk_connect_for_snmp_poller.mongo import WalkedHostsRepository
Expand Down Expand Up @@ -139,6 +140,9 @@ def __check_inventory(self):
if entry_key not in inventory_hosts:
logger.debug(f"Removing job for {entry_key}")
schedule.cancel_job(self._jobs_map.get(entry_key))
db_host_id = return_database_id(entry_key)
logger.debug(f"Removing _id {db_host_id} from mongo database")
self._mongo_walked_hosts_coll.delete_host(db_host_id)
del self._jobs_map[entry_key]

def __update_schedule(
Expand Down
20 changes: 13 additions & 7 deletions splunk_connect_for_snmp_poller/manager/poller_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,10 +133,8 @@ def _update_mongo(
all_walked_hosts_collection, host, host_already_walked, current_sys_up_time
):
if not host_already_walked:
_host, _port = parse_port(host)
host_to_add = f"{_host}:{_port}"
logger.info(f"Adding host: {host_to_add} into Mongo database")
all_walked_hosts_collection.add_host(host_to_add)
logger.info(f"Adding host: {host} into Mongo database")
all_walked_hosts_collection.add_host(host)
all_walked_hosts_collection.update_real_time_data_for(host, current_sys_up_time)


Expand All @@ -155,15 +153,16 @@ def automatic_realtime_task(
local_snmp_engine,
):
for inventory_record in parse_inventory_file(inventory_file_path):
db_host_id = return_database_id(inventory_record.host)
sys_up_time = _extract_sys_uptime_instance(
local_snmp_engine,
inventory_record.host,
db_host_id,
inventory_record.version,
inventory_record.community,
server_config,
)
host_already_walked, should_do_walk = _walk_info(
all_walked_hosts_collection, inventory_record.host, sys_up_time
all_walked_hosts_collection, db_host_id, sys_up_time
)
if should_do_walk:
schedule.every().second.do(
Expand All @@ -177,11 +176,18 @@ def automatic_realtime_task(
)
_update_mongo(
all_walked_hosts_collection,
inventory_record.host,
db_host_id,
host_already_walked,
sys_up_time,
)


def create_poller_scheduler_entry_key(host, profile):
    """Build the scheduler entry key "<host>#<profile>" for an inventory host."""
    return "#".join((host, profile))


def return_database_id(host):
    """Return the Mongo ``_id`` ("host:port") for an inventory host or entry key.

    Accepts either a bare host ("1.2.3.4", "1.2.3.4:162") or a scheduler
    entry key ("1.2.3.4#profile"); any "#profile" suffix is discarded before
    the host and port are resolved via ``parse_port``.
    """
    # str.partition returns the whole string unchanged when "#" is absent,
    # which matches the original guarded split.
    bare_host = host.partition("#")[0]
    address, port = parse_port(bare_host)
    return f"{address}:{port}"
6 changes: 4 additions & 2 deletions splunk_connect_for_snmp_poller/mongo.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@
IF-MIB::ifAdminStatus.1 = INTEGER: up(1)
IF-MIB::ifAdminStatus.2 = INTEGER: up(1)
* MIB_STATIC_DATA: a dictionary that contains some MIB real-time data that needs to be collected constantly.
* MIB_REAL_TIME_DATA: a dictionary that contains some MIB real-time data that needs to be collected constantly.
At the moment, we only need to collect sysUpTimeInstance data in order to decide when we need to re-walk
a given host.
"""
Expand Down Expand Up @@ -106,7 +106,8 @@ def add_host(self, host):
self._walked_hosts.insert_one({"_id": host})

def delete_host(self, host):
self._walked_hosts.delete_many({"_id": host})
logger.info(f"Delete host {host} from walked_host collection")
self._walked_hosts.delete_one({"_id": host})

def clear(self):
self._walked_hosts.remove()
Expand Down Expand Up @@ -153,5 +154,6 @@ def update_mib_static_data_for(self, host, if_mib_data):
self._walked_hosts.find_one_and_update(
{"_id": host},
{"$set": real_time_data_dictionary},
upsert=True,
return_document=ReturnDocument.AFTER,
)
48 changes: 48 additions & 0 deletions tests/test_poller_utilities.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
#
# Copyright 2021 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from unittest import TestCase
from unittest.mock import Mock

sys.modules["splunk_connect_for_snmp_poller.manager.celery_client"] = Mock()
from splunk_connect_for_snmp_poller.manager.poller_utilities import ( # noqa: E402
create_poller_scheduler_entry_key,
return_database_id,
)


class TestPollerUtilities(TestCase):
    """Unit tests for the host/entry-key helpers in poller_utilities."""

    def test_return_database_id_bare_ip(self):
        # A bare IP resolves to the default SNMP port 161.
        self.assertEqual(return_database_id("127.0.0.1"), "127.0.0.1:161")

    def test_return_database_id_ip_with_port(self):
        # An explicit port is preserved as given.
        self.assertEqual(return_database_id("127.0.0.1:29"), "127.0.0.1:29")

    def test_return_database_id_entry(self):
        # The "#profile" suffix of a scheduler entry key is stripped.
        self.assertEqual(
            return_database_id("127.0.0.1#1.3.6.1.2.1.2.*"), "127.0.0.1:161"
        )

    def test_return_database_id_entry_with_port(self):
        # The explicit port survives even when a profile suffix is present.
        self.assertEqual(
            return_database_id("127.0.0.1:162#1.3.6.1.2.1.2.*"), "127.0.0.1:162"
        )

    def test_create_poller_scheduler_entry_key(self):
        # Entry keys are formed as "<host>#<profile>".
        self.assertEqual(
            create_poller_scheduler_entry_key("127.0.0.1", "1.3.6.1.2.1.2"),
            "127.0.0.1#1.3.6.1.2.1.2",
        )

0 comments on commit 4526b0c

Please sign in to comment.