Experimental changes to allow relational-data-sink implementation to work with different database backends #608

Open: wants to merge 2 commits into base branch relational-data-sink
31 changes: 30 additions & 1 deletion stix2/datastore/relational_db/relational_db.py
@@ -80,7 +80,36 @@ def __init__(
auto-detect all classes and create table schemas for all of
them.
"""

# SQLAlchemy's create_engine() is typically called once per database
# connection URL, with the resulting engine held globally for the lifetime
# of a single application process. Among the various databases/dialects
# supported by SQLAlchemy, this call works with a single url argument for
# the following variants:
#
# PostgreSQL:
# url = f"postgresql://{os.getenv('POSTGRES_USER')}:{os.getenv('POSTGRES_PASSWORD')}@127.0.0.1:5432/rdb"
# SQLite:
# url = "sqlite:///sqlite_rdb.db"
# MariaDB:
# url = f"mariadb+pymysql://{os.getenv('MARIADB_USER')}:{os.getenv('MARIADB_PASSWORD')}@127.0.0.1:3306/rdb"
# MS-SQL:
# url = f"mssql+pymssql://{os.getenv('MSSQL_USERNAME')}:{os.getenv('MSSQL_PASSWORD')}@127.0.0.1:1433/tempdb"
database_connection = create_engine(database_connection_url)

# MySQL uses the same default port (3306) as MariaDB, so running both on the
# same machine requires giving one of them a different port. Below is a
# workaround for connecting to a MySQL server listening on port 3307 by
# passing the connect_args dictionary parameter:
#
# MySQL:
# url = f"mysql+pymysql://{os.getenv('MYSQL_USER')}:{os.getenv('MYSQL_PASSWORD')}@127.0.0.1/rdb"
# connect_args = dict(unix_socket="/var/mysql/mysql.sock", port=3307)
#
# database_connection = create_engine(database_connection_url,
#                                     connect_args=dict(unix_socket="/var/mysql/mysql.sock", port=3307))


self.metadata = MetaData()
create_table_objects(
self.metadata, stix_object_classes,
@@ -261,7 +290,7 @@ def get(self, stix_id, version=None, _composite_filters=None):
stix_obj = read_object(
stix_id,
self.metadata,
conn
conn,
)

return stix_obj
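For reference, here is a minimal sketch of how a caller might pick one of the backends described in the comments above when constructing an engine. The environment variable names STIX_RDB_URL and MYSQL_PORT and the make_engine() helper are illustrative assumptions, not part of this change:

    import os

    from sqlalchemy import create_engine


    def make_engine():
        # Hypothetical helper: take the connection URL from an environment
        # variable so the same code can target PostgreSQL, SQLite, MariaDB,
        # MySQL, or MS-SQL without editing the source. Defaults to SQLite.
        url = os.getenv("STIX_RDB_URL", "sqlite:///sqlite_rdb.db")

        # As noted in the comments above, a MySQL server on a non-default port
        # can be reached by passing connection options through connect_args.
        connect_args = {}
        if url.startswith("mysql+pymysql://"):
            connect_args["port"] = int(os.getenv("MYSQL_PORT", "3307"))

        return create_engine(url, connect_args=connect_args)

Since __init__ above calls create_engine(database_connection_url) itself, only the URL-selection part applies directly to this branch; forwarding connect_args would need an additional constructor parameter.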
88 changes: 51 additions & 37 deletions stix2/test/v21/test_datastore_relational_db.py
@@ -6,19 +6,32 @@
import pytest

import stix2
from stix2.datastore.relational_db.relational_db import RelationalDBStore
from stix2.datastore import DataSourceError
from stix2.datastore.relational_db.relational_db import RelationalDBStore
import stix2.properties
import stix2.registry
import stix2.v21

_DB_CONNECT_URL = f"postgresql://{os.getenv('POSTGRES_USER', 'postgres')}:{os.getenv('POSTGRES_PASSWORD', 'postgres')}@0.0.0.0:5432/postgres"
# PostgreSQL
#_DB_CONNECT_URL = f"postgresql://{os.getenv('POSTGRES_USER', 'postgres')}:{os.getenv('POSTGRES_PASSWORD', 'postgres')}@127.0.0.1:5432/postgres"

# SQLite
_DB_CONNECT_URL = f"sqlite:///sqlite_rdb.db"

# MariaDB
#_DB_CONNECT_URL = f"mariadb+pymysql://{os.getenv('MARIADB_USER')}:{os.getenv('MARIADB_PASSWORD')}@127.0.0.1:3306/rdb"

# MySQL
#_DB_CONNECT_URL = f"mysql+pymysql://os.getenv('MYSQL_USER'):os.getenv('MYSQL_PASSWORD')@127.0.0.1/rdb"

# MS-SQL
#_DB_CONNECT_URL = f"mssql+pymssql://{os.getenv('MSSQL_USERNAME')}:{os.getenv('MSSQL_PASSWORD')}@127.0.0.1:1433/tempdb"

store = RelationalDBStore(
_DB_CONNECT_URL,
True,
None,
False
True,
)

# Artifacts
@@ -705,16 +718,16 @@ class TestClass:
"string": (stix2.properties.StringProperty(), "test"),
"timestamp": (
stix2.properties.TimestampProperty(),
datetime.datetime.now(tz=datetime.timezone.utc)
datetime.datetime.now(tz=datetime.timezone.utc),
),
"ref": (
stix2.properties.ReferenceProperty("SDO"),
"identity--ec83b570-0743-4179-a5e3-66fd2fae4711"
"identity--ec83b570-0743-4179-a5e3-66fd2fae4711",
),
"enum": (
stix2.properties.EnumProperty(["value1", "value2"]),
"value1"
)
"value1",
),
}


@@ -739,8 +752,8 @@ def base_property_value(request):
"list-dict-of",
"subobject",
"list-of-subobject-prop",
"list-of-subobject-class"
]
"list-of-subobject-class",
],
)
def property_variation_value(request, base_property_value):
"""
@@ -755,7 +768,7 @@ class Embedded(stix2.v21._STIXBase21):
sub-object.
"""
_properties = {
"embedded": base_property
"embedded": base_property,
}

if request.param == "base":
@@ -768,14 +781,14 @@ class Embedded(stix2.v21._STIXBase21):

elif request.param == "dict-of":
prop_variation = stix2.properties.DictionaryProperty(
valid_types=base_property
valid_types=base_property,
)
# key name doesn't matter here
prop_variation_value = {"key": prop_value}

elif request.param == "dict-list-of":
prop_variation = stix2.properties.DictionaryProperty(
valid_types=stix2.properties.ListProperty(base_property)
valid_types=stix2.properties.ListProperty(base_property),
)
# key name doesn't matter here
prop_variation_value = {"key": [prop_value]}
@@ -798,7 +811,7 @@ class Embedded(stix2.v21._STIXBase21):
elif request.param == "list-of-subobject-prop":
# list-of-embedded values via EmbeddedObjectProperty
prop_variation = stix2.properties.ListProperty(
stix2.properties.EmbeddedObjectProperty(Embedded)
stix2.properties.EmbeddedObjectProperty(Embedded),
)
prop_variation_value = [{"embedded": prop_value}]

@@ -831,32 +844,32 @@ def object_variation(request, property_variation_value):
if request.param == "sdo":
@stix2.CustomObject(
"test-object", [
("prop_name", property_instance)
("prop_name", property_instance),
],
ext_id,
is_sdo=True
is_sdo=True,
)
class TestClass:
pass

elif request.param == "sro":
@stix2.CustomObject(
"test-object", [
("prop_name", property_instance)
("prop_name", property_instance),
],
ext_id,
is_sdo=False
is_sdo=False,
)
class TestClass:
pass

elif request.param == "sco":
@stix2.CustomObservable(
"test-object", [
("prop_name", property_instance)
("prop_name", property_instance),
],
["prop_name"],
ext_id
ext_id,
)
class TestClass:
pass
@@ -883,7 +896,7 @@ def test_property(object_variation):
None,
True,
True,
type(object_variation)
type(object_variation),
)

rdb_store.add(object_variation)
@@ -898,22 +911,23 @@ def test_dictionary_property_complex():
"""
with _register_object(
"test-object", [
("prop_name",
stix2.properties.DictionaryProperty(
valid_types=[
stix2.properties.IntegerProperty,
stix2.properties.FloatProperty,
stix2.properties.StringProperty
]
)
)
(
"prop_name",
stix2.properties.DictionaryProperty(
valid_types=[
stix2.properties.IntegerProperty,
stix2.properties.FloatProperty,
stix2.properties.StringProperty,
],
),
),
],
"extension-definition--15de9cdb-3515-4271-8479-8141154c5647",
is_sdo=True
is_sdo=True,
) as cls:

obj = cls(
prop_name={"a": 1, "b": 2.3, "c": "foo"}
prop_name={"a": 1, "b": 2.3, "c": "foo"},
)

rdb_store = RelationalDBStore(
@@ -922,7 +936,7 @@ def test_dictionary_property_complex():
None,
True,
True,
cls
cls,
)

rdb_store.add(obj)
@@ -939,18 +953,18 @@ def test_extension_definition():
extension_types=["property-extension", "new-sdo", "new-sro"],
object_marking_refs=[
"marking-definition--caa0d913-5db8-4424-aae0-43e770287d30",
"marking-definition--122a27a0-b96f-46bc-8fcd-f7a159757e77"
"marking-definition--122a27a0-b96f-46bc-8fcd-f7a159757e77",
],
granular_markings=[
{
"lang": "en_US",
"selectors": ["name", "schema"]
"selectors": ["name", "schema"],
},
{
"marking_ref": "marking-definition--50902d70-37ae-4f85-af68-3f4095493b42",
"selectors": ["name", "schema"]
}
]
"selectors": ["name", "schema"],
},
],
)

store.add(obj)
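The test module above selects its backend by commenting and uncommenting _DB_CONNECT_URL assignments. A possible alternative, sketched below under the assumption of a new environment variable named STIX_TEST_DB_URL (not part of this change), would let the same test file run against any of the supported backends without edits:

    import os

    # Hypothetical: choose the backend for the test run from the environment,
    # falling back to the file-based SQLite database used by default above.
    _DB_CONNECT_URL = os.getenv("STIX_TEST_DB_URL", "sqlite:///sqlite_rdb.db")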