Merge pull request #10 from helxplatform/redis-schema-fix
Redis schema fix
YaphetKG authored Nov 30, 2023
2 parents 2f3653b + 877a73f commit 8284359
Showing 4 changed files with 17 additions and 12 deletions.
2 changes: 1 addition & 1 deletion PLATER/requirements.txt
@@ -7,6 +7,6 @@ uvicorn
 httpx
 redis==4.1.2
 reasoner-transpiler==1.7.1
-bmt==0.8.2
+bmt==1.1.3
 git+https://github.com/TranslatorSRI/reasoner-pydantic@v1.1.2.1#egg=reasoner-pydantic
 git+https://github.com/patrickkwang/fastapi#egg=fastapi
11 changes: 7 additions & 4 deletions PLATER/services/util/drivers/redis_driver.py
@@ -51,8 +51,8 @@ def decode_if_byte(value):
             return value.decode('utf-8')
         except:
             return value

-    async def run(self, query, **kwargs):
+    def run_graph_query(self, query, **kwargs):
         query_timeout = kwargs.get('query_timeout', None)
         results = self.redis_graph.query(query, read_only=True, timeout= query_timeout)
         headers = list(map(lambda x: RedisDriver.decode_if_byte(x[1]), results.header))
@@ -75,9 +75,12 @@ async def run(self, query, **kwargs):
             response.append(new_row)
         return self.format_cypher_result((headers, response))


+    async def run(self, query, **kwargs):
+        return self.run_graph_query(query, **kwargs)
+
     def run_sync(self, cypher_query):
-        results = self.sync_redis_client.execute_command('GRAPH.RO_QUERY', self.graph_name, cypher_query)
-        return RedisDriver.format_cypher_result(results)
+        return self.run_graph_query(cypher_query)

     @staticmethod
     def convert_to_dict(response: dict) -> list:
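For context, a minimal usage sketch (not part of this commit) of the refactored driver above: the async run() and the synchronous run_sync() both delegate to the shared run_graph_query() helper, so they return identically formatted results. The constructor arguments and the Cypher query below are assumptions for illustration only; the import path simply mirrors the file path in the diff.

import asyncio

from PLATER.services.util.drivers.redis_driver import RedisDriver

# Hypothetical connection details; not taken from this diff.
driver = RedisDriver(host="localhost", port=6379, graph_name="example_graph")

# Async path: run() is now a thin wrapper around run_graph_query().
async_result = asyncio.run(driver.run("MATCH (n) RETURN n LIMIT 1", query_timeout=30))

# Sync path: run_sync() reuses run_graph_query() instead of issuing
# GRAPH.RO_QUERY and formatting the raw reply itself.
sync_result = driver.run_sync("MATCH (n) RETURN n LIMIT 1")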
14 changes: 8 additions & 6 deletions PLATER/services/util/graph_adapter.py
@@ -129,8 +129,8 @@ def search(self, query, indexes, fields=None, options={
         result = self.driver.run_sync(query)
         hits = self.convert_to_dict(result)
         for hit in hits:
-            hit["labels"] = dict(hit["node"])["labels"]
-            hit["node"] = dict(dict(hit["node"])["properties"])
+            hit["labels"] = dict(hit["node"])["category"]
+            hit["node"] = dict(dict(hit["node"]))
             hit["score"] = float(hit["score"])
         hits.sort(key=lambda hit: hit["score"], reverse=True)
         return {
@@ -164,13 +164,15 @@ def get_schema(self, force_update=False):
             # Since there are some nodes in data currently just one label ['biolink:NamedThing']
             # This filter is to avoid that scenario.
             # @TODO need to remove this filter when data build avoids adding nodes with single ['biolink:NamedThing'] labels.
-            filter_named_thing = lambda x: filter(lambda y: y != 'biolink:NamedThing', x)
+            filter_named_thing = lambda x: filter(lambda y: y != 'biolink.NamedThing', x)
+            format_to_old_type = lambda y: [x.replace('biolink.', 'biolink:') for x in y]
             # For redis convert these to arrays
             source_labels = [triplet['source_labels']] if isinstance(triplet['source_labels'], str) else triplet['source_labels']
             target_labels = [triplet['target_labels']] if isinstance(triplet['target_labels'], str) else triplet['target_labels']
-            source_labels, predicate, target_labels = self.find_biolink_leaves(filter_named_thing(source_labels)), \
-                                                      triplet['predicate'], \
-                                                      self.find_biolink_leaves(filter_named_thing(target_labels))
+            source_labels, predicate, target_labels = self.find_biolink_leaves(format_to_old_type(filter_named_thing(source_labels))), \
+                                                      triplet['predicate'].replace('biolink.', 'biolink:'), \
+                                                      self.find_biolink_leaves(format_to_old_type(filter_named_thing(target_labels)))


             for source_label in source_labels:
                 for target_label in target_labels:
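The get_schema change above adds a format_to_old_type lambda that converts the dot-separated labels coming back from the graph (e.g. 'biolink.NamedThing') to the colon CURIE form ('biolink:NamedThing') that the rest of PLATER expects, after the NamedThing filter is applied. A standalone sketch of that normalization, with made-up sample labels (only the two lambdas mirror the diff):

# Sample labels are assumptions for illustration; only the lambdas come from the diff.
source_labels = ['biolink.NamedThing', 'biolink.ChemicalEntity', 'biolink.SmallMolecule']

filter_named_thing = lambda x: filter(lambda y: y != 'biolink.NamedThing', x)
format_to_old_type = lambda y: [x.replace('biolink.', 'biolink:') for x in y]

print(format_to_old_type(filter_named_thing(source_labels)))
# prints: ['biolink:ChemicalEntity', 'biolink:SmallMolecule']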
2 changes: 1 addition & 1 deletion setup.py
@@ -10,7 +10,7 @@
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()

version = "2.0.2"
version = "3.0.2"

class PublishClass(Command):
description = "Publish the package"
