feat: Add hbase online store support in feast #2590

Merged (9 commits) on Apr 25, 2022
1 change: 1 addition & 0 deletions CONTRIBUTING.md
@@ -177,6 +177,7 @@ The services with containerized replacements currently implemented are:
- DynamoDB
- Redis
- Trino
- HBase

You can run `make test-python-integration-container` to run tests against the containerized versions of dependencies.

3 changes: 2 additions & 1 deletion sdk/python/feast/cli.py
@@ -539,7 +539,8 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List
     "--template",
     "-t",
     type=click.Choice(
-        ["local", "gcp", "aws", "snowflake", "spark", "postgres"], case_sensitive=False
+        ["local", "gcp", "aws", "snowflake", "spark", "postgres", "hbase"],
+        case_sensitive=False,
     ),
     help="Specify a template for the created project",
     default="local",
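
With `hbase` now accepted as a template choice, a new feature repository can presumably be scaffolded directly from the HBase template. A sketch of the intended usage (not shown in this diff):

```shell
# Scaffold a repo from the hbase template added by this PR
feast init -t hbase hbase_feature_repo
```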
@@ -0,0 +1,78 @@
# HBase Online Store
HBase is not included in the current [Feast](https://github.com/feast-dev/feast) roadmap; this project adds HBase support as an online store.
For each feature view, a table named `<project_name>_<feature_view_name>` is created and updated with data on every materialize call (for example, the `driver_hourly_stats_view` feature view in the `feature_repo` project below is stored in the HBase table `feature_repo_driver_hourly_stats_view`).


#### Create a feature repository

```shell
feast init feature_repo
cd feature_repo
```

#### Edit `feature_store.yaml`

Set the `online_store` type to `hbase`:

```yaml
project: feature_repo
registry: data/registry.db
provider: local
online_store:
  type: hbase
  host: 127.0.0.1 # hbase thrift endpoint
  port: 9090 # hbase thrift api port
```
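
Note that `host` and `port` refer to HBase's Thrift server (9090 is the Thrift default), not the HBase master. If the Thrift service is not already running, it can usually be started along these lines (a sketch; the exact command depends on how HBase is installed, and `$HBASE_HOME` is assumed to point at the installation directory):

```shell
# Start the HBase Thrift server; it listens on port 9090 by default
$HBASE_HOME/bin/hbase-daemon.sh start thrift
```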

#### Apply the feature definitions in `example.py`

```shell
feast -c feature_repo apply
```
##### Output
```
Registered entity driver_id
Registered feature view driver_hourly_stats_view
Deploying infrastructure for driver_hourly_stats_view
```

### Materialize Latest Data to Online Feature Store (HBase)
```
$ CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S")
$ feast -c feature_repo materialize-incremental $CURRENT_TIME
```
#### Output
```
Materializing 1 feature views from 2022-04-16 15:30:39+05:30 to 2022-04-19 15:31:04+05:30 into the hbase online store.

driver_hourly_stats_view from 2022-04-16 15:30:39+05:30 to 2022-04-19 15:31:04+05:30:
100%|████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 120.59it/s]
```

### Fetch the latest features for some entity id
```python
from pprint import pprint
from feast import FeatureStore

store = FeatureStore(repo_path=".")
feature_vector = store.get_online_features(
    features=[
        "driver_hourly_stats:conv_rate",
        "driver_hourly_stats:acc_rate",
        "driver_hourly_stats:avg_daily_trips",
    ],
    entity_rows=[
        {"driver_id": 1004},
        {"driver_id": 1005},
    ],
).to_dict()
pprint(feature_vector)

```
#### Output
```
{'acc_rate': [0.01390857808291912, 0.4063614010810852],
'avg_daily_trips': [69, 706],
'conv_rate': [0.6624961495399475, 0.7595928311347961],
'driver_id': [1004, 1005]}
```
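
As an optional sanity check (not part of the original walkthrough), the materialized rows can be inspected directly from the HBase shell; the table name follows the `<project_name>_<feature_view_name>` convention described above:

```shell
# Assumes the table name derived from the project and feature view used in this example
echo "scan 'feature_repo_driver_hourly_stats_view'" | hbase shell
```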
@@ -0,0 +1,234 @@
import calendar
import struct
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple

from happybase import Connection
from pydantic.typing import Literal

from feast import Entity
from feast.feature_view import FeatureView
from feast.infra.key_encoding_utils import serialize_entity_key
from feast.infra.online_stores.online_store import OnlineStore
from feast.infra.utils.hbase_utils import HbaseConstants, HbaseUtils
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.repo_config import FeastConfigBaseModel, RepoConfig


class HbaseOnlineStoreConfig(FeastConfigBaseModel):
    """Online store config for Hbase store"""

    type: Literal["hbase"] = "hbase"
    """Online store type selector"""

    host: str
    """Hostname of Hbase Thrift server"""

    port: str
    """Port in which Hbase Thrift server is running"""


class HbaseConnection:
    """
    Hbase connection used to connect to hbase.

    Attributes:
        store_config: Online store config for Hbase store.
    """

    def __init__(self, store_config: HbaseOnlineStoreConfig):
        self._store_config = store_config
        self._real_conn = Connection(
            host=store_config.host, port=int(store_config.port)
        )

    @property
    def real_conn(self) -> Connection:
        """Stores the real happybase Connection to connect to hbase."""
        return self._real_conn

    def close(self) -> None:
        """Close the happybase connection."""
        self.real_conn.close()


class HbaseOnlineStore(OnlineStore):
    """
    Online feature store for Hbase.

    Attributes:
        _conn: Happybase Connection to connect to hbase thrift server.
    """

    _conn: Connection = None

    def _get_conn(self, config: RepoConfig):
        """
        Get or create an Hbase connection from the RepoConfig.

        Args:
            config: The RepoConfig for the current FeatureStore.
        """

        store_config = config.online_store
        assert isinstance(store_config, HbaseOnlineStoreConfig)

        if not self._conn:
            self._conn = Connection(host=store_config.host, port=int(store_config.port))
        return self._conn

    def online_write_batch(
        self,
        config: RepoConfig,
        table: FeatureView,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
    ) -> None:
        """
        Write a batch of feature rows to the Hbase online store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            table: Feast FeatureView.
            data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key,
                a dict containing feature values, an event timestamp for the row, and
                the created timestamp for the row if it exists.
            progress: Optional function to be called once every mini-batch of rows is written to
                the online store. Can be used to display progress.
        """

        hbase = HbaseUtils(self._get_conn(config))
        project = config.project
        table_name = _table_id(project, table)

        b = hbase.batch(table_name)
        for entity_key, values, timestamp, created_ts in data:
            row_key = serialize_entity_key(entity_key).hex()
            values_dict = {}
            for feature_name, val in values.items():
                values_dict[
                    HbaseConstants.get_col_from_feature(feature_name)
                ] = val.SerializeToString()
            if isinstance(timestamp, datetime):
                values_dict[HbaseConstants.DEFAULT_EVENT_TS] = struct.pack(
                    ">L", int(calendar.timegm(timestamp.timetuple()))
                )
            else:
                values_dict[HbaseConstants.DEFAULT_EVENT_TS] = timestamp
            if created_ts is not None:
                if isinstance(created_ts, datetime):
                    values_dict[HbaseConstants.DEFAULT_CREATED_TS] = struct.pack(
                        ">L", int(calendar.timegm(created_ts.timetuple()))
                    )
Comment on lines +115 to +125

Reviewer (Member): Does this field have some specific significance/format for HBase? And is calendar the canonical way to generate this value?

@aurobindoc (Contributor, Author), Apr 24, 2022:

> Does this field have some specific significance/format for HBase?

In HBase, all data is stored as byte arrays. To store datetime columns in HBase, I chose to convert the timestamp object to an epoch integer and then convert that integer to a byte array.

> calendar is the canonical way to generate this value?

There are a couple of ways to convert a timestamp to an epoch value:

  • calendar.timegm()
  • time.mktime()

time.mktime() assumes that the passed tuple is in local time, while calendar.timegm() assumes it is in GMT/UTC. Depending on the interpretation the tuple represents a different time, so the two functions return different values (seconds since the epoch are UTC based).
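
To make the distinction concrete, here is a small illustrative sketch (not part of the PR) of the two conversions and of the `struct.pack(">L", ...)` byte encoding that `online_write_batch` applies to the resulting epoch value:

```python
import calendar
import struct
import time
from datetime import datetime

# A naive timestamp, as typically passed in during materialization.
ts = datetime(2022, 4, 19, 10, 0, 0)

utc_epoch = calendar.timegm(ts.timetuple())  # tuple interpreted as UTC
local_epoch = time.mktime(ts.timetuple())    # tuple interpreted as local time
print(utc_epoch, local_epoch)                # differ by the local UTC offset

# The store packs the epoch as a 4-byte big-endian unsigned integer.
packed = struct.pack(">L", int(utc_epoch))
assert struct.unpack(">L", packed)[0] == int(utc_epoch)
```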

                else:
                    values_dict[HbaseConstants.DEFAULT_CREATED_TS] = created_ts
            b.put(row_key, values_dict)
        b.send()

    def online_read(
        self,
        config: RepoConfig,
        table: FeatureView,
        entity_keys: List[EntityKeyProto],
        requested_features: Optional[List[str]] = None,
    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
        """
        Retrieve feature values from the Hbase online store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            table: Feast FeatureView.
            entity_keys: a list of entity keys that should be read from the FeatureStore.
        """
        hbase = HbaseUtils(self._get_conn(config))
        project = config.project
        table_name = _table_id(project, table)

        result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []

        row_keys = [
            serialize_entity_key(entity_key).hex() for entity_key in entity_keys
        ]
        rows = hbase.rows(table_name, row_keys=row_keys)

        for _, row in rows:
            res = {}
            res_ts = None
            for feature_name, feature_value in row.items():
                f_name = HbaseConstants.get_feature_from_col(feature_name)
                if requested_features is not None and f_name in requested_features:
                    v = ValueProto()
                    v.ParseFromString(feature_value)
                    res[f_name] = v
                if f_name is HbaseConstants.EVENT_TS:
                    ts = struct.unpack(">L", feature_value)[0]
                    res_ts = datetime.fromtimestamp(ts)
            if not res:
                result.append((None, None))
            else:
                result.append((res_ts, res))
        return result

    def update(
        self,
        config: RepoConfig,
        tables_to_delete: Sequence[FeatureView],
        tables_to_keep: Sequence[FeatureView],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
        """
        Update tables from the Hbase Online Store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            tables_to_delete: Tables to delete from the Hbase Online Store.
            tables_to_keep: Tables to keep in the Hbase Online Store.
        """
        hbase = HbaseUtils(self._get_conn(config))
        project = config.project

        # We don't create any special state for the entities in this implementation.
        for table in tables_to_keep:
            table_name = _table_id(project, table)
            if not hbase.check_if_table_exist(table_name):
                hbase.create_table_with_default_cf(table_name)

        for table in tables_to_delete:
            table_name = _table_id(project, table)
            hbase.delete_table(table_name)

    def teardown(
        self,
        config: RepoConfig,
        tables: Sequence[FeatureView],
        entities: Sequence[Entity],
    ):
        """
        Delete tables from the Hbase Online Store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            tables: Tables to delete from the feature repo.
        """
        hbase = HbaseUtils(self._get_conn(config))
        project = config.project

        for table in tables:
            table_name = _table_id(project, table)
            hbase.delete_table(table_name)


def _table_id(project: str, table: FeatureView) -> str:
    """
    Returns the table name given the project name and the feature view.

    Args:
        project: Name of the feast project.
        table: Feast FeatureView.
    """
    return f"{project}_{table.name}"
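
For reference, a minimal sketch (not part of the diff) of how a row key is derived for a single entity, mirroring the `serialize_entity_key(...).hex()` calls in `online_write_batch` and `online_read`; the proto field names used here are assumptions based on Feast's `EntityKey` and `Value` protos:

```python
from feast.infra.key_encoding_utils import serialize_entity_key
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto

# A single driver entity, as it would appear in the materialization data.
entity_key = EntityKeyProto(
    join_keys=["driver_id"],
    entity_values=[ValueProto(int64_val=1004)],
)

# Hex-encoded serialized entity key: this is the HBase row key for the entity.
row_key = serialize_entity_key(entity_key).hex()
print(row_key)
```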
@@ -0,0 +1,10 @@
from tests.integration.feature_repos.integration_test_repo_config import (
    IntegrationTestRepoConfig,
)
from tests.integration.feature_repos.universal.online_store.hbase import (
    HbaseOnlineStoreCreator,
)

FULL_REPO_CONFIGS = [
    IntegrationTestRepoConfig(online_store_creator=HbaseOnlineStoreCreator),
]