Skip to content

Commit

Permalink
Merge pull request #1621 from mabel-dev/#1620
Browse files Browse the repository at this point in the history
  • Loading branch information
joocer authored Apr 29, 2024
2 parents 43c223f + 8969101 commit 487349a
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 5 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/regression_suite.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
name: Regression Suite

on:
push:
schedule:
- cron: "0 4 * * *"

jobs:
regression_matrix:
Expand Down
2 changes: 1 addition & 1 deletion opteryx/__version__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__build__ = 461

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down
9 changes: 7 additions & 2 deletions opteryx/managers/cache/memcached.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@
"""
This implements an interface to Memcached
If we have 10 failures in a row, stop trying to use the cache. We have some
scenarios where we assume the remote server is down and stop immediately.
"""

import os
Expand Down Expand Up @@ -116,4 +117,8 @@ def get(self, key: bytes) -> Union[bytes, None]:

def set(self, key: bytes, value: bytes) -> None:
    """
    Write a value to the cache under the given key.

    Parameters:
        key: bytes
            The cache key to store under.
        value: bytes
            The payload to cache.

    Writes are skipped entirely once the consecutive-failure threshold has
    been reached — at that point we assume the remote server is down and
    stop contacting it.
    """
    if self._consecutive_failures < MAXIMUM_CONSECUTIVE_FAILURES:
        try:
            self._server.set(key, value)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate. Any server error here means we give up on the
            # cache immediately (best-effort write, never raise to caller).
            self._consecutive_failures = MAXIMUM_CONSECUTIVE_FAILURES
4 changes: 3 additions & 1 deletion opteryx/utils/file_decoders.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,9 @@ def parquet_decoder(buffer, projection: List = None, selection=None, just_schema
selected_columns = []

# Read the parquet table with the optimized column list and selection filters
table = parquet.read_table(
    stream, columns=selected_columns, pre_buffer=False, filters=_select, use_threads=False
)
if selection:
table = filter_records(selection, table)
if projection == []:
Expand Down

0 comments on commit 487349a

Please sign in to comment.