Skip to content
This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Permalink
upgrade pytest dependency
Browse files Browse the repository at this point in the history
remove hacky test-skipping workaround now that pytest-dev/pytest#568 has been fixed
  • Loading branch information
emmettbutler committed Jul 19, 2018
1 parent 0f9a0f8 commit 088bfa8
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 79 deletions.
2 changes: 1 addition & 1 deletion test-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
lz4==2.0.2
lz4tools==1.3.1.2
pytest
pytest==3.6.3
pytest-cov
python-snappy
mock
Expand Down
54 changes: 0 additions & 54 deletions tests/pykafka/__init__.py
Original file line number Diff line number Diff line change
@@ -1,54 +0,0 @@
import functools

import pytest


def patch_subclass(parent, skip_condition):
"""Work around a pytest.mark.skipif bug
https://github.com/pytest-dev/pytest/issues/568
The issue causes all subclasses of a TestCase subclass to be skipped if any one
of them is skipped.
This fix circumvents the issue by overriding Python's existing subclassing mechanism.
Instead of having `cls` be a subclass of `parent`, this decorator adds each attribute
of `parent` to `cls` without using Python inheritance. When appropriate, it also adds
a boolean condition under which to skip tests for the decorated class.
:param parent: The "superclass" from which the decorated class should inherit
its non-overridden attributes
:type parent: unittest2.TestCase
:param skip_condition: A boolean condition that, when True, will cause all tests in
the decorated class to be skipped
:type skip_condition: bool
"""
def patcher(cls):
def build_skipped_method(method, cls, cond=None):
if cond is None:
cond = False
if hasattr(method, "skip_condition"):
cond = cond or method.skip_condition(cls)

@pytest.mark.skipif(cond, reason="")
def _wrapper(self):
return method(self)
return _wrapper

# two passes over parent required so that skips have access to all class
# attributes
for attr in parent.__dict__:
if attr in cls.__dict__:
continue
if not attr.startswith("test_"):
setattr(cls, attr, parent.__dict__[attr])

for attr in cls.__dict__:
if attr.startswith("test_"):
setattr(cls, attr, build_skipped_method(cls.__dict__[attr],
cls, skip_condition))

for attr in parent.__dict__:
if attr.startswith("test_"):
setattr(cls, attr, build_skipped_method(parent.__dict__[attr],
cls, skip_condition))
return cls
return patcher
11 changes: 5 additions & 6 deletions tests/pykafka/rdkafka/test_simple_consumer.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import pytest
import unittest2

from tests.pykafka import test_simpleconsumer, test_balancedconsumer, patch_subclass
from tests.pykafka import test_simpleconsumer, test_balancedconsumer
from pykafka.utils.compat import range
try:
from pykafka.rdkafka import _rd_kafka # noqa
Expand All @@ -10,8 +9,8 @@
RDKAFKA = False # C extension not built


@patch_subclass(test_simpleconsumer.TestSimpleConsumer, not RDKAFKA)
class TestRdKafkaSimpleConsumer(unittest2.TestCase):
@pytest.mark.skipif(not RDKAFKA)
class TestRdKafkaSimpleConsumer(test_simpleconsumer.TestSimpleConsumer):
USE_RDKAFKA = True

def test_update_cluster(self):
Expand Down Expand Up @@ -69,6 +68,6 @@ def _latest_partition_offsets_by_reading(consumer, n_reads):
return latest_offs


@patch_subclass(test_balancedconsumer.BalancedConsumerIntegrationTests, not RDKAFKA)
class RdkBalancedConsumerIntegrationTests(unittest2.TestCase):
@pytest.mark.skipif(not RDKAFKA)
class RdkBalancedConsumerIntegrationTests(test_balancedconsumer.BalancedConsumerIntegrationTests):
USE_RDKAFKA = True
25 changes: 12 additions & 13 deletions tests/pykafka/test_balancedconsumer.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
RangeProtocol)
from pykafka.test.utils import get_cluster, stop_cluster
from pykafka.utils.compat import range, iterkeys, iteritems
from tests.pykafka import patch_subclass


kafka_version_string = os.environ.get('KAFKA_VERSION', '0.8')
Expand Down Expand Up @@ -234,6 +233,7 @@ def verify_extras(consumers, extras_count):
except:
pass

@pytest.mark.skipif(USE_GEVENT)
# weird name to ensure test execution order, because there is an unintended
# interdependency between test_consume_latest and other tests
def test_a_rebalance_unblock_event(self):
Expand Down Expand Up @@ -264,7 +264,6 @@ def test_a_rebalance_unblock_event(self):

# consumer thread would die in case of any rebalancing errors
self.assertTrue(consumer_a_thread.is_alive() and consumer_b_thread.is_alive())
test_a_rebalance_unblock_event.skip_condition = lambda cls: cls.USE_GEVENT

def test_rebalance_callbacks(self):
def on_rebalance(cns, old_partition_offsets, new_partition_offsets):
Expand Down Expand Up @@ -413,6 +412,7 @@ def test_consume_latest(self):
except:
pass

@pytest.mark.skipif(MANAGED_CONSUMER)
def test_external_kazoo_client(self):
"""Run with pre-existing KazooClient instance
Expand All @@ -429,7 +429,6 @@ def test_external_kazoo_client(self):
use_rdkafka=self.USE_RDKAFKA)
[msg for msg in consumer]
consumer.stop()
test_external_kazoo_client.skip_condition = lambda cls: cls.MANAGED_CONSUMER

def test_no_partitions(self):
"""Ensure a consumer assigned no partitions doesn't fail"""
Expand All @@ -456,6 +455,7 @@ def _decide_dummy(participants, partitions, consumer_id):
# check that stop() succeeds (cf #313 and #392)
consumer.stop()

@pytest.mark.skipif(MANAGED_CONSUMER)
def test_zk_conn_lost(self):
"""Check we restore zookeeper nodes correctly after connection loss
Expand Down Expand Up @@ -498,7 +498,6 @@ def test_zk_conn_lost(self):
zk.stop()
except:
pass
test_zk_conn_lost.skip_condition = lambda cls: cls.MANAGED_CONSUMER

def wait_for_rebalancing(self, *balanced_consumers):
"""Test helper that loops while rebalancing is ongoing
Expand All @@ -520,21 +519,21 @@ def wait_for_rebalancing(self, *balanced_consumers):
raise AssertionError("Rebalancing failed")


@patch_subclass(BalancedConsumerIntegrationTests,
platform.python_implementation() == "PyPy" or gevent is None)
class BalancedConsumerGEventIntegrationTests(unittest2.TestCase):
@pytest.mark.skipif(platform.python_implementation() == "PyPy" or gevent is None,
reason="Unresolved crashes")
class BalancedConsumerGEventIntegrationTests(BalancedConsumerIntegrationTests):
USE_GEVENT = True


@patch_subclass(BalancedConsumerIntegrationTests, kafka_version < version_09)
class ManagedBalancedConsumerIntegrationTests(unittest2.TestCase):
@pytest.mark.skipif(kafka_version < version_09,
reason="Managed consumer unsupported until 0.9")
class ManagedBalancedConsumerIntegrationTests(BalancedConsumerIntegrationTests):
MANAGED_CONSUMER = True


@patch_subclass(
BalancedConsumerIntegrationTests,
platform.python_implementation() == "PyPy" or kafka_version < version_09 or gevent is None)
class ManagedBalancedConsumerGEventIntegrationTests(unittest2.TestCase):
@pytest.mark.skipif(platform.python_implementation() == "PyPy" or
kafka_version < version_09 or gevent is None)
class ManagedBalancedConsumerGEventIntegrationTests(BalancedConsumerIntegrationTests):
MANAGED_CONSUMER = True
USE_GEVENT = True

Expand Down
7 changes: 3 additions & 4 deletions tests/pykafka/test_producer.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
from pykafka.common import CompressionType
from pykafka.producer import OwnedBroker
from pykafka.utils import serialize_utf8, deserialize_utf8
from tests.pykafka import patch_subclass

kafka_version = os.environ.get('KAFKA_VERSION', '0.8.0')

Expand Down Expand Up @@ -183,6 +182,7 @@ def test_async_produce_queue_full(self):
while consumer.consume() is not None:
time.sleep(.05)

@pytest.mark.skipif(RDKAFKA)
def test_async_produce_lingers(self):
"""Ensure that the context manager waits for linger_ms milliseconds"""
linger = 3
Expand All @@ -194,7 +194,6 @@ def test_async_produce_lingers(self):
self.assertTrue(int(time.time() - start) >= int(linger))
consumer.consume()
consumer.consume()
test_async_produce_lingers.skip_condition = lambda cls: RDKAFKA

def test_async_produce_thread_exception(self):
"""Ensure that an exception on a worker thread is raised to the main thread"""
Expand Down Expand Up @@ -380,8 +379,8 @@ def ensure_all_messages_consumed():
retry(ensure_all_messages_consumed, retry_time=15)


@patch_subclass(ProducerIntegrationTests, not RDKAFKA)
class TestRdKafkaProducer(unittest2.TestCase):
@pytest.mark.skipif(not RDKAFKA)
class TestRdKafkaProducer(ProducerIntegrationTests):
USE_RDKAFKA = True


Expand Down
2 changes: 1 addition & 1 deletion tests/pykafka/test_simpleconsumer.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,7 @@ def test_reset_offsets(self):
self.assertEqual(msg.offset, expected_offset)
self.assertEqual(consumer.held_offsets[part_id], expected_offset)

@pytest.mark.skipif(RDKAFKA)
def test_update_cluster(self):
"""Check that the consumer can initiate cluster updates"""
with self._get_simple_consumer() as consumer:
Expand All @@ -262,7 +263,6 @@ def test_update_cluster(self):
# If the fetcher thread fell over during the cluster update
# process, we'd get an exception here:
self.assertIsNotNone(consumer.consume())
test_update_cluster.skip_condition = lambda cls: RDKAFKA

def test_consumer_lag(self):
"""Ensure that after consuming the entire topic, lag is 0"""
Expand Down

0 comments on commit 088bfa8

Please sign in to comment.