Refactor Producer #15

Closed
683 changes: 254 additions & 429 deletions aiokafka/client.py

Large diffs are not rendered by default.

114 changes: 89 additions & 25 deletions aiokafka/conn.py
@@ -1,37 +1,58 @@
import asyncio
import struct
import logging
import copy
from kafka.common import ConnectionError
import kafka.common as Errors
from kafka.protocol.api import RequestHeader
from kafka.protocol.commit import GroupCoordinatorResponse
from aiokafka import __version__

__all__ = ['AIOKafkaConnection', 'create_conn']


@asyncio.coroutine
def create_conn(host, port, *, loop=None):
def create_conn(host, port, *, loop=None, **config):
if loop is None:
loop = asyncio.get_event_loop()
conn = AIOKafkaConnection(host, port, loop=loop)
conn = AIOKafkaConnection(host, port, loop=loop, **config)
yield from conn._connect()
return conn


class AIOKafkaConnection:
HEADER = struct.Struct('>i')

def __init__(self, host, port, *, loop):
DEFAULT_CONFIG = {
'client_id': 'kafka-python-' + __version__,
'request_timeout_ms': 40000,
'api_version': (0, 8, 2), # default to most restrictive
}

log = logging.getLogger(__name__)

def __init__(self, host, port, *, loop, **config):
self._host = host
self._port = port
self._reader = self._writer = None
self._loop = loop
self._requests = []
self._read_task = None
self._correlation_id = 0
self._config = copy.copy(self.DEFAULT_CONFIG)
for key in self._config:
if key in config:
self._config[key] = config[key]

@asyncio.coroutine
def _connect(self):
self._reader, self._writer = yield from asyncio.open_connection(
self.host, self.port, loop=self._loop)
self._read_task = asyncio.async(self._read(), loop=self._loop)
future = asyncio.open_connection(self.host, self.port, loop=self._loop)
self._reader, self._writer = yield from asyncio.wait_for(
future, self._config['request_timeout_ms']/1000)
self._read_task = asyncio.ensure_future(self._read(), loop=self._loop)
Member: Not backward compatible; not all versions of Python 3.4.x support ensure_future.
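A minimal sketch of a backward-compatible spelling, assuming the goal is to keep supporting 3.4.x releases that predate asyncio.ensure_future (the getattr fallback is illustrative, not part of this diff):

# asyncio.ensure_future was added in Python 3.4.4; earlier 3.4.x only has
# asyncio.async, which getattr lets us reference without using the keyword.
_ensure_future = getattr(asyncio, 'ensure_future', getattr(asyncio, 'async'))
self._read_task = _ensure_future(self._read(), loop=self._loop)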


def __repr__(self):
return "<KafkaConnection host={0.host} port={0.port}>".format(self)
return "<AIOKafkaConnection host={0.host} port={0.port}>".format(self)

@property
def host(self):
@@ -41,14 +62,40 @@ def host(self):
def port(self):
return self._port

def send(self, payload, no_ack=False):
self._writer.write(payload)
def send(self, request, expect_response=True):
correlation_id = self._next_correlation_id()
header = RequestHeader(request,
correlation_id=correlation_id,
client_id=self._config['client_id'])
message = b''.join([header.encode(), request.encode()])
size = self.HEADER.pack(len(message))
try:
self._writer.write(size)
self._writer.write(message)
Member: Any reason to make two writes instead of one? We can save one system call here.

Member: Good point! In aiohttp I've joined calls like this into one already.
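A minimal sketch of the single-write variant being suggested, reusing the size and message values from the diff above (not part of the PR as shown):

# Concatenate the length prefix and the framed request so the transport
# gets one buffer, saving a write() call.
self._writer.write(size + message)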

except OSError as err:
self.close()
raise ConnectionError(
"Connection at {0}:{1} broken: {2}".format(
self._host, self._port, err))

fut = asyncio.Future(loop=self._loop)
if no_ack:
if not expect_response:
fut.set_result(None)
return fut
self._requests.append(fut)
return fut
self._requests.append((correlation_id, request.RESPONSE_TYPE, fut))
return asyncio.wait_for(fut, self._config['request_timeout_ms']/1000)
Member: You should pass loop explicitly.

Member: +1
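A minimal sketch of the suggested change, assuming the Python versions targeted here, where asyncio.wait_for still accepts a loop argument:

# Bind the timeout wrapper to the connection's own loop instead of relying
# on the default event loop of the calling context.
return asyncio.wait_for(fut, self._config['request_timeout_ms'] / 1000,
                        loop=self._loop)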


def connected(self):
return self._reader and not self._reader.at_eof()

def close(self):
if self._reader:
self._writer.close()
self._reader = self._writer = None
self._read_task.cancel()
for _, _, fut in self._requests:
fut.cancel()
self._requests = []

@asyncio.coroutine
def _read(self):
@@ -59,23 +106,40 @@ def _read(self):

resp = yield from self._reader.readexactly(size)

fut = self._requests.pop(0)
if not fut.cancelled():
fut.set_result(resp)
recv_correlation_id, = self.HEADER.unpack(resp[:4])

correlation_id, resp_type, fut = self._requests.pop(0)
if (self._config['api_version'] == (0, 8, 2)
and resp_type is GroupCoordinatorResponse
and correlation_id != 0 and recv_correlation_id == 0):
self.log.warning(
'Kafka 0.8.2 quirk -- GroupCoordinatorResponse'
' coorelation id does not match request. This'
' should go away once at least one topic has been'
' initialized on the broker')

if correlation_id != recv_correlation_id:
error = Errors.CorrelationIdError(
'Correlation ids do not match: sent {}, recv {}'
.format(correlation_id, recv_correlation_id))
fut.set_exception(error)
self.close()
break

if not fut.done():
response = resp_type.decode(resp[4:])
self.log.debug('%s Response %d: %s',
self, correlation_id, response)
fut.set_result(response)
except OSError as exc:
conn_exc = ConnectionError("Kafka at {0}:{1} went away".format(
conn_exc = ConnectionError("Connection at {0}:{1} broken".format(
self._host, self._port))
conn_exc.__cause__ = exc
conn_exc.__context__ = exc
fut = self._requests.pop(0)
_, _, fut = self._requests.pop(0)
fut.set_exception(conn_exc)
self.close()

def close(self):
if self._reader:
self._writer.close()
self._reader = self._writer = None
self._read_task.cancel()
for fut in self._requests:
fut.cancel()
self._requests = []
def _next_correlation_id(self):
self._correlation_id = (self._correlation_id + 1) % 2**31
return self._correlation_id
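For reference, a minimal usage sketch of the refactored connection API in this diff; the broker address is an assumption, and request stands for any kafka-python protocol request object with encode() and a RESPONSE_TYPE:

import asyncio
from aiokafka.conn import create_conn

@asyncio.coroutine
def fetch(loop, request):
    # create_conn now accepts connection config such as request_timeout_ms.
    conn = yield from create_conn('localhost', 9092, loop=loop,
                                  request_timeout_ms=10000)
    try:
        # send() frames the request (size + header + payload) and resolves
        # with the decoded RESPONSE_TYPE instead of raw bytes.
        response = yield from conn.send(request)
    finally:
        conn.close()
    return response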