diff --git a/aiohttp/client.py b/aiohttp/client.py index d95cce18e2d..691ed33e7a7 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -14,10 +14,10 @@ from . import hdrs, helpers, streams from .log import client_logger from .streams import EOF_MARKER, FlowControlStreamReader -from .multidict import CIMultiDictProxy, MultiDictProxy, MultiDict +from .multidict import CIMultiDictProxy, MultiDictProxy, MultiDict, CIMultiDict from .multipart import MultipartWriter -__all__ = ('request',) +__all__ = ('request', 'ClientSession') HTTP_PORT = 80 HTTPS_PORT = 443 @@ -66,7 +66,7 @@ def request(method, url, *, :type chunked: bool or int :param bool expect100: Expect 100-continue response from server. :param connector: BaseConnector sub-class instance to support - connection pooling and session cookies. + connection pooling. :type connector: aiohttp.connector.BaseConnector :param bool read_until_eof: Read response until eof if response does not have Content-Length header. @@ -83,69 +83,228 @@ def request(method, url, *, >>> data = yield from resp.read() """ - redirects = 0 - method = method.upper() - if loop is None: - loop = asyncio.get_event_loop() - if request_class is None: - request_class = ClientRequest - if connector is None: - connector = aiohttp.TCPConnector(force_close=True, loop=loop) - - while True: - req = request_class( - method, url, params=params, headers=headers, data=data, - cookies=cookies, files=files, encoding=encoding, - auth=auth, version=version, compress=compress, chunked=chunked, - loop=loop, expect100=expect100, response_class=response_class) - - conn = yield from connector.connect(req) - try: - resp = req.send(conn.writer, conn.reader) + session = ClientSession(connector=connector, loop=loop, + request_class=request_class, + response_class=response_class, + cookies=cookies) + resp = yield from session.request(method, url, + params=params, + data=data, + headers=headers, + files=files, + auth=auth, + allow_redirects=allow_redirects, + 
max_redirects=max_redirects, + encoding=encoding, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + read_until_eof=read_until_eof) + return resp + + +class ClientSession: + + def __init__(self, *, connector=None, loop=None, request_class=None, + response_class=None, cookies=None, headers=None, auth=None): + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self.cookies = http.cookies.SimpleCookie() + if connector is None: + connector = aiohttp.TCPConnector(force_close=True, loop=loop) + # For Backward compatability with `share_cookie` connectors + elif connector._share_cookies: + self._update_cookies(connector.cookies) + if cookies is not None: + self._update_cookies(cookies) + self._connector = connector + self._default_auth = auth + + # Convert to list of tuples + if headers: + if isinstance(headers, dict): + headers = list(headers.items()) + elif isinstance(headers, (MultiDictProxy, MultiDict)): + headers = list(headers.items()) + self._default_headers = headers + + if request_class is None: + request_class = ClientRequest + self._request_class = request_class + self._response_class = response_class + + @asyncio.coroutine + def request(self, method, url, *, + params=None, + data=None, + headers=None, + files=None, + auth=None, + allow_redirects=True, + max_redirects=10, + encoding='utf-8', + version=aiohttp.HttpVersion11, + compress=None, + chunked=None, + expect100=False, + read_until_eof=True): + + redirects = 0 + method = method.upper() + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit Authorization header + # with `auth` argument + if (headers is not None and + auth is not None and + hdrs.AUTHORIZATION in headers): + raise ValueError("Can't combine `Authorization` header with " + "`auth` argument") + + while True: + req = self._request_class( + method, 
url, params=params, headers=headers, data=data, + cookies=self.cookies, files=files, encoding=encoding, + auth=auth, version=version, compress=compress, chunked=chunked, + expect100=expect100, + loop=self._loop, response_class=self._response_class) + + conn = yield from self._connector.connect(req) try: - yield from resp.start(conn, read_until_eof) - except: - resp.close() - conn.close() - raise - except (aiohttp.HttpProcessingError, - aiohttp.ServerDisconnectedError) as exc: - raise aiohttp.ClientResponseError() from exc - except OSError as exc: - raise aiohttp.ClientOSError() from exc - - # redirects - if resp.status in (301, 302, 303, 307) and allow_redirects: - redirects += 1 - if max_redirects and redirects >= max_redirects: - resp.close(force=True) - break + resp = req.send(conn.writer, conn.reader) + try: + yield from resp.start(conn, read_until_eof) + except: + resp.close() + conn.close() + raise + except (aiohttp.HttpProcessingError, + aiohttp.ServerDisconnectedError) as exc: + raise aiohttp.ClientResponseError() from exc + except OSError as exc: + raise aiohttp.ClientOSError() from exc + + self._update_cookies(resp.cookies) + # For Backward compatability with `share_cookie` connectors + if self._connector._share_cookies: + self._connector.update_cookies(resp.cookies) + + # redirects + if resp.status in (301, 302, 303, 307) and allow_redirects: + redirects += 1 + if max_redirects and redirects >= max_redirects: + resp.close(force=True) + break + + # For 301 and 302, mimic IE behaviour, now changed in RFC. 
+ # Details: https://github.com/kennethreitz/requests/pull/269 + if resp.status != 307: + method = hdrs.METH_GET + data = None + + r_url = (resp.headers.get(hdrs.LOCATION) or + resp.headers.get(hdrs.URI)) + + scheme = urllib.parse.urlsplit(r_url)[0] + if scheme not in ('http', 'https', ''): + resp.close(force=True) + raise ValueError('Can redirect only to http or https') + elif not scheme: + r_url = urllib.parse.urljoin(url, r_url) + + url = urllib.parse.urldefrag(r_url)[0] + if url: + yield from asyncio.async(resp.release(), loop=self._loop) + continue + + break + + return resp + + def _update_cookies(self, cookies): + """Update shared cookies.""" + if isinstance(cookies, dict): + cookies = cookies.items() - # For 301 and 302, mimic IE behaviour, now changed in RFC. - # Details: https://github.com/kennethreitz/requests/pull/269 - if resp.status != 307: - method = hdrs.METH_GET - data = None - cookies = resp.cookies + for name, value in cookies: + if isinstance(value, http.cookies.Morsel): + # use dict method because SimpleCookie class modifies value + # before Python3.4 + dict.__setitem__(self.cookies, name, value) + else: + self.cookies[name] = value - r_url = (resp.headers.get(hdrs.LOCATION) or - resp.headers.get(hdrs.URI)) + def _prepare_headers(self, headers): + """ Add default headers and transform it to CIMultiDict + """ + # Convert headers to MultiDict + result = CIMultiDict() + if headers: + if isinstance(headers, dict): + headers = headers.items() + elif isinstance(headers, (MultiDictProxy, MultiDict)): + headers = headers.items() + for key, value in headers: + result.add(key, value) + # Add defaults only if those are not overridden + if self._default_headers: + for key, value in self._default_headers: + if key not in result: + result.add(key, value) + return result - scheme = urllib.parse.urlsplit(r_url)[0] - if scheme not in ('http', 'https', ''): - resp.close(force=True) - raise ValueError('Can redirect only to http or https') - elif not scheme: - 
r_url = urllib.parse.urljoin(url, r_url) + @asyncio.coroutine + def get(self, url, *, allow_redirects=True, **kwargs): + resp = yield from self.request(hdrs.METH_GET, url, + allow_redirects=allow_redirects, + **kwargs) + return resp - url = urllib.parse.urldefrag(r_url)[0] - if url: - yield from asyncio.async(resp.release(), loop=loop) - continue + @asyncio.coroutine + def options(self, url, *, allow_redirects=True, **kwargs): + resp = yield from self.request(hdrs.METH_OPTIONS, url, + allow_redirects=allow_redirects, + **kwargs) + return resp - break + @asyncio.coroutine + def head(self, url, *, allow_redirects=False, **kwargs): + resp = yield from self.request(hdrs.METH_HEAD, url, + allow_redirects=allow_redirects, + **kwargs) + return resp - return resp + @asyncio.coroutine + def post(self, url, *, data=None, **kwargs): + resp = yield from self.request(hdrs.METH_POST, url, + data=data, + **kwargs) + return resp + + @asyncio.coroutine + def put(self, url, *, data=None, **kwargs): + resp = yield from self.request(hdrs.METH_PUT, url, + data=data, + **kwargs) + return resp + + @asyncio.coroutine + def patch(self, url, *, data=None, **kwargs): + resp = yield from self.request(hdrs.METH_PATCH, url, + data=data, + **kwargs) + return resp + + @asyncio.coroutine + def delete(self, url, **kwargs): + resp = yield from self.request(hdrs.METH_DELETE, url, + **kwargs) + return resp class ClientRequest: @@ -290,7 +449,7 @@ def update_path(self, params): def update_headers(self, headers): """Update request headers.""" - self.headers = MultiDict() + self.headers = CIMultiDict() if headers: if isinstance(headers, dict): headers = headers.items() @@ -298,7 +457,7 @@ def update_headers(self, headers): headers = headers.items() for key, value in headers: - self.headers.add(key.upper(), value) + self.headers.add(key, value) for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in self.headers: @@ -675,7 +834,6 @@ def start(self, connection, read_until_eof=False): except 
http.cookies.CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) - connection.share_cookies(self.cookies) return self def close(self, force=False): diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 6ecc343d184..2b64afdf9a4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -6,6 +6,7 @@ import ssl import socket import weakref +import warnings from . import hdrs from .client import ClientRequest @@ -50,17 +51,12 @@ def release(self): self._transport = None self._wr = None - def share_cookies(self, cookies): - if self._connector._share_cookies: # XXX - self._connector.update_cookies(cookies) - class BaseConnector(object): """Base connector class. :param conn_timeout: (optional) Connect timeout. :param keepalive_timeout: (optional) Keep-alive timeout. - :param bool share_cookies: Set to True to keep cookies between requests. :param bool force_close: Set to True to force close and do reconnect after each request (and between redirects). :param loop: Optional event loop. @@ -71,6 +67,10 @@ def __init__(self, *, conn_timeout=None, keepalive_timeout=30, self._conns = {} self._conn_timeout = conn_timeout self._keepalive_timeout = keepalive_timeout + if share_cookies: + warnings.warn( + 'Using `share_cookies` is deprecated. ' + 'Use Session object instead', DeprecationWarning) self._share_cookies = share_cookies self._cleanup_handle = None self._force_close = force_close @@ -152,9 +152,6 @@ def connect(self, req): """Get from pool or create new connection.""" key = (req.host, req.port, req.ssl) - if self._share_cookies: - req.update_cookies(self.cookies.items()) - transport, proto = self._get(key) if transport is None: try: diff --git a/docs/client.rst b/docs/client.rst index f5f5f6fd135..bc1d61d4268 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -195,6 +195,20 @@ For example, if you want to specify the content-type for the previous example:: ... 
headers=headers) +Custom Cookies +-------------- + +To send your own cookies to the server, you can use the ``cookies`` +parameter:: + + >>> url = 'http://httpbin.org/cookies' + >>> cookies = dict(cookies_are='working') + + >>> r = yield from aiohttp.request('get', url, cookies=cookies) + >>> yield from r.text() + '{"cookies": {"cookies_are": "working"}}' + + More complicated POST requests ------------------------------ @@ -256,6 +270,7 @@ information. .. seealso:: :ref:`aiohttp-multipart` + Streaming uploads ----------------- @@ -301,7 +316,7 @@ a file from another request and calculate the file sha1 hash:: >>> resp = aiohttp.request('get', 'http://httpbin.org/post') >>> stream = StreamReader() >>> asyncio.async(aiohttp.request( - ... 'post', 'http://httpbin.org/post', data=stream) + ... 'post', 'http://httpbin.org/post', data=stream)) >>> file_hash = yield from feed_stream(resp, stream) @@ -315,18 +330,45 @@ post requests together:: ... data=r.content) -.. _aiohttp-client-keep-alive: +.. _aiohttp-client-session: -Keep-Alive and connection pooling ---------------------------------- +Keep-Alive, connection pooling and cookie sharing +------------------------------------------------- -By default aiohttp does not use connection pooling. To enable connection pooling -you should use one of the ``connector`` objects. There are several of them. -The most widely used is :class:`aiohttp.connector.TCPConnector`:: +To share cookies between multiple requests you can create an +``aiohttp.ClientSession`` object: - >>> conn = aiohttp.TCPConnector() - >>> r = yield from aiohttp.request( - ... 'get', 'http://python.org', connector=conn) + >>> session = aiohttp.ClientSession() + >>> yield from session.get( + ... 
'http://httpbin.org/cookies/set/my_cookie/my_value') + >>> r = yield from session.get('http://httpbin.org/cookies') + >>> json = yield from r.json() + >>> json['cookies']['my_cookie'] + 'my_value' + +You can also set default headers for all session requests: + + >>> session = aiohttp.ClientSession( + ... headers={"Authorization": "Basic bG9naW46cGFzcw=="}) + >>> r = yield from session.get("http://httpbin.org/headers") + >>> json = yield from r.json() + >>> json['headers']['Authorization'] + 'Basic bG9naW46cGFzcw==' + +By default aiohttp does not use connection pooling. In other words multiple +calls to ``aiohttp.request`` will start a new connection to the host each time. +``aiohttp.ClientSession`` object will do connection pooling for you. + + +Connectors +---------- + +To tweak or change *transport* layer of requests you can pass a custom +**Connector** to ``aiohttp.request``. For example: + + >>> conn = aiohttp.TCPConnector() + >>> r = yield from aiohttp.request( + ... 'get', 'http://python.org', connector=conn) SSL control for tcp sockets @@ -431,8 +473,8 @@ So, we can access the headers using any capitalization we want:: 'application/json' -Cookies -------- +Response Cookies +---------------- If a response contains some Cookies, you can quickly access them:: @@ -442,37 +484,11 @@ If a response contains some Cookies, you can quickly access them:: >>> r.cookies['example_cookie_name'] 'example_cookie_value' -To send your own cookies to the server, you can use the ``cookies`` -parameter:: - - >>> url = 'http://httpbin.org/cookies' - >>> cookies = dict(cookies_are='working') - - >>> r = yield from aiohttp.request('get', url, cookies=cookies) - >>> yield from r.text() - '{"cookies": {"cookies_are": "working"}}' - -With :ref:`connection pooling` you can -share cookies between requests: - -.. code-block:: python - :emphasize-lines: 1 - - >>> conn = aiohttp.connector.TCPConnector(share_cookies=True) - >>> r = yield from aiohttp.request( - ... 'get', - ... 
'http://httpbin.org/cookies/set?k1=v1', - ... connector=conn) - >>> yield from r.text() - '{"cookies": {"k1": "v1"}}' - >>> r = yield from aiohttp.request('get', - ... 'http://httpbin.org/cookies', - ... connection=conn) - >>> yield from r.text() - '{"cookies": {"k1": "v1"}}' - .. note:: - By default ``share_cookies`` is set to ``False``. + Response cookies contain only values that were in ``Set-Cookie`` headers + of the **last** request in the redirection chain. To gather cookies between all + redirection requests you can use :ref:`aiohttp.ClientSession + <aiohttp-client-session>` object. Timeouts diff --git a/requirements-dev.txt b/requirements-dev.txt index cc3402b6e48..ea6329c6dac 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,4 +4,4 @@ coverage sphinx alabaster>=0.6.2 cython -chardet \ No newline at end of file +chardet diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 769418c9f44..416d99e649d 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -11,7 +11,7 @@ from unittest import mock import aiohttp -from aiohttp import client +from aiohttp import client, helpers from aiohttp import test_utils from aiohttp.multidict import MultiDict from aiohttp.multipart import MultipartWriter @@ -925,7 +925,7 @@ def test_session_close(self): conn.close() @mock.patch('aiohttp.client.client_logger') - def test_session_cookies(self, m_log): + def test_connector_cookies(self, m_log): from aiohttp import connector conn = connector.TCPConnector(share_cookies=True, loop=self.loop) @@ -1094,6 +1094,145 @@ def go(): self.loop.run_until_complete(go()) + def test_share_cookie_partial_update(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + conn = aiohttp.TCPConnector(share_cookies=True, loop=self.loop) + # Set c1 and c2 cookie + resp = self.loop.run_until_complete( + client.request('get', httpd.url('cookies'), + connector=conn, loop=self.loop)) + self.assertEqual(resp.cookies['c1'].value, 
'cookie1') + self.assertEqual(resp.cookies['c2'].value, 'cookie2') + self.assertEqual(conn.cookies, resp.cookies) + # Update c1 at server side + resp = self.loop.run_until_complete( + client.request('get', httpd.url('cookies_partial'), + connector=conn, loop=self.loop)) + self.assertEqual(resp.cookies['c1'].value, 'other_cookie1') + # Assert, that we send updated cookies in next requests + r = self.loop.run_until_complete( + client.request('get', httpd.url('method', 'get'), + connector=conn, loop=self.loop)) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertEqual( + content['headers']['Cookie'], + 'c1=other_cookie1; c2=cookie2') + + def test_connector_cookie_merge(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + conn = aiohttp.TCPConnector(share_cookies=True, loop=self.loop) + conn.update_cookies({ + "c1": "connector_cookie1", + "c2": "connector_cookie2", + }) + # Update c1 using direct cookies attribute of request + r = self.loop.run_until_complete( + client.request('get', httpd.url('method', 'get'), + cookies={"c1": "direct_cookie1"}, + connector=conn, loop=self.loop)) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertEqual( + content['headers']['Cookie'], + 'c1=direct_cookie1; c2=connector_cookie2') + + def test_session_cookies(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession(loop=self.loop) + + resp = self.loop.run_until_complete( + session.request('get', httpd.url('cookies'))) + self.assertEqual(resp.cookies['c1'].value, 'cookie1') + self.assertEqual(resp.cookies['c2'].value, 'cookie2') + self.assertEqual(session.cookies, resp.cookies) + + # Assert, that we send those cookies in next requests + r = self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'))) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + 
self.assertEqual( + content['headers']['Cookie'], 'c1=cookie1; c2=cookie2') + + def test_session_headers(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession( + loop=self.loop, headers={ + "X-Real-IP": "192.168.0.1" + }) + + r = self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'))) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertIn( + "X-Real-Ip", content['headers']) + self.assertEqual( + content['headers']["X-Real-Ip"], "192.168.0.1") + + def test_session_headers_merge(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession( + loop=self.loop, headers=[ + ("X-Real-IP", "192.168.0.1"), + ("X-Sent-By", "requests")]) + + r = self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'), + headers={"X-Sent-By": "aiohttp"})) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertIn( + "X-Real-Ip", content['headers']) + self.assertIn( + "X-Sent-By", content['headers']) + self.assertEqual( + content['headers']["X-Real-Ip"], "192.168.0.1") + self.assertEqual( + content['headers']["X-Sent-By"], "aiohttp") + + def test_session_auth(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession( + loop=self.loop, auth=helpers.BasicAuth("login", "pass")) + + r = self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'))) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertIn( + "Authorization", content['headers']) + self.assertEqual( + content['headers']["Authorization"], "Basic bG9naW46cGFzcw==") + + def test_session_auth_override(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession( + loop=self.loop, auth=helpers.BasicAuth("login", "pass")) + + r = 
self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'), + auth=helpers.BasicAuth("other_login", "pass"))) + self.assertEqual(r.status, 200) + content = self.loop.run_until_complete(r.json()) + self.assertIn( + "Authorization", content['headers']) + self.assertEqual( + content['headers']["Authorization"], + "Basic b3RoZXJfbG9naW46cGFzcw==") + + def test_session_auth_header_conflict(self): + with test_utils.run_server(self.loop, router=Functional) as httpd: + session = client.ClientSession( + loop=self.loop, auth=helpers.BasicAuth("login", "pass")) + + headers = {'Authorization': "Basic b3RoZXJfbG9naW46cGFzcw=="} + with self.assertRaises(ValueError): + self.loop.run_until_complete( + session.request('get', httpd.url('method', 'get'), + headers=headers)) + class Functional(test_utils.Router): @@ -1179,6 +1318,17 @@ def cookies(self, match): '{925EC0B8-CB17-4BEB-8A35-1033813B0523}; HttpOnly; Path=/') self._response(resp) + @test_utils.Router.define('/cookies_partial$') + def cookies_partial(self, match): + cookies = http.cookies.SimpleCookie() + cookies['c1'] = 'other_cookie1' + + resp = self._start_response(200) + for cookie in cookies.output(header='').split('\n'): + resp.add_header('Set-Cookie', cookie.strip()) + + self._response(resp) + @test_utils.Router.define('/broken$') def broken(self, match): resp = self._start_response(200) diff --git a/tests/test_connector.py b/tests/test_connector.py index f9e7f9ebb34..d7231fa41f3 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -66,27 +66,6 @@ def test_release_released(self): self.assertIsNone(conn._transport) self.assertFalse(self.connector._release.called) - def test_no_share_cookies(self): - connector = aiohttp.BaseConnector(share_cookies=False, loop=self.loop) - - conn = Connection( - connector, self.key, self.request, - self.transport, self.protocol, self.loop) - self.assertEqual(connector.cookies, {}) - conn.share_cookies({'c1': 'cookie1'}) - 
self.assertEqual(connector.cookies, {}) - - def test_share_cookies(self): - connector = aiohttp.BaseConnector(share_cookies=True, loop=self.loop) - - conn = Connection( - connector, self.key, self.request, - self.transport, self.protocol, self.loop) - self.assertEqual(connector.cookies, {}) - conn.share_cookies({'c1': 'cookie1'}) - self.assertEqual(connector.cookies, - http.cookies.SimpleCookie({'c1': 'cookie1'})) - class BaseConnectorTests(unittest.TestCase): @@ -407,6 +386,11 @@ def test_unix_connector(self): self.assertEqual(r.status, 200) r.close() + def test_connector_cookie_deprecation(self): + with self.assertWarnsRegex(DeprecationWarning, + "^Using `share_cookies` is deprecated"): + aiohttp.TCPConnector(share_cookies=True, loop=self.loop) + class ProxyConnectorTests(unittest.TestCase):