X-Git-Url: https://jfr.im/git/z_archive/twitter.git/blobdiff_plain/26938000070f6302c44e99785bc27eef13114c69..b29e07db2e9d55e3ed78b80613b868b40d99efd7:/twitter/stream.py

diff --git a/twitter/stream.py b/twitter/stream.py
index eb07742..506b873 100644
--- a/twitter/stream.py
+++ b/twitter/stream.py
@@ -12,85 +12,125 @@ import sys, select, time
 
 from .api import TwitterCall, wrap_response, TwitterHTTPError
 
-def recv_chunk(sock):
-    buf = sock.recv(10)  # Scan for an up to a 4GiB chunk size (0xffffffff).
-    if buf:
-        crlf = buf.find(b'\r\n')  # Find the HTTP chunk size.
-        if crlf > 0:
-            remaining = int(buf[:crlf], 16)  # Decode the chunk size.
-            chunk = bytearray(remaining)  # Create the chunk buffer.
+def recv_chunk_old(sock):  # -> bytearray:
+    """
+    Compatible with Python 2.6, but less efficient.
+    """
+    buf = sock.recv(8)  # Scan for an up to 16MiB chunk size (0xffffff).
+    crlf = buf.find(b'\r\n')  # Find the HTTP chunk size.
+
+    if crlf > 0:  # If there is a length, then process it
 
-            start = crlf + 2  # Add in the length of the header's CRLF pair.
-            end = len(buf) - start
+        remaining = int(buf[:crlf], 16)  # Decode the chunk size.
+        start = crlf + 2  # Add in the length of the header's CRLF pair.
+        end = len(buf) - start
+
+        chunk = bytearray(remaining)
+
+        if remaining <= 2:  # E.g. an HTTP chunk with just a keep-alive delimiter or end of stream (0).
+            chunk[:remaining] = buf[start:start + remaining]
+        # There are several edge cases (remaining == [3-6]) as the chunk size exceeds the length
+        # of the initial read of 8 bytes. With Twitter, these do not, in practice, occur. The
+        # shortest JSON message starts with '{"limit":{'. Hence, it exceeds in size the edge cases
+        # and eliminates the need to address them.
+        else:  # There is more to read in the chunk.
             chunk[:end] = buf[start:]
-            chunk[end:] = sock.recv(remaining - end)
+            chunk[end:] = sock.recv(max(0, remaining - end))
+
+        sock.recv(2)  # Read the trailing CRLF pair. Throw it away.
+
+        return chunk
+
+    return bytearray()
+
+##  recv_chunk_old()
+
+def recv_chunk_new(sock):  # -> bytearray:
+    """
+    Compatible with Python 2.7+.
+    """
+    header = sock.recv(8)  # Scan for an up to 16MiB chunk size (0xffffff).
+    crlf = header.find(b'\r\n')  # Find the HTTP chunk size.
+
+    if crlf > 0:  # If there is a length, then process it
+
+        size = int(header[:crlf], 16)  # Decode the chunk size. Rarely exceeds 8KiB.
+        chunk = bytearray(size)
+        start = crlf + 2  # Add in the length of the header's CRLF pair.
+
+        if size <= 3:  # E.g. an HTTP chunk with just a keep-alive delimiter or end of stream (0).
+            chunk[:size] = header[start:start + size]
+        # There are several edge cases (size == [4-6]) as the chunk size exceeds the length
+        # of the initial read of 8 bytes. With Twitter, these do not, in practice, occur. The
+        # shortest JSON message starts with '{"limit":{'. Hence, it exceeds in size the edge cases
+        # and eliminates the need to address them.
+        else:  # There is more to read in the chunk.
+            end = len(header) - start
+            chunk[:end] = header[start:]
+            buffer = memoryview(chunk)[end:]  # Create a view into the bytearray to hold the rest of the chunk.
+            sock.recv_into(buffer)
             sock.recv(2)  # Read the trailing CRLF pair. Throw it away.
-            return chunk
-    return b''
+        return chunk
+
+    return bytearray()
 
-##  recv_chunk()
+##  recv_chunk_new()
 
+if (sys.version_info.major, sys.version_info.minor) >= (2, 7):
+    recv_chunk = recv_chunk_new
+else:
+    recv_chunk = recv_chunk_old
+
 
 class TwitterJSONIter(object):
 
     def __init__(self, handle, uri, arg_data, block=True, timeout=None):
-        self.decoder = json.JSONDecoder()
         self.handle = handle
         self.uri = uri
         self.arg_data = arg_data
-        self.buf = b""
         self.block = block
         self.timeout = timeout
-        self.timer = time.time()
 
 
     def __iter__(self):
-        if sys.version_info >= (3, 0):
-            sock = self.handle.fp.raw._sock
-        else:
-            sock = self.handle.fp._sock.fp._sock
+        sock = self.handle.fp.raw._sock if sys.version_info >= (3, 0) else self.handle.fp._sock.fp._sock
         sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
-        if not self.block or self.timeout:
-            sock.setblocking(False)
+        sock.setblocking(self.block and not self.timeout)
+        buf = ''
+        json_decoder = json.JSONDecoder()
+        timer = time.time()
         while True:
             try:
-                utf8_buf = self.buf.decode('utf8').lstrip()
-                res, ptr = self.decoder.raw_decode(utf8_buf)
-                self.buf = utf8_buf[ptr:].encode('utf8')
+                buf = buf.lstrip()
+                res, ptr = json_decoder.raw_decode(buf)
+                buf = buf[ptr:]
                 yield wrap_response(res, self.handle.headers)
-                self.timer = time.time()
                 continue
             except ValueError as e:
-                if self.block:
-                    pass
-                else:
-                    yield None
-            # this is a non-blocking read (ie, it will return if any data is available)
+                if self.block and not self.timeout: pass
+                else: yield None
             try:
-                if self.timeout:
+                buf = buf.lstrip()  # Remove any keep-alive delimiters to detect hangups.
+                if self.timeout and not buf:  # This is a non-blocking read.
                     ready_to_read = select.select([sock], [], [], self.timeout)
-                    if ready_to_read[0]:
-                        self.buf += recv_chunk(sock)
-                        if time.time() - self.timer > self.timeout:
-                            yield {"timeout":True}
-                    else:
-                        yield {"timeout":True}
-                else:
-                    self.buf += recv_chunk(sock)
+                    if not ready_to_read[0] and time.time() - timer > self.timeout:
+                        yield {'timeout': True}
+                        continue
+                timer = time.time()
+                buf += recv_chunk(sock).decode('utf-8')
+                if not buf:
+                    yield {'hangup': True}
+                    break
             except SSLError as e:
-                if (not self.block or self.timeout) and (e.errno == 2):
-                    # Apparently this means there was nothing in the socket buf
-                    pass
-                else:
-                    raise
-            except urllib_error.HTTPError as e:
-                raise TwitterHTTPError(e, self.uri, 'json', self.arg_data)
+                # Error from a non-blocking read of an empty buffer.
+                if (not self.block or self.timeout) and (e.errno == 2): pass
+                else: raise
 
 def handle_stream_response(req, uri, arg_data, block, timeout=None):
-    handle = urllib_request.urlopen(req,)
+    try:
+        handle = urllib_request.urlopen(req,)
+    except urllib_error.HTTPError as e:
+        raise TwitterHTTPError(e, uri, 'json', arg_data)
     return iter(TwitterJSONIter(handle, uri, arg_data, block, timeout=timeout))
 
 class TwitterStreamCallWithTimeout(TwitterCall):
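
Note: recv_chunk_new() preallocates a bytearray for the whole HTTP chunk and lets the kernel
write the remaining bytes straight into a memoryview of that buffer via sock.recv_into(),
instead of concatenating the result of a second sock.recv() as recv_chunk_old() does. The
sketch below shows the same pattern in isolation; the name read_chunk and the socketpair demo
are illustrative assumptions, not part of this module.

    import socket

    def read_chunk(sock):  # Illustrative stand-in for recv_chunk_new(); not library code.
        header = sock.recv(8)                    # Hex chunk-size line, e.g. b'15\r\n{"li'.
        crlf = header.find(b'\r\n')
        if crlf <= 0:
            return bytearray()
        size = int(header[:crlf], 16)            # Decode the chunk size.
        chunk = bytearray(size)                  # Preallocate the full chunk once.
        body = header[crlf + 2:crlf + 2 + size]  # Bytes that arrived along with the header.
        chunk[:len(body)] = body
        view = memoryview(chunk)[len(body):]     # Fill the rest in place, no extra copies.
        while len(view):                         # Tiny-chunk edge cases are ignored, as in the original.
            n = sock.recv_into(view)
            if n == 0:
                break                            # Peer hung up mid-chunk.
            view = view[n:]
        sock.recv(2)                             # Discard the trailing CRLF pair.
        return chunk

    # Tiny demo over a socketpair instead of a real chunked HTTP stream.
    a, b = socket.socketpair()
    payload = b'{"limit":{"track":5}}'
    a.sendall(b'%x\r\n' % len(payload) + payload + b'\r\n')
    print(bytes(read_chunk(b)))                  # b'{"limit":{"track":5}}'
    a.close(); b.close()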
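Note: TwitterJSONIter.__iter__() now keeps a single text buffer, strips keep-alive whitespace,
and uses json.JSONDecoder().raw_decode() to peel complete JSON objects off the front of the
buffer while leaving any partial object in place; a ValueError from raw_decode() therefore
means "wait for more data", not a protocol error. Below is a standalone sketch of that
incremental-decode pattern, with a plain iterable of text pieces standing in for the socket;
the name iter_json_objects and the sample data are made up for illustration.

    import json

    def iter_json_objects(text_pieces):
        decoder = json.JSONDecoder()
        buf = ''
        for piece in text_pieces:
            buf += piece
            while True:
                buf = buf.lstrip()            # Drop keep-alive newlines between objects.
                try:
                    obj, end = decoder.raw_decode(buf)
                except ValueError:            # Incomplete object: wait for the next piece.
                    break
                buf = buf[end:]               # Keep whatever follows the decoded object.
                yield obj

    # Chunk boundaries need not line up with JSON object boundaries.
    pieces = ['{"limit":{"track"', ':5}}\r\n{"id":1,"text":"hi"}\r\n']
    for obj in iter_json_objects(pieces):
        print(obj)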