X-Git-Url: https://jfr.im/git/z_archive/twitter.git/blobdiff_plain/42b9cdeeee949f219a76d5cbbade53d4c1a8260c..16e807982055799c0aa6b0fdea1362316c220783:/twitter/stream.py

diff --git a/twitter/stream.py b/twitter/stream.py
index bf116d5..bc18f8e 100644
--- a/twitter/stream.py
+++ b/twitter/stream.py
@@ -1,59 +1,108 @@
-try:
+import sys
+PY_3_OR_HIGHER = sys.version_info >= (3, 0)
+
+if PY_3_OR_HIGHER:
     import urllib.request as urllib_request
     import urllib.error as urllib_error
-    import io
-except ImportError:
+else:
     import urllib2 as urllib_request
     import urllib2 as urllib_error
 
 import json
 from ssl import SSLError
 import socket
+import codecs
 import sys, select, time
 
 from .api import TwitterCall, wrap_response, TwitterHTTPError
 
-PY_27_OR_HIGHER = sys.version_info >= (2, 7)
-PY_3_OR_HIGHER = sys.version_info >= (3, 0)
+CRLF = b'\r\n'
+MIN_SOCK_TIMEOUT = 0.0  # Apparenty select with zero wait is okay!
+MAX_SOCK_TIMEOUT = 10.0
+HEARTBEAT_TIMEOUT = 90.0
 
 Timeout = {'timeout': True}
 Hangup = {'hangup': True}
+DecodeError = {'hangup': True, 'decode_error': True}
+HeartbeatTimeout = {'hangup': True, 'heartbeat_timeout': True}
+
+
+class HttpChunkDecoder(object):
+    def __init__(self):
+        self.buf = bytearray()
+        self.munch_crlf = False
 
-def recv_chunk(sock):  # -> bytearray:
+    def decode(self, data):  # -> (bytearray, end_of_stream, decode_error)
+        chunks = []
+        buf = self.buf
+        munch_crlf = self.munch_crlf
+        end_of_stream = False
+        decode_error = False
+        buf.extend(data)
+        while True:
+            if munch_crlf:
+                # Dang, Twitter, you crazy. Twitter only sends a terminating
+                # CRLF at the beginning of the *next* message.
+                if len(buf) >= 2:
+                    buf = buf[2:]
+                    munch_crlf = False
+                else:
+                    break
 
-    header = sock.recv(8)  # Scan for an up to 16MiB chunk size (0xffffff).
-    crlf = header.find(b'\r\n')  # Find the HTTP chunk size.
+            header_end_pos = buf.find(CRLF)
+            if header_end_pos == -1:
+                break
 
-    if crlf > 0:  # If there is a length, then process it
+            header = buf[:header_end_pos]
+            data_start_pos = header_end_pos + 2
+            try:
+                chunk_len = int(header.decode('ascii'), 16)
+            except ValueError:
+                decode_error = True
+                break
 
-        size = int(header[:crlf], 16)  # Decode the chunk size. Rarely exceeds 8KiB.
-        chunk = bytearray(size)
-        start = crlf + 2  # Add in the length of the header's CRLF pair.
+            if chunk_len == 0:
+                end_of_stream = True
+                break
 
-        if size <= 3:  # E.g. an HTTP chunk with just a keep-alive delimiter or end of stream (0).
-            chunk[:size] = header[start:start + size]
-        # There are several edge cases (size == [4-6]) as the chunk size exceeds the length
-        # of the initial read of 8 bytes. With Twitter, these do not, in practice, occur. The
-        # shortest JSON message starts with '{"limit":{'. Hence, it exceeds in size the edge cases
-        # and eliminates the need to address them.
-        else:  # There is more to read in the chunk.
-            end = len(header) - start
-            chunk[:end] = header[start:]
-            if PY_27_OR_HIGHER:  # When possible, use less memory by reading directly into the buffer.
-                buffer = memoryview(chunk)[end:]  # Create a view into the bytearray to hold the rest of the chunk.
-                sock.recv_into(buffer)
-            else:  # less efficient for python2.6 compatibility
-                chunk[end:] = sock.recv(max(0, size - end))
-        sock.recv(2)  # Read the trailing CRLF pair. Throw it away.
+            data_end_pos = data_start_pos + chunk_len
 
-        return chunk
+            if len(buf) >= data_end_pos:
+                chunks.append(buf[data_start_pos:data_end_pos])
+                buf = buf[data_end_pos:]
+                munch_crlf = True
+            else:
+                break
+        self.buf = buf
+        self.munch_crlf = munch_crlf
+        return bytearray().join(chunks), end_of_stream, decode_error
 
-    return bytearray()
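The new HttpChunkDecoder replaces the socket-reading recv_chunk() with a pure state machine: it can be fed whatever fragments happen to arrive and emits only complete chunks, buffering the remainder. A minimal sketch of that contract (the byte strings are invented for illustration, not real Twitter traffic):

    decoder = HttpChunkDecoder()

    # A read that ends mid-chunk emits nothing; the bytes stay buffered.
    # The header b'b' is hex for 11, the length of b'hello world'.
    data, end_of_stream, decode_error = decoder.decode(b'b\r\nhello worl')
    assert (bytes(data), end_of_stream) == (b'', False)

    # The rest of the chunk plus the zero-length terminator arrives.
    data, end_of_stream, decode_error = decoder.decode(b'd\r\n0\r\n\r\n')
    assert (bytes(data), end_of_stream) == (b'hello world', True)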
+
+class JsonDecoder(object):
+
+    def __init__(self):
+        self.buf = u""
+        self.raw_decode = json.JSONDecoder().raw_decode
+
+    def decode(self, data):
+        chunks = []
+        buf = self.buf + data
+        while True:
+            try:
+                buf = buf.lstrip()
+                res, ptr = self.raw_decode(buf)
+                buf = buf[ptr:]
+                chunks.append(res)
+            except ValueError:
+                break
+        self.buf = buf
+        return chunks
 
 
 class Timer(object):
+
     def __init__(self, timeout):
-        # If timeout is None, we always expire.
+        # If timeout is None, we never expire.
         self.timeout = timeout
         self.reset()
 
@@ -65,66 +114,98 @@ class Timer(object):
         If expired, reset the timer and return True.
         """
         if self.timeout is None:
-            return True
+            return False
         elif time.time() - self.time > self.timeout:
             self.reset()
             return True
         return False
 
 
+class SockReader(object):
+    def __init__(self, sock, sock_timeout):
+        self.sock = sock
+        self.sock_timeout = sock_timeout
+
+    def read(self):
+        try:
+            ready_to_read = select.select([self.sock], [], [], self.sock_timeout)[0]
+            if ready_to_read:
+                return self.sock.read()
+        except SSLError as e:
+            # Code 2 is error from a non-blocking read of an empty buffer.
+            if e.errno != 2:
+                raise
+        return bytearray()
+
+
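JsonDecoder applies the same buffer-until-parseable idea one layer up: json.JSONDecoder.raw_decode() reports how many characters it consumed, so a partially received object simply waits in the buffer, and keep-alive newlines are stripped before each attempt. A sketch, again with invented input:

    decoder = JsonDecoder()

    # A keep-alive newline, one whole object, and half of the next.
    assert decoder.decode(u'\r\n{"a": 1}{"b":') == [{u'a': 1}]

    # The leftover text completes into valid JSON on the next call.
    assert decoder.decode(u' 2}') == [{u'b': 2}]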
 class TwitterJSONIter(object):
 
-    def __init__(self, handle, uri, arg_data, block=True, timeout=None):
+    def __init__(self, handle, uri, arg_data, block, timeout, heartbeat_timeout):
         self.handle = handle
         self.uri = uri
         self.arg_data = arg_data
-        self.block = block
-        self.timeout = timeout
-
+        self.timeout_token = Timeout
+        self.timeout = None
+        self.heartbeat_timeout = HEARTBEAT_TIMEOUT
+        if timeout and timeout > 0:
+            self.timeout = float(timeout)
+        elif not (block or timeout):
+            self.timeout_token = None
+            self.timeout = MIN_SOCK_TIMEOUT
+        if heartbeat_timeout and heartbeat_timeout > 0:
+            self.heartbeat_timeout = float(heartbeat_timeout)
 
     def __iter__(self):
-        actually_blocking = self.block and not self.timeout
+        timeouts = [t for t in (self.timeout, self.heartbeat_timeout, MAX_SOCK_TIMEOUT)
+                    if t is not None]
+        sock_timeout = min(*timeouts)
         sock = self.handle.fp.raw._sock if PY_3_OR_HIGHER else self.handle.fp._sock.fp._sock
         sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
-        sock.setblocking(actually_blocking)
-        buf = ''
-        json_decoder = json.JSONDecoder()
+        headers = self.handle.headers
+        sock_reader = SockReader(sock, sock_timeout)
+        chunk_decoder = HttpChunkDecoder()
+        utf8_decoder = codecs.getincrementaldecoder("utf-8")()
+        json_decoder = JsonDecoder()
         timer = Timer(self.timeout)
-        timeout_token = Timeout if self.timeout else None
+        heartbeat_timer = Timer(self.heartbeat_timeout)
+
         while True:
-            buf = buf.lstrip()  # Remove any keep-alive delimiters to detect hangups.
-            try:
-                res, ptr = json_decoder.raw_decode(buf)
-                buf = buf[ptr:]
-            except ValueError:
-                pass
-            else:
-                yield wrap_response(res, self.handle.headers)
+            # Decode all the things:
+            data = sock_reader.read()
+            dechunked_data, end_of_stream, decode_error = chunk_decoder.decode(data)
+            unicode_data = utf8_decoder.decode(dechunked_data)
+            json_data = json_decoder.decode(unicode_data)
+
+            # Yield data-like things:
+            for json_obj in json_data:
+                yield wrap_response(json_obj, headers)
+
+            # Reset timers:
+            if dechunked_data:
+                heartbeat_timer.reset()
+            if json_data:
                 timer.reset()
-                continue
-            try:
-                if self.timeout and not buf:  # This is a non-blocking read.
-                    ready_to_read = select.select([sock], [], [], self.timeout)[0]
-                    if not ready_to_read and timer.expired():
-                        yield timeout_token
-                        continue
-                buf += recv_chunk(sock).decode('utf-8')
-                if not buf:
-                    yield Hangup
-                    break
-            except SSLError as e:
-                # Error from a non-blocking read of an empty buffer.
-                if not actually_blocking and (e.errno == 2):
-                    if timer.expired():
-                        yield timeout_token
-                else: raise
-
-def handle_stream_response(req, uri, arg_data, block, timeout=None):
+
+            # Yield timeouts and special things:
+            if end_of_stream:
+                yield Hangup
+                break
+            if decode_error:
+                yield DecodeError
+                break
+            if heartbeat_timer.expired():
+                yield HeartbeatTimeout
+                break
+            if timer.expired():
+                yield self.timeout_token
+
+
+def handle_stream_response(req, uri, arg_data, block, timeout, heartbeat_timeout):
     try:
         handle = urllib_request.urlopen(req,)
     except urllib_error.HTTPError as e:
         raise TwitterHTTPError(e, uri, 'json', arg_data)
-    return iter(TwitterJSONIter(handle, uri, arg_data, block, timeout=timeout))
+    return iter(TwitterJSONIter(handle, uri, arg_data, block, timeout, heartbeat_timeout))
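Each turn of the rewritten loop drains the socket through the chunk, UTF-8, and JSON decoders in order, then reports status via the module-level token dicts. A sketch of what a consumer sees, assuming `stream` was built as `TwitterStream(auth=..., timeout=60)` (construction is shown after the class below) and using the `statuses/sample` endpoint access this library documents elsewhere:

    for msg in stream.statuses.sample():
        if msg is None:
            continue               # only seen in fully non-blocking mode
        elif msg.get('timeout'):
            do_other_work()        # hypothetical hook; ~60s passed quietly
        elif msg.get('hangup'):
            break                  # msg may also carry 'heartbeat_timeout' or
                                   # 'decode_error'; iterating again raises
                                   # StopIteration
        else:
            process_tweet(msg)     # hypothetical handler for a real tweet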
""" - def __init__( - self, domain="stream.twitter.com", secure=True, auth=None, - api_version='1.1', block=True, timeout=None): + def __init__(self, domain="stream.twitter.com", secure=True, auth=None, + api_version='1.1', block=True, timeout=None, + heartbeat_timeout=90.0): uriparts = (str(api_version),) - timeout = float(timeout) if timeout else None class TwitterStreamCall(TwitterCall): def _handle_response(self, req, uri, arg_data, _timeout=None): return handle_stream_response( - req, uri, arg_data, block=block, timeout=_timeout or timeout) + req, uri, arg_data, block, + _timeout or timeout, heartbeat_timeout) TwitterCall.__init__( self, auth=auth, format="json", domain=domain,