+# encoding: utf-8
+from __future__ import unicode_literals
+
import sys
PY_3_OR_HIGHER = sys.version_info >= (3, 0)
import json
from ssl import SSLError
import socket
-import io
import codecs
import sys, select, time
from .api import TwitterCall, wrap_response, TwitterHTTPError
CRLF = b'\r\n'
+MIN_SOCK_TIMEOUT = 0.0 # Apparently select with zero wait is okay!
+MAX_SOCK_TIMEOUT = 10.0
+HEARTBEAT_TIMEOUT = 90.0
Timeout = {'timeout': True}
Hangup = {'hangup': True}
-HeartbeatTimeout = {'heartbeat_timeout': True, 'hangup': True}
-
-class ChunkDecodeError(Exception):
- pass
-
-class EndOfStream(Exception):
- pass
-
-range = range if PY_3_OR_HIGHER else xrange
+DecodeError = {'hangup': True, 'decode_error': True}
+HeartbeatTimeout = {'hangup': True, 'heartbeat_timeout': True}
CRLF = b'\r\n'  # chunk-header delimiter (same value as the module-level constant)

class HttpChunkDecoder(object):
    """Incremental decoder for HTTP/1.1 chunked transfer encoding.

    Feed raw bytes with decode(); any incomplete trailing chunk is buffered
    between calls.  Twitter sends the CRLF that terminates a chunk's data at
    the start of the *next* message, so ``munch_crlf`` records that two bytes
    must be discarded before the next chunk header is parsed.
    """

    def __init__(self):
        self.buf = bytearray()   # undecoded tail carried over between calls
        self.munch_crlf = False  # True when a chunk-terminating CRLF is still owed

    def decode(self, data):  # -> (bytearray, end_of_stream, decode_error)
        """Consume ``data``; return (payload, end_of_stream, decode_error).

        payload is the concatenation of all chunk bodies completed by this
        call; end_of_stream is True once the zero-length terminal chunk is
        seen; decode_error is True if a chunk-size header is not valid hex.
        """
        chunks = []
        buf = self.buf
        munch_crlf = self.munch_crlf
        end_of_stream = False
        decode_error = False
        buf.extend(data)
        while True:
            if munch_crlf:
                # Dang, Twitter, you crazy. Twitter only sends a terminating
                # CRLF at the beginning of the *next* message.
                if len(buf) >= 2:
                    buf = buf[2:]
                    munch_crlf = False
                else:
                    break

            header_end_pos = buf.find(CRLF)
            if header_end_pos == -1:
                # Chunk-size header not complete yet; wait for more data.
                break
            header = buf[:header_end_pos]
            data_start_pos = header_end_pos + 2  # skip the header's CRLF
            try:
                chunk_len = int(header.decode('ascii'), 16)
            except ValueError:
                decode_error = True
                break
            if chunk_len == 0:
                # Zero-length chunk marks the end of the stream.
                end_of_stream = True
                break
            data_end_pos = data_start_pos + chunk_len
            if len(buf) >= data_end_pos:
                chunks.append(buf[data_start_pos:data_end_pos])
                buf = buf[data_end_pos:]
                munch_crlf = True
            else:
                # Chunk body not fully received yet.
                break
        self.buf = buf
        self.munch_crlf = munch_crlf
        return bytearray().join(chunks), end_of_stream, decode_error
class JsonDecoder(object):
    """Incrementally decodes a stream of concatenated JSON documents.

    Feed text with decode(); complete JSON values are returned as a list and
    any trailing partial document is buffered until the next call.
    """

    def __init__(self):
        self.buf = ""  # unparsed tail carried over between calls
        # raw_decode parses one JSON value from the front of a string and
        # reports how many characters it consumed.
        self.raw_decode = json.JSONDecoder().raw_decode

    def decode(self, data):
        """Consume ``data``; return the list of fully parsed JSON values."""
        chunks = []
        buf = self.buf + data
        while True:
            try:
                # Whitespace (including the newlines Twitter uses as
                # keep-alives) may precede each document.
                buf = buf.lstrip()
                res, ptr = self.raw_decode(buf)
                buf = buf[ptr:]
                chunks.append(res)
            except ValueError:
                # No complete JSON value at the front; keep the remainder.
                break
        self.buf = buf
        return chunks
class Timer(object):
    """Tracks wall-clock time elapsed since the last reset against a timeout."""

    def __init__(self, timeout):
        # If timeout is None, we never expire.
        self.timeout = timeout
        self.reset()

    def reset(self):
        """Restart the timer from the current time."""
        self.time = time.time()

    def expired(self):
        """Return True (and restart the timer) once more than ``timeout``
        seconds have passed since the last reset; otherwise False."""
        if self.timeout is None:
            return False
        elif time.time() - self.time > self.timeout:
            self.reset()
            return True
        return False
class SockReader(object):
    """Reads whatever data is ready on a socket, waiting at most
    ``sock_timeout`` seconds per attempt."""

    def __init__(self, sock, sock_timeout):
        self.sock = sock                  # object exposing fileno() and read()
        self.sock_timeout = sock_timeout  # max seconds to block in select()

    def read(self):
        """Return available bytes, or an empty bytearray if none are ready."""
        try:
            ready_to_read = select.select([self.sock], [], [], self.sock_timeout)[0]
            if ready_to_read:
                return self.sock.read()
        except SSLError as e:
            # Code 2 (SSL_ERROR_WANT_READ) is the error from a non-blocking
            # read of an empty buffer; anything else is a real failure.
            if e.errno != 2:
                raise
        return bytearray()
+
+
class TwitterJSONIter(object):
    # Iterates JSON objects arriving on a chunked streaming HTTP response.
    # Besides wrapped JSON payloads it yields sentinel dicts: Timeout (or
    # None in non-blocking mode), Hangup, DecodeError, HeartbeatTimeout.

    def __init__(self, handle, uri, arg_data, block, timeout, heartbeat_timeout):
        self.handle = handle  # open urllib response for the stream
        self.uri = uri
        self.arg_data = arg_data
        self.timeout_token = Timeout   # what to yield when self.timeout expires
        self.timeout = None
        self.heartbeat_timeout = HEARTBEAT_TIMEOUT
        if timeout and timeout > 0:
            self.timeout = float(timeout)
        elif not (block or timeout):
            # Non-blocking mode: zero-wait polls, and yield None instead of
            # the Timeout sentinel when no data is available.
            self.timeout_token = None
            self.timeout = MIN_SOCK_TIMEOUT
        if heartbeat_timeout and heartbeat_timeout > 0:
            self.heartbeat_timeout = float(heartbeat_timeout)

    def __iter__(self):
        # Each select() waits no longer than the shortest relevant timeout;
        # heartbeat_timeout and MAX_SOCK_TIMEOUT are always set, so the list
        # has at least two entries.
        timeouts = [t for t in (self.timeout, self.heartbeat_timeout, MAX_SOCK_TIMEOUT)
                    if t is not None]
        sock_timeout = min(*timeouts)
        # Reach into the urllib response for the raw socket (private attrs;
        # the object layout differs between Python 2 and 3).
        sock = self.handle.fp.raw._sock if PY_3_OR_HIGHER else self.handle.fp._sock.fp._sock
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        headers = self.handle.headers
        sock_reader = SockReader(sock, sock_timeout)
        chunk_decoder = HttpChunkDecoder()
        utf8_decoder = codecs.getincrementaldecoder("utf-8")()
        json_decoder = JsonDecoder()
        timer = Timer(self.timeout)
        heartbeat_timer = Timer(self.heartbeat_timeout)

        while True:
            # Decode all the things: raw socket bytes -> de-chunked bytes ->
            # unicode text -> complete JSON objects.
            data = sock_reader.read()
            dechunked_data, end_of_stream, decode_error = chunk_decoder.decode(data)
            unicode_data = utf8_decoder.decode(dechunked_data)
            json_data = json_decoder.decode(unicode_data)

            # Yield data-like things:
            for json_obj in json_data:
                yield wrap_response(json_obj, headers)

            # Reset timers: any bytes at all (keep-alive newlines included)
            # count as a heartbeat; only complete JSON resets the user timer.
            if dechunked_data:
                heartbeat_timer.reset()
            if json_data:
                timer.reset()

            # Yield timeouts and special things:
            if end_of_stream:
                yield Hangup
                break
            if decode_error:
                yield DecodeError
                break
            if heartbeat_timer.expired():
                yield HeartbeatTimeout
                break
            if timer.expired():
                # Not a hangup: keep iterating after reporting the timeout.
                yield self.timeout_token
def handle_stream_response(req, uri, arg_data, block, timeout, heartbeat_timeout):
try:
iterator = twitter_stream.statuses.sample()
for tweet in iterator:
- ...do something with this tweet...
+ # ...do something with this tweet...
+
+    By default the ``TwitterStream`` object uses
+ [public streams](https://dev.twitter.com/docs/streaming-apis/streams/public).
+ If you want to use one of the other
+ [streaming APIs](https://dev.twitter.com/docs/streaming-apis), specify the URL
+ manually:
+
+ - [Public streams](https://dev.twitter.com/docs/streaming-apis/streams/public): stream.twitter.com
+ - [User streams](https://dev.twitter.com/docs/streaming-apis/streams/user): userstream.twitter.com
+ - [Site streams](https://dev.twitter.com/docs/streaming-apis/streams/site): sitestream.twitter.com
+
+ Note that you require the proper
+ [permissions](https://dev.twitter.com/docs/application-permission-model) to
+ access these streams. E.g. for direct messages your
+ [application](https://dev.twitter.com/apps) needs the "Read, Write & Direct
+ Messages" permission.
+
+ The following example demonstrates how to retrieve all new direct messages
+ from the user stream::
+
+ auth = OAuth(
+ consumer_key='[your consumer key]',
+ consumer_secret='[your consumer secret]',
+ token='[your token]',
+ token_secret='[your token secret]'
+ )
+ twitter_userstream = TwitterStream(auth=auth, domain='userstream.twitter.com')
+ for msg in twitter_userstream.user():
+ if 'direct_message' in msg:
+            print(msg['direct_message']['text'])
The iterator will yield until the TCP connection breaks. When the
connection breaks, the iterator yields `{'hangup': True}`, and