self.timer = time.time()
def recv_chunk(self, sock):
    """Read one HTTP chunked-transfer-encoding chunk from *sock*.

    Returns the chunk payload as a bytearray with its trailing CRLF pair
    removed when fully received (this saves an extra trip through the
    JSON parser), or b'' when the socket yields no data or the chunk
    header is malformed.
    """
    header = sock.recv(32)  # Usually enough for "<hex-size>\r\n" plus a payload prefix.
    if not header:
        return b''
    # The first 32-byte read is not guaranteed to contain the CRLF that
    # terminates the chunk-size line; keep reading until it does instead
    # of discarding the bytes already buffered (which would corrupt the
    # chunked stream).
    while b'\r\n' not in header:
        more = sock.recv(32)
        if not more:  # Connection closed mid-header: nothing usable.
            return b''
        header += more
    crlf = header.find(b'\r\n')
    if crlf == 0:  # Empty size line: malformed chunk header.
        return b''
    remaining = int(header[:crlf].decode(), 16)  # Chunk size is hex-encoded.
    chunk = bytearray(header[crlf + 2:])  # Payload bytes already read.
    remaining -= len(chunk)

    while remaining > 0:
        balance = sock.recv(remaining + 2)  # + the chunk's own trailing CRLF pair.
        if not balance:
            # Peer closed the connection mid-chunk; bail out rather than
            # spinning forever on empty recv() results.
            break
        chunk.extend(balance)
        remaining -= len(balance)
    # Strip the trailing CRLF pair whenever we have consumed it — whether it
    # arrived in the first read or a later one. remaining == -2 means exactly
    # the two CRLF bytes were read past the payload. The slice compare is
    # safe on short buffers where indexing chunk[-2] would raise.
    if remaining == -2 and chunk[-2:] == b'\r\n':
        del chunk[-2:]
    return chunk
def __iter__(self):
if sys.version_info >= (3, 0):
sock = self.handle.fp.raw._sock
while True:
try:
utf8_buf = self.buf.decode('utf8').lstrip()
- if utf8_buf and utf8_buf[0] != '{': # Remove the hex delimiter length and extra whitespace.
- utf8_buf = utf8_buf.lstrip('0123456789abcdefABCDEF')
- utf8_buf = utf8_buf.lstrip()
res, ptr = self.decoder.raw_decode(utf8_buf)
self.buf = utf8_buf[ptr:].encode('utf8')
yield wrap_response(res, self.handle.headers)
pass
else:
yield None
- except urllib_error.HTTPError as e: # Probably unnecessary, no dynamic url calls in the try block.
- raise TwitterHTTPError(e, self.uri, 'json', self.arg_data)
# this is a non-blocking read (ie, it will return if any data is available)
try:
if self.timeout:
ready_to_read = select.select([sock], [], [], self.timeout)
if ready_to_read[0]:
- self.buf += sock.recv(1024)
+ self.buf += self.recv_chunk(sock)
if time.time() - self.timer > self.timeout:
yield {"timeout":True}
else:
yield {"timeout":True}
else:
- self.buf += sock.recv(1024) # As tweets are typically longer than 1KB, consider increasing this size.
+ self.buf += self.recv_chunk(sock)
except SSLError as e:
if (not self.block or self.timeout) and (e.errno == 2):
# Apparently this means there was nothing in the socket buf