+ chunks.append(chunk)
+ json = "".join(chunks)
+
+ if "Content-Length" in connection.info():
+ assert len(json) == int(connection.info()["Content-Length"]), "The packet header promised %s of data but only was able to read %s of data" % (
+ connection.info()["Content-Length"],
+ len(json),
+ )
+
+ return json
+
+ @staticmethod
+ def read_by_guess(connection, timeout):
+ # Drain `connection` (a urllib/http.client response-like object) by
+ # calling read() repeatedly, sleeping 1 second between reads, until
+ # read() returns a falsy (empty) chunk or `timeout` iterations
+ # (~seconds) have been used up.
+ # NOTE(review): no `return` is visible in this hunk; the collected
+ # chunks are presumably joined and returned just past the end of this
+ # view, mirroring the sibling reader above — confirm in the full patch.
+ # It appears that urllib uses the non-blocking variant of file objects
+ # which means reads might not always be complete, so grabbing as much
+ # of the data as possible with a sleep in between to give it more time
+ # to grab data.
+
+ chunks = []
+ chunk = connection.read()
+ # Keep looping while data is still arriving and the time budget
+ # (`timeout`, decremented once per 1-second sleep) is not exhausted.
+ while chunk and 0 < timeout:
+ chunks.append(chunk)
+ time.sleep(1)
+ timeout -= 1
+ chunk = connection.read()
+ # Append the chunk that terminated the loop: it is non-empty when the
+ # loop stopped because the timeout ran out, and appending an empty
+ # chunk in the other case is harmless.
+ chunks.append(chunk)