Merge pull request #2400 from dnephin/fix_stream_with_empty_object

Fix a bug in progress_stream when an `ADD <url>` was used
This commit is contained in:
Daniel Nephin 2015-11-20 11:33:45 -05:00
commit 46446111e5
2 changed files with 26 additions and 14 deletions

View File

@@ -102,7 +102,7 @@ def stream_as_text(stream):
def line_splitter(buffer, separator=u'\n'):
    """Split the first complete line off the front of *buffer*.

    Returns a ``(line, remainder)`` pair where ``line`` keeps its trailing
    separator, or ``None`` when the buffer does not yet contain a full
    separator-terminated line.
    """
    position = buffer.find(six.text_type(separator))
    if position < 0:
        # No separator found: signal "incomplete" with a single None
        # so callers can distinguish it from an empty-string line.
        return None
    return buffer[:position + 1], buffer[position + 1:]
@@ -120,11 +120,11 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
for data in stream_as_text(stream): for data in stream_as_text(stream):
buffered += data buffered += data
while True: while True:
item, rest = splitter(buffered) buffer_split = splitter(buffered)
if not item: if buffer_split is None:
break break
buffered = rest item, buffered = buffer_split
yield item yield item
if buffered: if buffered:
@@ -140,7 +140,7 @@ def json_splitter(buffer):
rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():] rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():]
return obj, rest return obj, rest
except ValueError: except ValueError:
return None, None return None
def json_stream(stream):
    """Given a stream of text, return a stream of parsed JSON objects.

    This handles streams which are inconsistently buffered (some entries may
    be newline delimited, and others are not).
    """
    # split_buffer already coerces the stream to text, so the raw stream
    # is passed through directly.
    decode = json_decoder.decode
    return split_buffer(stream, json_splitter, decode)
def write_out_msg(stream, lines, msg_index, msg, status="done"): def write_out_msg(stream, lines, msg_index, msg, status="done"):

View File

@@ -1,25 +1,21 @@
# encoding: utf-8 # encoding: utf-8
from __future__ import unicode_literals from __future__ import unicode_literals
from .. import unittest
from compose import utils from compose import utils
class TestJsonSplitter(object):
    """Tests for utils.json_splitter: splitting one JSON object off a buffer."""

    def test_json_splitter_no_object(self):
        # An incomplete JSON document yields None (no complete object yet).
        partial = '{"foo": "bar'
        assert utils.json_splitter(partial) is None

    def test_json_splitter_with_object(self):
        # A complete object is returned along with the unparsed remainder,
        # with any inter-object whitespace stripped.
        buffered = '{"foo": "bar"}\n \n{"next": "obj"}'
        result = utils.json_splitter(buffered)
        assert result == ({'foo': 'bar'}, '{"next": "obj"}')
class StreamAsTextTestCase(unittest.TestCase): class TestStreamAsText(object):
def test_stream_with_non_utf_unicode_character(self): def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3'] stream = [b'\xed\xf3\xf3']
@@ -30,3 +26,19 @@ class StreamAsTextTestCase(unittest.TestCase):
stream = ['ěĝ'.encode('utf-8')] stream = ['ěĝ'.encode('utf-8')]
output, = utils.stream_as_text(stream) output, = utils.stream_as_text(stream)
assert output == 'ěĝ' assert output == 'ěĝ'
class TestJsonStream(object):
    """Tests for utils.json_stream: parsing a stream of JSON documents."""

    def test_with_falsy_entries(self):
        # Falsy-but-valid JSON values ({} and []) must be yielded, not
        # treated as end-of-stream by the splitter loop.
        chunks = [
            '{"one": "two"}\n{}\n',
            "[1, 2, 3]\n[]\n",
        ]
        expected = [
            {'one': 'two'},
            {},
            [1, 2, 3],
            [],
        ]
        assert list(utils.json_stream(chunks)) == expected