# encoding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals

from compose import utils


class TestJsonSplitter(object):
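    # json_splitter should carve one complete JSON document off the front of a
    # text buffer and return (parsed_object, remaining_text), or None when the
    # buffer does not yet contain a complete document.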

    def test_json_splitter_no_object(self):
        data = '{"foo": "bar'
        assert utils.json_splitter(data) is None

    def test_json_splitter_with_object(self):
        data = '{"foo": "bar"}\n \n{"next": "obj"}'
        assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')

    def test_json_splitter_leading_whitespace(self):
        data = '\n \r{"foo": "bar"}\n\n {"next": "obj"}'
        assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')


class TestStreamAsText(object):
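    # stream_as_text should decode each chunk of a byte stream to text; bytes
    # that are not valid UTF-8 are expected to come back as U+FFFD replacement
    # characters instead of raising UnicodeDecodeError.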

    def test_stream_with_non_utf_unicode_character(self):
        stream = [b'\xed\xf3\xf3']
        output, = utils.stream_as_text(stream)
        assert output == '\ufffd\ufffd\ufffd'

    def test_stream_with_utf_character(self):
        stream = ['ěĝ'.encode('utf-8')]
        output, = utils.stream_as_text(stream)
        assert output == 'ěĝ'


class TestJsonStream(object):
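    # json_stream should lazily decode a stream of concatenated JSON documents,
    # yielding every document it finds (including falsy ones such as {} and [])
    # and skipping whitespace between them.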

    def test_with_falsy_entries(self):
        stream = [
            '{"one": "two"}\n{}\n',
            "[1, 2, 3]\n[]\n",
        ]
        output = list(utils.json_stream(stream))
        assert output == [
            {'one': 'two'},
            {},
            [1, 2, 3],
            [],
        ]

    def test_with_leading_whitespace(self):
        stream = [
            '\n \r\n {"one": "two"}{"x": 1}',
            ' {"three": "four"}\t\t{"x": 2}'
        ]
        output = list(utils.json_stream(stream))
        assert output == [
            {'one': 'two'},
            {'x': 1},
            {'three': 'four'},
            {'x': 2}
        ]


class TestParseBytes(object):
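    # parse_bytes should pass through ints and numeric strings, scale suffixed
    # values such as '123kb' to a byte count, and return None for values it
    # cannot parse.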
    def test_parse_bytes(self):
        assert utils.parse_bytes('123kb') == 123 * 1024
        assert utils.parse_bytes(123) == 123
        assert utils.parse_bytes('foobar') is None
        assert utils.parse_bytes('123') == 123


class TestMoreItertools(object):
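    # unique_everseen should yield elements in first-seen order, dropping later
    # duplicates; the optional second argument is used as a key function when
    # deciding whether two elements count as duplicates.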
    def test_unique_everseen(self):
        unique = utils.unique_everseen
        assert list(unique([2, 1, 2, 1])) == [2, 1]
        assert list(unique([2, 1, 2, 1], hash)) == [2, 1]
        assert list(unique([2, 1, 2, 1], lambda x: 'key_%s' % x)) == [2, 1]