Make parser less restrictive and able to report problems
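
Previously the PyYAML-derived parser modified below raised an exception at the first problem it met, so a single malformed file aborted the whole configuration check. After this change the Loader records problems through an echoerr() method that latches a haserrors flag, load() returns a (data, haserrors) tuple instead of bare data, and the lint checker threads that flag through so every file is parsed and all problems are reported in one run.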
commit f75bb9e65b
parent d27f7a0411
@@ -560,8 +560,8 @@ def check_full_segment_data(segment, data, context, echoerr):
 
     ext = data['ext']
     theme_segment_data = context[0][1].get('segment_data', {})
-    top_theme_name = data['main_config'].get('ext', {}).get(ext, {}).get('theme', {})
-    if data['theme'] == top_theme_name:
+    top_theme_name = data['main_config'].get('ext', {}).get(ext, {}).get('theme', None)
+    if not top_theme_name or data['theme'] == top_theme_name:
         top_segment_data = {}
     else:
         top_segment_data = data['ext_theme_configs'].get(top_theme_name, {}).get('segment_data', {})
@@ -860,21 +860,33 @@ def check(path=None):
             'themes' if ext in configs['themes'] else 'colorschemes',
         ))
 
-    main_config = load_json_config(search_paths, 'config', load=load, open=open_file)
+    lhadproblem = [False]
+    def load_config(stream):
+        r, hadproblem = load(stream)
+        if hadproblem:
+            lhadproblem[0] = True
+        return r
+
+    main_config = load_json_config(search_paths, 'config', load=load_config, open=open_file)
     hadproblem = main_spec.match(main_config, data={'configs': configs}, context=(('', main_config),))[1]
 
     import_paths = [os.path.expanduser(path) for path in main_config.get('common', {}).get('paths', [])]
 
-    colors_config = load_json_config(search_paths, 'colors', load=load, open=open_file)
+    colors_config = load_json_config(search_paths, 'colors', load=load_config, open=open_file)
     if colors_spec.match(colors_config, context=(('', colors_config),))[1]:
         hadproblem = True
 
+    if lhadproblem[0]:
+        hadproblem = True
+
     colorscheme_configs = defaultdict(lambda: {})
     for ext in configs['colorschemes']:
         data = {'ext': ext, 'colors_config': colors_config}
         for colorscheme, cfile in configs['colorschemes'][ext].items():
             with open_file(cfile) as config_file_fp:
-                config = load(config_file_fp)
+                config, lhadproblem = load(config_file_fp)
+            if lhadproblem:
+                hadproblem = True
             colorscheme_configs[ext][colorscheme] = config
         if ext == 'vim':
             spec = vim_colorscheme_spec
@@ -887,7 +899,9 @@ def check(path=None):
     for ext in configs['themes']:
         for theme, sfile in configs['themes'][ext].items():
             with open_file(sfile) as config_file_fp:
-                config = load(config_file_fp)
+                config, lhadproblem = load(config_file_fp)
+            if lhadproblem:
+                hadproblem = True
             theme_configs[ext][theme] = config
     for ext, configs in theme_configs.items():
         data = {'ext': ext, 'colorscheme_configs': colorscheme_configs, 'import_paths': import_paths,
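
The `lhadproblem = [False]` one-element list above is the usual Python 2 substitute for `nonlocal`: the `load_config` closure cannot rebind a name in the enclosing `check()` scope, but it can mutate a list that lives there. A minimal self-contained sketch of the pattern; the function and variable names here are illustrative, not from the diff:

    def make_checked_loader(load):
        # Mutable cell: the closure below cannot rebind a local of this
        # function (no `nonlocal` in Python 2), but it can mutate the list.
        hadproblem_cell = [False]

        def load_config(stream):
            r, hadproblem = load(stream)
            if hadproblem:
                hadproblem_cell[0] = True  # latch the error flag
            return r

        return load_config, hadproblem_cell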
@@ -9,51 +9,6 @@ from .loader import *
 
-__version__ = '3.10'
-
-def scan(stream, Loader=Loader):
-    """
-    Scan a YAML stream and produce scanning tokens.
-    """
-    loader = Loader(stream)
-    try:
-        while loader.check_token():
-            yield loader.get_token()
-    finally:
-        loader.dispose()
-
-def parse(stream, Loader=Loader):
-    """
-    Parse a YAML stream and produce parsing events.
-    """
-    loader = Loader(stream)
-    try:
-        while loader.check_event():
-            yield loader.get_event()
-    finally:
-        loader.dispose()
-
-def compose(stream, Loader=Loader):
-    """
-    Parse the first YAML document in a stream
-    and produce the corresponding representation tree.
-    """
-    loader = Loader(stream)
-    try:
-        return loader.get_single_node()
-    finally:
-        loader.dispose()
-
-def compose_all(stream, Loader=Loader):
-    """
-    Parse all YAML documents in a stream
-    and produce corresponding representation trees.
-    """
-    loader = Loader(stream)
-    try:
-        while loader.check_node():
-            yield loader.get_node()
-    finally:
-        loader.dispose()
-
 def load(stream, Loader=Loader):
     """
     Parse the first YAML document in a stream
@@ -61,7 +16,8 @@ def load(stream, Loader=Loader):
     """
     loader = Loader(stream)
     try:
-        return loader.get_single_data()
+        r = loader.get_single_data()
+        return r, loader.haserrors
     finally:
         loader.dispose()
 
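With the hunk above, `load()` now returns a `(data, haserrors)` pair instead of bare data. A minimal sketch of the new calling convention; the import path and input file are assumptions for illustration, only the tuple return comes from the diff:

    from powerline.lint.markedjson import load  # assumed module path

    with open('config.json') as fp:  # hypothetical configuration file
        config, hadproblem = load(fp)
    if hadproblem:
        print('parsed with reported problems; config may be partial')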
@@ -9,6 +9,12 @@ import collections, datetime, base64, binascii, re, sys, types
 from functools import wraps
 
 
+try:
+    from __builtin__ import unicode
+except ImportError:
+    unicode = str
+
+
 def marked(func):
     @wraps(func)
     def f(self, node, *args, **kwargs):
@@ -117,6 +123,7 @@ class BaseConstructor:
         return [self.construct_object(child, deep=deep)
                 for child in node.value]
 
+    @marked
     def construct_mapping(self, node, deep=False):
         if not isinstance(node, MappingNode):
             raise ConstructorError(None, None,
@@ -126,8 +133,17 @@ class BaseConstructor:
         for key_node, value_node in node.value:
             key = self.construct_object(key_node, deep=deep)
             if not isinstance(key, collections.Hashable):
-                raise ConstructorError("while constructing a mapping", node.start_mark,
-                        "found unhashable key", key_node.start_mark)
+                self.echoerr('While constructing a mapping', node.start_mark,
+                        'found unhashable key', key_node.start_mark)
+                continue
+            elif type(key.value) != unicode:
+                self.echoerr('Error while constructing a mapping', node.start_mark,
+                        'found key that is not a string', key_node.start_mark)
+                continue
+            elif key in mapping:
+                self.echoerr('Error while constructing a mapping', node.start_mark,
+                        'found duplicate key', key_node.start_mark)
+                continue
             value = self.construct_object(value_node, deep=deep)
             mapping[key] = value
         return mapping
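Net effect of the constructor changes: an unhashable key, a non-string key, or a duplicate key in a mapping is now reported through `self.echoerr` and the offending entry is skipped, instead of aborting construction with a `ConstructorError`. On a duplicate key the `continue` keeps the first value and drops the later one, so a hypothetical input like `{"a": 1, "a": 2}` would yield `1` alongside a "found duplicate key" report. The new `@marked` decorator on `construct_mapping` presumably attaches position marks to the result, as the `marked()` helper does for other constructors, and the `unicode`/`str` shim lets the string check work on both Python 2 and 3.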
@@ -83,5 +83,5 @@ class MarkedYAMLError(YAMLError):
 
     def __init__(self, context=None, context_mark=None,
             problem=None, problem_mark=None, note=None):
-        YAMLError.__init__(format_error(context, context_mark, problem,
+        YAMLError.__init__(self, format_error(context, context_mark, problem,
                 problem_mark, note))
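The hunk above is a plain bug fix: `YAMLError.__init__` was being called without `self`, so constructing a `MarkedYAMLError` would itself fail before the error message ever reached the user.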
@@ -7,6 +7,7 @@ from .parser import *
 from .composer import *
 from .constructor import *
 from .resolver import *
+from .error import echoerr
 
 class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
 

@@ -17,4 +18,9 @@ class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
         Composer.__init__(self)
         Constructor.__init__(self)
         Resolver.__init__(self)
+        self.haserrors = False
+
+    def echoerr(self, *args, **kwargs):
+        echoerr(*args, **kwargs)
+        self.haserrors = True
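`Loader.echoerr` wraps the module-level `echoerr` from `error.py` so that every reported problem also latches `self.haserrors`; `load()` in the package `__init__` (see the earlier hunk) then hands that flag back to the caller alongside the parsed data.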
@@ -1,6 +1,6 @@
 __all__ = ['Parser', 'ParserError']
 
-from .error import MarkedYAMLError, echoerr
+from .error import MarkedYAMLError
 from .tokens import *
 from .events import *
 from .scanner import *
@@ -99,10 +99,11 @@ class Parser:
         if not self.check_token(StreamEndToken):
             token = self.peek_token()
             start_mark = token.start_mark
-            raise ParserError(None, None,
+            self.echoerr(None, None,
                     "expected '<stream end>', but found %r"
                         % self.peek_token().id,
                     self.peek_token().start_mark)
+            return StreamEndEvent(token.start_mark, token.end_mark)
         else:
             # Parse the end of the stream.
             token = self.get_token()
@@ -173,7 +174,7 @@ class Parser:
             self.get_token()
             if self.check_token(FlowSequenceEndToken):
                 token = self.peek_token()
-                echoerr("While parsing a flow sequence", self.marks[-1],
+                self.echoerr("While parsing a flow sequence", self.marks[-1],
                         "expected sequence value, but got %r" % token.id, token.start_mark)
             else:
                 token = self.peek_token()
@@ -206,7 +207,7 @@ class Parser:
             self.get_token()
             if self.check_token(FlowMappingEndToken):
                 token = self.peek_token()
-                echoerr("While parsing a flow mapping", self.marks[-1],
+                self.echoerr("While parsing a flow mapping", self.marks[-1],
                         "expected mapping key, but got %r" % token.id, token.start_mark)
             else:
                 token = self.peek_token()
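With these parser hunks, problems that used to raise `ParserError` or call the module-level `echoerr` now go through `self.echoerr`, so they both print and latch `haserrors` on the loader, and parsing continues: trailing garbage after the document yields a `StreamEndEvent` instead of an exception, and a premature `]` or `}` in a flow collection, e.g. the trailing comma in a hypothetical `["a", ]`, is reported without aborting the run.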
@@ -351,9 +351,12 @@ class Scanner:
 
     def fetch_plain(self):
 
+        # No simple keys after plain scalars. But note that `scan_plain` will
+        # change this flag if the scan is finished at the beginning of the
+        # line.
         self.save_possible_simple_key()
 
         # No simple keys after plain scalars.
         self.allow_simple_key = False
 
         # Scan and add SCALAR. May change `allow_simple_key`.