Fix powerline style

Specifically, I searched for all lines that are more than one tab off compared to
the previous line with

    BufGrep /\(^\t\+\)\S.*\n\1\t\t\+/

(the pattern matches any non-blank line whose following line starts with the same
leading tabs plus at least two more) and replaced them with something more
appropriate. Most of the time this resulted in a few more newlines, but there are
cases where I used mixed tabs/spaces indentation+alignment.
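
To illustrate, here is the shape of a typical rewrap, reusing one of the echoerr()
calls from the lint hunks below (a sketch only: the rendered diff strips leading
whitespace, so the "before" alignment shown here is approximate, and
indentation-only hunks below show no visible text change):

    # before: continuation lines aligned under the opening parenthesis with tabs + spaces
    echoerr(context=self.cmsg.format(key=context_key(context)),
            context_mark=context_mark,
            problem=msg_func(value),
            problem_mark=value.mark)

    # after: the opening parenthesis ends the line, every argument gets one more
    # indentation level, and the closing parenthesis returns to the statement level
    echoerr(
        context=self.cmsg.format(key=context_key(context)),
        context_mark=context_mark,
        problem=msg_func(value),
        problem_mark=value.mark
    )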
ZyX 2014-08-15 20:58:19 +04:00
parent cdd36aa778
commit ae92d83eae
31 changed files with 516 additions and 383 deletions

View File

@@ -6,3 +6,6 @@
 " [2]: https://github.com/thinca/vim-localrc
 let g:syntastic_python_flake8_args = '--ignore=W191,E501,E128,W291,E126,E101'
 let b:syntastic_checkers = ['flake8']
+unlet! g:python_space_error_highlight
+let g:pymode_syntax_indent_errors = 0
+let g:pymode_syntax_space_errors = 0

View File

@@ -255,11 +255,13 @@ def finish_common_config(common_config):
 if sys.version_info < (3,):
     # `raise exception[0], None, exception[1]` is a SyntaxError in python-3*
     # Not using ('''…''') because this syntax does not work in python-2.6
-    exec(('def reraise(exception):\n'
-          ' if type(exception) is tuple:\n'
-          ' raise exception[0], None, exception[1]\n'
-          ' else:\n'
-          ' raise exception\n'))
+    exec((
+        'def reraise(exception):\n'
+        ' if type(exception) is tuple:\n'
+        ' raise exception[0], None, exception[1]\n'
+        ' else:\n'
+        ' raise exception\n'
+    ))
 else:
     def reraise(exception):
         if type(exception) is tuple:
@@ -302,13 +304,13 @@ class Powerline(object):
     '''
     def __init__(self,
                 ext,
                 renderer_module=None,
                 run_once=False,
                 logger=None,
                 use_daemon_threads=True,
                 shutdown_event=None,
                 config_loader=None):
         self.ext = ext
         self.run_once = run_once
         self.logger = logger
@@ -437,12 +439,16 @@ class Powerline(object):
             or self.ext_config.get('local_themes') != self.prev_ext_config.get('local_themes')
         ):
             self.renderer_options['local_themes'] = self.get_local_themes(self.ext_config.get('local_themes'))
-        load_colorscheme = (load_colorscheme
-                or not self.prev_ext_config
-                or self.prev_ext_config['colorscheme'] != self.ext_config['colorscheme'])
-        load_theme = (load_theme
-                or not self.prev_ext_config
-                or self.prev_ext_config['theme'] != self.ext_config['theme'])
+        load_colorscheme = (
+            load_colorscheme
+            or not self.prev_ext_config
+            or self.prev_ext_config['colorscheme'] != self.ext_config['colorscheme']
+        )
+        load_theme = (
+            load_theme
+            or not self.prev_ext_config
+            or self.prev_ext_config['theme'] != self.ext_config['theme']
+        )
         self.prev_ext_config = self.ext_config
         create_renderer = load_colors or load_colorscheme or load_theme or common_config_differs or ext_config_differs

View File

@@ -86,8 +86,9 @@ else:
         if not buffer or buffer.number == vim.current.buffer.number:
             return int(vim.eval('exists("b:{0}")'.format(varname)))
         else:
-            return int(vim.eval('has_key(getbufvar({0}, ""), {1})'
-                .format(buffer.number, varname)))
+            return int(vim.eval(
+                'has_key(getbufvar({0}, ""), {1})'.format(buffer.number, varname)
+            ))

     def vim_getwinvar(segment_info, varname): # NOQA
         result = vim.eval('getwinvar({0}, "{1}")'.format(segment_info['winnr'], varname))
@@ -210,9 +211,15 @@ class VimEnviron(object):
     @staticmethod
     def __setitem__(key, value):
-        return vim.command('let $' + key + '="'
-            + value.replace('"', '\\"').replace('\\', '\\\\').replace('\n', '\\n').replace('\0', '')
-            + '"')
+        return vim.command(
+            'let ${0}="{1}"'.format(
+                key,
+                value.replace('"', '\\"')
+                .replace('\\', '\\\\')
+                .replace('\n', '\\n')
+                .replace('\0', '')
+            )
+        )

 if sys.version_info < (3,):

View File

@@ -97,9 +97,10 @@ class INotify(object):
     ONESHOT = 0x80000000 # Only send event once.
     # All events which a program can wait on.
-    ALL_EVENTS = (ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE |
-                  OPEN | MOVED_FROM | MOVED_TO | CREATE | DELETE |
-                  DELETE_SELF | MOVE_SELF)
+    ALL_EVENTS = (
+        ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE | OPEN |
+        MOVED_FROM | MOVED_TO | CREATE | DELETE | DELETE_SELF | MOVE_SELF
+    )

     # See <bits/inotify.h>
     CLOEXEC = 0x80000

View File

@@ -82,8 +82,11 @@ except ImportError:
         # Additionally check that `file` is not a directory, as on Windows
         # directories pass the os.access check.
         def _access_check(fn, mode):
-            return (os.path.exists(fn) and os.access(fn, mode)
-                    and not os.path.isdir(fn))
+            return (
+                os.path.exists(fn)
+                and os.access(fn, mode)
+                and not os.path.isdir(fn)
+            )

         # If we're given a path with a directory part, look it up directly rather
         # than referring to PATH directories. This includes checking relative to the
@@ -101,7 +104,7 @@ except ImportError:
         if sys.platform == "win32":
             # The current directory takes precedence on Windows.
-            if not os.curdir in path:
+            if os.curdir not in path:
                 path.insert(0, os.curdir)

             # PATHEXT is necessary to check on Windows.
@@ -122,7 +125,7 @@ except ImportError:
         seen = set()
         for dir in path:
             normdir = os.path.normcase(dir)
-            if not normdir in seen:
+            if normdir not in seen:
                 seen.add(normdir)
                 for thefile in files:
                     name = os.path.join(dir, thefile)

View File

@@ -94,13 +94,15 @@ class INotifyTreeWatcher(INotify):
     def add_watch(self, path):
         import ctypes
         bpath = path if isinstance(path, bytes) else path.encode(self.fenc)
-        wd = self._add_watch(self._inotify_fd, ctypes.c_char_p(bpath),
-            # Ignore symlinks and watch only directories
-            self.DONT_FOLLOW | self.ONLYDIR |
-            self.MODIFY | self.CREATE | self.DELETE |
-            self.MOVE_SELF | self.MOVED_FROM | self.MOVED_TO |
-            self.ATTRIB | self.DELETE_SELF)
+        wd = self._add_watch(
+            self._inotify_fd, ctypes.c_char_p(bpath),
+            # Ignore symlinks and watch only directories
+            self.DONT_FOLLOW | self.ONLYDIR |
+            self.MODIFY | self.CREATE | self.DELETE |
+            self.MOVE_SELF | self.MOVED_FROM | self.MOVED_TO |
+            self.ATTRIB | self.DELETE_SELF
+        )
         if wd == -1:
             eno = ctypes.get_errno()
             if eno == errno.ENOTDIR:

View File

@@ -139,13 +139,14 @@ try:
                 untracked_column = 'U'
                 continue
-            if status & (git.GIT_STATUS_WT_DELETED
-                    | git.GIT_STATUS_WT_MODIFIED):
+            if status & (git.GIT_STATUS_WT_DELETED | git.GIT_STATUS_WT_MODIFIED):
                 wt_column = 'D'
-            if status & (git.GIT_STATUS_INDEX_NEW
-                    | git.GIT_STATUS_INDEX_MODIFIED
-                    | git.GIT_STATUS_INDEX_DELETED):
+            if status & (
+                git.GIT_STATUS_INDEX_NEW
+                | git.GIT_STATUS_INDEX_MODIFIED
+                | git.GIT_STATUS_INDEX_DELETED
+            ):
                 index_column = 'I'
         r = wt_column + index_column + untracked_column
         return r if r != ' ' else None

View File

@@ -128,14 +128,16 @@ class Spec(object):
     def check_type(self, value, context_mark, data, context, echoerr, types):
         if type(value.value) not in types:
-            echoerr(context=self.cmsg.format(key=context_key(context)),
-                    context_mark=context_mark,
-                    problem='{0!r} must be a {1} instance, not {2}'.format(
-                        value,
-                        list_sep.join((t.__name__ for t in types)),
-                        type(value.value).__name__
-                    ),
-                    problem_mark=value.mark)
+            echoerr(
+                context=self.cmsg.format(key=context_key(context)),
+                context_mark=context_mark,
+                problem='{0!r} must be a {1} instance, not {2}'.format(
+                    value,
+                    list_sep.join((t.__name__ for t in types)),
+                    type(value.value).__name__
+                ),
+                problem_mark=value.mark
+            )
             return False, True
         return True, False
@@ -143,9 +145,9 @@ class Spec(object):
         proceed, echo, hadproblem = func(value, data, context, echoerr)
         if echo and hadproblem:
             echoerr(context=self.cmsg.format(key=context_key(context)),
                     context_mark=context_mark,
                     problem=msg_func(value),
                     problem_mark=value.mark)
         return proceed, hadproblem

     def check_list(self, value, context_mark, data, context, echoerr, item_func, msg_func):
@@ -165,9 +167,9 @@ class Spec(object):
             proceed, echo, fhadproblem = item_func(item, data, context, echoerr)
             if echo and fhadproblem:
                 echoerr(context=self.cmsg.format(key=context_key(context) + '/list item ' + unicode(i)),
                         context_mark=value.mark,
                         problem=msg_func(item),
                         problem_mark=item.mark)
             if fhadproblem:
                 hadproblem = True
             if not proceed:
@@ -376,9 +378,9 @@ class Spec(object):
                 if not valspec.isoptional:
                     hadproblem = True
                     echoerr(context=self.cmsg.format(key=context_key(context)),
                             context_mark=None,
                             problem='required key is missing: {0}'.format(key),
                             problem_mark=value.mark)
         for key in value.keys():
             if key not in self.keys:
                 for keyfunc, vali in self.uspecs:
@@ -405,9 +407,9 @@ class Spec(object):
                         hadproblem = True
                     if self.ufailmsg:
                         echoerr(context=self.cmsg.format(key=context_key(context)),
                                 context_mark=None,
                                 problem=self.ufailmsg(key),
                                 problem_mark=key.mark)

         return True, hadproblem
@@ -435,19 +437,19 @@ def check_matcher_func(ext, match_name, data, context, echoerr):
         func = getattr(__import__(str(match_module), fromlist=[str(match_function)]), str(match_function))
     except ImportError:
         echoerr(context='Error while loading matcher functions',
                 problem='failed to load module {0}'.format(match_module),
                 problem_mark=match_name.mark)
         return True, True
     except AttributeError:
         echoerr(context='Error while loading matcher functions',
                 problem='failed to load matcher function {0}'.format(match_function),
                 problem_mark=match_name.mark)
         return True, True

     if not callable(func):
         echoerr(context='Error while loading matcher functions',
                 problem='loaded "function" {0} is not callable'.format(match_function),
                 problem_mark=match_name.mark)
         return True, True

     if hasattr(func, 'func_code') and hasattr(func.func_code, 'co_argcount'):
@@ -470,15 +472,15 @@ def check_ext(ext, data, context, echoerr):
     if ext not in data['lists']['exts']:
         hadproblem = True
         echoerr(context='Error while loading {0} extension configuration'.format(ext),
                 context_mark=ext.mark,
                 problem='extension configuration does not exist')
     else:
         for typ in ('themes', 'colorschemes'):
             if ext not in data['configs'][typ] and not data['configs']['top_' + typ]:
                 hadproblem = True
                 echoerr(context='Error while loading {0} extension configuration'.format(ext),
                         context_mark=ext.mark,
                         problem='{0} configuration does not exist'.format(typ))
             else:
                 hadsomedirs = True
     return hadsomedirs, hadproblem
@@ -492,14 +494,16 @@ def check_config(d, theme, data, context, echoerr):
         ext = context[-3][0]
     if ext not in data['lists']['exts']:
         echoerr(context='Error while loading {0} extension configuration'.format(ext),
                 context_mark=ext.mark,
                 problem='extension configuration does not exist')
         return True, False, True
-    if ((ext not in data['configs'][d] or theme not in data['configs'][d][ext])
-            and theme not in data['configs']['top_' + d]):
+    if (
+        (ext not in data['configs'][d] or theme not in data['configs'][d][ext])
+        and theme not in data['configs']['top_' + d]
+    ):
         echoerr(context='Error while loading {0} from {1} extension configuration'.format(d[:-1], ext),
                 problem='failed to find configuration file {0}/{1}/{2}.json'.format(d, ext, theme),
                 problem_mark=theme.mark)
         return True, False, True
     return True, False, False
@@ -507,9 +511,9 @@ def check_config(d, theme, data, context, echoerr):
 def check_top_theme(theme, data, context, echoerr):
     if theme not in data['configs']['top_themes']:
         echoerr(context='Error while checking extension configuration (key {key})'.format(key=context_key(context)),
                 context_mark=context[-2][0].mark,
                 problem='failed to find top theme {0}'.format(theme),
                 problem_mark=theme.mark)
         return True, False, True
     return True, False, False
@@ -778,8 +782,8 @@ def check_key_compatibility(segment, data, context, echoerr):
     if segment_type not in type_keys:
         echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)),
                 problem='found segment with unknown type {0}'.format(segment_type),
                 problem_mark=segment_type.mark)
         return False, False, True

     hadproblem = False
@@ -828,8 +832,8 @@ def check_segment_module(module, data, context, echoerr):
         if echoerr.logger.level >= logging.DEBUG:
             echoerr.logger.exception(e)
         echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)),
                 problem='failed to import module {0}'.format(module),
                 problem_mark=module.mark)
         return True, False, True
     return True, False, False
@@ -878,19 +882,19 @@ def import_segment(name, data, context, echoerr, module=None):
         func = getattr(__import__(str(module), fromlist=[str(name)]), str(name))
     except ImportError:
         echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)),
                 problem='failed to import module {0}'.format(module),
                 problem_mark=module.mark)
         return None
     except AttributeError:
         echoerr(context='Error while loading segment function (key {key})'.format(key=context_key(context)),
                 problem='failed to load function {0} from module {1}'.format(name, module),
                 problem_mark=name.mark)
         return None

     if not callable(func):
         echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)),
                 problem='imported "function" {0} from module {1} is not callable'.format(name, module),
                 problem_mark=module.mark)
         return None

     return func
@@ -933,11 +937,15 @@ def check_segment_name(name, data, context, echoerr):
     if hl_groups:
         greg = re.compile(r'``([^`]+)``( \(gradient\))?')
-        hl_groups = [[greg.match(subs).groups() for subs in s.split(' or ')]
-                for s in (list_sep.join(hl_groups)).split(', ')]
+        hl_groups = [
+            [greg.match(subs).groups() for subs in s.split(' or ')]
+            for s in (list_sep.join(hl_groups)).split(', ')
+        ]
         for required_pack in hl_groups:
-            rs = [hl_exists(hl_group, data, context, echoerr, allow_gradients=('force' if gradient else False))
-                    for hl_group, gradient in required_pack]
+            rs = [
+                hl_exists(hl_group, data, context, echoerr, allow_gradients=('force' if gradient else False))
+                for hl_group, gradient in required_pack
+            ]
             if all(rs):
                 echoerr(
                     context='Error while checking theme (key {key})'.format(key=context_key(context)),
@@ -983,8 +991,8 @@ def check_segment_name(name, data, context, echoerr):
             and not any(((name in theme.get('segment_data', {})) for theme in data['top_themes'].values()))
         ):
             echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)),
                     problem='found useless use of name key (such name is not present in theme/segment_data)',
                     problem_mark=name.mark)

     return True, False, False
@@ -1025,14 +1033,14 @@ def hl_exists(hl_group, data, context, echoerr, allow_gradients=False):
             r.append(colorscheme)
             continue
         if allow_gradients == 'force' and not hadgradient:
             echoerr(
                 context='Error while checking highlight group in theme (key {key})'.format(
                     key=context_key(context)),
                 context_mark=getattr(hl_group, 'mark', None),
                 problem='group {0} should have at least one gradient color, but it has no'.format(hl_group),
                 problem_mark=group_config.mark
             )
         r.append(colorscheme)
     return r
@@ -1109,8 +1117,8 @@ def check_segment_data_key(key, data, context, echoerr):
     else:
         if data['theme_type'] != 'top':
             echoerr(context='Error while checking segment data',
                     problem='found key {0} that cannot be associated with any segment'.format(key),
                     problem_mark=key.mark)
             return True, False, True

     return True, False, False
@@ -1141,9 +1149,9 @@ def check_args_variant(func, args, data, context, echoerr):
     if not all_args >= present_args:
         echoerr(context='Error while checking segment arguments (key {key})'.format(key=context_key(context)),
                 context_mark=args.mark,
                 problem='found unknown keys: {0}'.format(list_sep.join(present_args - all_args)),
                 problem_mark=next(iter(present_args - all_args)).mark)
         hadproblem = True

     if isinstance(func, ThreadedSegment):
@@ -1179,8 +1187,8 @@ def check_args(get_functions, args, data, context, echoerr):
         new_echoerr.echo_all()
     else:
         echoerr(context='Error while checking segment arguments (key {key})'.format(key=context_key(context)),
                 context_mark=context[-2][1].mark,
                 problem='no suitable segments found')

     return True, False, hadproblem

View File

@@ -38,9 +38,12 @@ class Composer:
         # Ensure that the stream contains no more documents.
         if not self.check_event(StreamEndEvent):
             event = self.get_event()
-            raise ComposerError("expected a single document in the stream",
-                    document.start_mark, "but found another document",
-                    event.start_mark)
+            raise ComposerError(
+                "expected a single document in the stream",
+                document.start_mark,
+                "but found another document",
+                event.start_mark
+            )

         # Drop the STREAM-END event.
         self.get_event()
@@ -75,8 +78,7 @@ class Composer:
         tag = event.tag
         if tag is None or tag == '!':
             tag = self.resolve(ScalarNode, event.value, event.implicit, event.start_mark)
-        node = ScalarNode(tag, event.value,
-                event.start_mark, event.end_mark, style=event.style)
+        node = ScalarNode(tag, event.value, event.start_mark, event.end_mark, style=event.style)
         return node

     def compose_sequence_node(self):
@@ -84,9 +86,7 @@ class Composer:
         tag = start_event.tag
         if tag is None or tag == '!':
             tag = self.resolve(SequenceNode, None, start_event.implicit)
-        node = SequenceNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+        node = SequenceNode(tag, [], start_event.start_mark, None, flow_style=start_event.flow_style)
         index = 0
         while not self.check_event(SequenceEndEvent):
             node.value.append(self.compose_node(node, index))
@@ -100,17 +100,15 @@ class Composer:
         tag = start_event.tag
         if tag is None or tag == '!':
             tag = self.resolve(MappingNode, None, start_event.implicit)
-        node = MappingNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+        node = MappingNode(tag, [], start_event.start_mark, None, flow_style=start_event.flow_style)
         while not self.check_event(MappingEndEvent):
-            #key_event = self.peek_event()
+            # key_event = self.peek_event()
             item_key = self.compose_node(node, None)
-            #if item_key in node.value:
+            # if item_key in node.value:
             # raise ComposerError("while composing a mapping", start_event.start_mark,
             # "found duplicate key", key_event.start_mark)
             item_value = self.compose_node(node, item_key)
-            #node.value[item_key] = item_value
+            # node.value[item_key] = item_value
             node.value.append((item_key, item_value))
         end_event = self.get_event()
         node.end_mark = end_event.end_mark

View File

@@ -95,39 +95,53 @@ class BaseConstructor:
     @marked
     def construct_scalar(self, node):
         if not isinstance(node, ScalarNode):
-            raise ConstructorError(None, None,
-                    "expected a scalar node, but found %s" % node.id,
-                    node.start_mark)
+            raise ConstructorError(
+                None, None,
+                "expected a scalar node, but found %s" % node.id,
+                node.start_mark
+            )
         return node.value

     def construct_sequence(self, node, deep=False):
         if not isinstance(node, SequenceNode):
-            raise ConstructorError(None, None,
-                    "expected a sequence node, but found %s" % node.id,
-                    node.start_mark)
-        return [self.construct_object(child, deep=deep)
-                for child in node.value]
+            raise ConstructorError(
+                None, None,
+                "expected a sequence node, but found %s" % node.id,
+                node.start_mark
+            )
+        return [
+            self.construct_object(child, deep=deep)
+            for child in node.value
+        ]

     @marked
     def construct_mapping(self, node, deep=False):
         if not isinstance(node, MappingNode):
-            raise ConstructorError(None, None,
-                    "expected a mapping node, but found %s" % node.id,
-                    node.start_mark)
+            raise ConstructorError(
+                None, None,
+                "expected a mapping node, but found %s" % node.id,
+                node.start_mark
+            )
         mapping = {}
         for key_node, value_node in node.value:
             key = self.construct_object(key_node, deep=deep)
             if not isinstance(key, collections.Hashable):
-                self.echoerr('While constructing a mapping', node.start_mark,
-                        'found unhashable key', key_node.start_mark)
+                self.echoerr(
+                    'While constructing a mapping', node.start_mark,
+                    'found unhashable key', key_node.start_mark
+                )
                 continue
             elif type(key.value) != unicode:
-                self.echoerr('Error while constructing a mapping', node.start_mark,
-                        'found key that is not a string', key_node.start_mark)
+                self.echoerr(
+                    'Error while constructing a mapping', node.start_mark,
+                    'found key that is not a string', key_node.start_mark
+                )
                 continue
             elif key in mapping:
-                self.echoerr('Error while constructing a mapping', node.start_mark,
-                        'found duplicate key', key_node.start_mark)
+                self.echoerr(
+                    'Error while constructing a mapping', node.start_mark,
+                    'found duplicate key', key_node.start_mark
+                )
                 continue
             value = self.construct_object(value_node, deep=deep)
             mapping[key] = value
@@ -135,7 +149,7 @@ class BaseConstructor:
     @classmethod
     def add_constructor(cls, tag, constructor):
-        if not 'yaml_constructors' in cls.__dict__:
+        if 'yaml_constructors' not in cls.__dict__:
             cls.yaml_constructors = cls.yaml_constructors.copy()
         cls.yaml_constructors[tag] = constructor
@@ -162,19 +176,24 @@ class Constructor(BaseConstructor):
                     submerge = []
                     for subnode in value_node.value:
                         if not isinstance(subnode, MappingNode):
-                            raise ConstructorError("while constructing a mapping",
-                                    node.start_mark,
-                                    "expected a mapping for merging, but found %s"
-                                    % subnode.id, subnode.start_mark)
+                            raise ConstructorError(
+                                "while constructing a mapping",
+                                node.start_mark,
+                                "expected a mapping for merging, but found %s" % subnode.id,
+                                subnode.start_mark
+                            )
                         self.flatten_mapping(subnode)
                         submerge.append(subnode.value)
                     submerge.reverse()
                     for value in submerge:
                         merge.extend(value)
                 else:
-                    raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "expected a mapping or list of mappings for merging, but found %s"
-                            % value_node.id, value_node.start_mark)
+                    raise ConstructorError(
+                        "while constructing a mapping",
+                        node.start_mark,
+                        ("expected a mapping or list of mappings for merging, but found %s" % value_node.id),
+                        value_node.start_mark
+                    )
             elif key_node.tag == 'tag:yaml.org,2002:value':
                 key_node.tag = 'tag:yaml.org,2002:str'
                 index += 1
@@ -237,9 +256,11 @@ class Constructor(BaseConstructor):
         data.update(value)

     def construct_undefined(self, node):
-        raise ConstructorError(None, None,
-                "could not determine a constructor for the tag %r" % node.tag,
-                node.start_mark)
+        raise ConstructorError(
+            None, None,
+            "could not determine a constructor for the tag %r" % node.tag,
+            node.start_mark
+        )

 Constructor.add_constructor(

View File

@@ -53,13 +53,15 @@ class Mark:
                 break
         snippet = [self.buffer[start:self.pointer], self.buffer[self.pointer], self.buffer[self.pointer + 1:end]]
         snippet = [strtrans(s) for s in snippet]
-        return (' ' * indent + head + ''.join(snippet) + tail + '\n'
-                + ' ' * (indent + len(head) + len(snippet[0])) + '^')
+        return (
+            ' ' * indent + head + ''.join(snippet) + tail + '\n'
+            + ' ' * (indent + len(head) + len(snippet[0])) + '^'
+        )

     def __str__(self):
         snippet = self.get_snippet()
-        where = (" in \"%s\", line %d, column %d"
-                % (self.name, self.line + 1, self.column + 1))
+        where = (" in \"%s\", line %d, column %d" % (
+            self.name, self.line + 1, self.column + 1))
         if snippet is not None:
             where += ":\n" + snippet
         if type(where) is str:
@@ -77,11 +79,15 @@ def format_error(context=None, context_mark=None, problem=None, problem_mark=Non
     lines = []
     if context is not None:
         lines.append(context)
-    if context_mark is not None \
-            and (problem is None or problem_mark is None
-                or context_mark.name != problem_mark.name
-                or context_mark.line != problem_mark.line
-                or context_mark.column != problem_mark.column):
+    if (
+        context_mark is not None
+        and (
+            problem is None or problem_mark is None
+            or context_mark.name != problem_mark.name
+            or context_mark.line != problem_mark.line
+            or context_mark.column != problem_mark.column
+        )
+    ):
         lines.append(str(context_mark))
     if problem is not None:
         lines.append(problem)
@@ -93,7 +99,5 @@ def format_error(context=None, context_mark=None, problem=None, problem_mark=Non
 class MarkedError(Exception):
-    def __init__(self, context=None, context_mark=None,
-            problem=None, problem_mark=None, note=None):
-        Exception.__init__(self, format_error(context, context_mark, problem,
-                problem_mark, note))
+    def __init__(self, context=None, context_mark=None, problem=None, problem_mark=None, note=None):
+        Exception.__init__(self, format_error(context, context_mark, problem, problem_mark, note))

View File

@@ -7,10 +7,14 @@ class Event(object):
         self.end_mark = end_mark

     def __repr__(self):
-        attributes = [key for key in ['implicit', 'value']
-                if hasattr(self, key)]
-        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
-                for key in attributes])
+        attributes = [
+            key for key in ['implicit', 'value']
+            if hasattr(self, key)
+        ]
+        arguments = ', '.join([
+            '%s=%r' % (key, getattr(self, key))
+            for key in attributes
+        ])
         return '%s(%s)' % (self.__class__.__name__, arguments)
@@ -21,8 +25,7 @@ class NodeEvent(Event):
 class CollectionStartEvent(NodeEvent):
-    def __init__(self, implicit, start_mark=None, end_mark=None,
-            flow_style=None):
+    def __init__(self, implicit, start_mark=None, end_mark=None, flow_style=None):
         self.tag = None
         self.implicit = implicit
         self.start_mark = start_mark
@@ -49,8 +52,7 @@ class StreamEndEvent(Event):
 class DocumentStartEvent(Event):
-    def __init__(self, start_mark=None, end_mark=None,
-            explicit=None, version=None, tags=None):
+    def __init__(self, start_mark=None, end_mark=None, explicit=None, version=None, tags=None):
         self.start_mark = start_mark
         self.end_mark = end_mark
         self.explicit = explicit
@@ -59,8 +61,7 @@ class DocumentStartEvent(Event):
 class DocumentEndEvent(Event):
-    def __init__(self, start_mark=None, end_mark=None,
-            explicit=None):
+    def __init__(self, start_mark=None, end_mark=None, explicit=None):
         self.start_mark = start_mark
         self.end_mark = end_mark
         self.explicit = explicit
@@ -71,8 +72,7 @@ class AliasEvent(NodeEvent):
 class ScalarEvent(NodeEvent):
-    def __init__(self, implicit, value,
-            start_mark=None, end_mark=None, style=None):
+    def __init__(self, implicit, value, start_mark=None, end_mark=None, style=None):
         self.tag = None
         self.implicit = implicit
         self.value = value

View File

@@ -110,11 +110,15 @@ def gen_marked_value(value, mark, use_special_classes=True):
         elif func not in set(('__init__', '__new__', '__getattribute__')):
             if func in set(('__eq__',)):
                 # HACK to make marked dictionaries always work
-                exec (('def {0}(self, *args):\n'
-                    ' return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])').format(func))
+                exec ((
+                    'def {0}(self, *args):\n'
+                    ' return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])'
+                ).format(func))
             else:
-                exec (('def {0}(self, *args, **kwargs):\n'
-                    ' return self.value.{0}(*args, **kwargs)\n').format(func))
+                exec ((
+                    'def {0}(self, *args, **kwargs):\n'
+                    ' return self.value.{0}(*args, **kwargs)\n'
+                ).format(func))

     classcache[value.__class__] = Marked
     return Marked(value, mark)

View File

@@ -7,18 +7,18 @@ class Node(object):
     def __repr__(self):
         value = self.value
-        #if isinstance(value, list):
+        # if isinstance(value, list):
         # if len(value) == 0:
         # value = '<empty>'
         # elif len(value) == 1:
         # value = '<1 item>'
         # else:
         # value = '<%d items>' % len(value)
-        #else:
+        # else:
         # if len(value) > 75:
         # value = repr(value[:70]+u' ... ')
         # else:
         # value = repr(value)
         value = repr(value)
         return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
@@ -26,8 +26,7 @@ class Node(object):
 class ScalarNode(Node):
     id = 'scalar'
-    def __init__(self, tag, value,
-            start_mark=None, end_mark=None, style=None):
+    def __init__(self, tag, value, start_mark=None, end_mark=None, style=None):
         self.tag = tag
         self.value = value
         self.start_mark = start_mark
@@ -36,8 +35,7 @@ class ScalarNode(Node):
 class CollectionNode(Node):
-    def __init__(self, tag, value,
-            start_mark=None, end_mark=None, flow_style=None):
+    def __init__(self, tag, value, start_mark=None, end_mark=None, flow_style=None):
         self.tag = tag
         self.value = value
         self.start_mark = start_mark

View File

@@ -58,8 +58,7 @@ class Parser:
     def parse_stream_start(self):
         # Parse the stream start.
         token = self.get_token()
-        event = StreamStartEvent(token.start_mark, token.end_mark,
-                encoding=token.encoding)
+        event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)

         # Prepare the next state.
         self.state = self.parse_implicit_document_start
@@ -86,9 +85,10 @@ class Parser:
         # Parse an explicit document.
         if not self.check_token(StreamEndToken):
             token = self.peek_token()
-            self.echoerr(None, None,
-                    "expected '<stream end>', but found %r" % token.id,
-                    token.start_mark)
+            self.echoerr(
+                None, None,
+                ("expected '<stream end>', but found %r" % token.id), token.start_mark
+            )
             return StreamEndEvent(token.start_mark, token.end_mark)
         else:
             # Parse the end of the stream.
@@ -127,24 +127,23 @@ class Parser:
                 implicit = (True, False)
             else:
                 implicit = (False, True)
-            event = ScalarEvent(implicit, token.value,
-                    start_mark, end_mark, style=token.style)
+            event = ScalarEvent(implicit, token.value, start_mark, end_mark, style=token.style)
             self.state = self.states.pop()
         elif self.check_token(FlowSequenceStartToken):
             end_mark = self.peek_token().end_mark
-            event = SequenceStartEvent(implicit,
-                    start_mark, end_mark, flow_style=True)
+            event = SequenceStartEvent(implicit, start_mark, end_mark, flow_style=True)
             self.state = self.parse_flow_sequence_first_entry
         elif self.check_token(FlowMappingStartToken):
             end_mark = self.peek_token().end_mark
-            event = MappingStartEvent(implicit,
-                    start_mark, end_mark, flow_style=True)
+            event = MappingStartEvent(implicit, start_mark, end_mark, flow_style=True)
             self.state = self.parse_flow_mapping_first_key
         else:
             token = self.peek_token()
-            raise ParserError("while parsing a flow node", start_mark,
-                    "expected the node content, but found %r" % token.id,
-                    token.start_mark)
+            raise ParserError(
+                "while parsing a flow node", start_mark,
+                "expected the node content, but found %r" % token.id,
+                token.start_mark
+            )
         return event

     def parse_flow_sequence_first_entry(self):
@@ -159,12 +158,16 @@ class Parser:
                 self.get_token()
                 if self.check_token(FlowSequenceEndToken):
                     token = self.peek_token()
-                    self.echoerr("While parsing a flow sequence", self.marks[-1],
-                            "expected sequence value, but got %r" % token.id, token.start_mark)
+                    self.echoerr(
+                        "While parsing a flow sequence", self.marks[-1],
+                        ("expected sequence value, but got %r" % token.id), token.start_mark
+                    )
             else:
                 token = self.peek_token()
-                raise ParserError("while parsing a flow sequence", self.marks[-1],
-                        "expected ',' or ']', but got %r" % token.id, token.start_mark)
+                raise ParserError(
+                    "while parsing a flow sequence", self.marks[-1],
+                    ("expected ',' or ']', but got %r" % token.id), token.start_mark
+                )

         if not self.check_token(FlowSequenceEndToken):
             self.states.append(self.parse_flow_sequence_entry)
@@ -192,22 +195,27 @@ class Parser:
                 self.get_token()
                 if self.check_token(FlowMappingEndToken):
                     token = self.peek_token()
-                    self.echoerr("While parsing a flow mapping", self.marks[-1],
-                            "expected mapping key, but got %r" % token.id, token.start_mark)
+                    self.echoerr(
+                        "While parsing a flow mapping", self.marks[-1],
+                        ("expected mapping key, but got %r" % token.id), token.start_mark
+                    )
             else:
                 token = self.peek_token()
-                raise ParserError("while parsing a flow mapping", self.marks[-1],
-                        "expected ',' or '}', but got %r" % token.id, token.start_mark)
+                raise ParserError(
+                    "while parsing a flow mapping", self.marks[-1],
+                    ("expected ',' or '}', but got %r" % token.id), token.start_mark
+                )

         if self.check_token(KeyToken):
             token = self.get_token()
-            if not self.check_token(ValueToken,
-                    FlowEntryToken, FlowMappingEndToken):
+            if not self.check_token(ValueToken, FlowEntryToken, FlowMappingEndToken):
                 self.states.append(self.parse_flow_mapping_value)
                 return self.parse_node()
             else:
                 token = self.peek_token()
-                raise ParserError("while parsing a flow mapping", self.marks[-1],
-                        "expected value, but got %r" % token.id, token.start_mark)
+                raise ParserError(
+                    "while parsing a flow mapping", self.marks[-1],
+                    ("expected value, but got %r" % token.id), token.start_mark
+                )
         elif not self.check_token(FlowMappingEndToken):
             token = self.peek_token()
             expect_key = self.check_token(ValueToken, FlowEntryToken)
@@ -216,12 +224,16 @@ class Parser:
                 expect_key = self.check_token(ValueToken)

             if expect_key:
-                raise ParserError("while parsing a flow mapping", self.marks[-1],
-                        "expected string key, but got %r" % token.id, token.start_mark)
+                raise ParserError(
+                    "while parsing a flow mapping", self.marks[-1],
+                    ("expected string key, but got %r" % token.id), token.start_mark
+                )
             else:
                 token = self.peek_token()
-                raise ParserError("while parsing a flow mapping", self.marks[-1],
-                        "expected ':', but got %r" % token.id, token.start_mark)
+                raise ParserError(
+                    "while parsing a flow mapping", self.marks[-1],
+                    ("expected ':', but got %r" % token.id), token.start_mark
+                )

         token = self.get_token()
         event = MappingEndEvent(token.start_mark, token.end_mark)
         self.state = self.states.pop()
@@ -236,5 +248,7 @@ class Parser:
             return self.parse_node()

         token = self.peek_token()
-        raise ParserError("while parsing a flow mapping", self.marks[-1],
-                "expected mapping value, but got %r" % token.id, token.start_mark)
+        raise ParserError(
+            "while parsing a flow mapping", self.marks[-1],
+            ("expected mapping value, but got %r" % token.id), token.start_mark
+        )

View File

@@ -89,9 +89,11 @@ class Reader(object):
         match = NON_PRINTABLE.search(data)
         if match:
             self.update_pointer(match.start())
-            raise ReaderError('while reading from stream', None,
-                    'found special characters which are not allowed',
-                    Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer))
+            raise ReaderError(
+                'while reading from stream', None,
+                'found special characters which are not allowed',
+                Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer)
+            )

     def update(self, length):
         if self.raw_buffer is None:
@@ -102,8 +104,7 @@ class Reader(object):
             if not self.eof:
                 self.update_raw()
             try:
-                data, converted = self.raw_decode(self.raw_buffer,
-                        'strict', self.eof)
+                data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof)
             except UnicodeDecodeError as exc:
                 character = self.raw_buffer[exc.start]
                 position = self.stream_pointer - len(self.raw_buffer) + exc.start
@@ -112,9 +113,11 @@ class Reader(object):
                 self.full_buffer += data + '<' + str(ord(character)) + '>'
                 self.raw_buffer = self.raw_buffer[converted:]
                 self.update_pointer(exc.start - 1)
-                raise ReaderError('while reading from stream', None,
-                        'found character #x%04x that cannot be decoded by UTF-8 codec' % ord(character),
-                        Mark(self.name, self.line, self.column, self.full_buffer, position))
+                raise ReaderError(
+                    'while reading from stream', None,
+                    'found character #x%04x that cannot be decoded by UTF-8 codec' % ord(character),
+                    Mark(self.name, self.line, self.column, self.full_buffer, position)
+                )
             self.buffer += data
             self.full_buffer += data
             self.raw_buffer = self.raw_buffer[converted:]

View File

@@ -24,7 +24,7 @@ class BaseResolver:
     @classmethod
     def add_implicit_resolver(cls, tag, regexp, first):
-        if not 'yaml_implicit_resolvers' in cls.__dict__:
+        if 'yaml_implicit_resolvers' not in cls.__dict__:
             cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
         if first is None:
             first = [None]
@@ -39,8 +39,7 @@ class BaseResolver:
         if current_node:
             depth = len(self.resolver_prefix_paths)
             for path, kind in self.resolver_prefix_paths[-1]:
-                if self.check_resolver_prefix(depth, path, kind,
-                        current_node, current_index):
+                if self.check_resolver_prefix(depth, path, kind, current_node, current_index):
                     if len(path) > depth:
                         prefix_paths.append((path, kind))
                 else:
@@ -60,8 +59,7 @@ class BaseResolver:
         self.resolver_exact_paths.pop()
         self.resolver_prefix_paths.pop()

-    def check_resolver_prefix(self, depth, path, kind,
-            current_node, current_index):
+    def check_resolver_prefix(self, depth, path, kind, current_node, current_index):
         node_check, index_check = path[depth - 1]
         if isinstance(node_check, str):
             if current_node.tag != node_check:
@@ -75,8 +73,7 @@ class BaseResolver:
                 and current_index is None):
             return
         if isinstance(index_check, str):
-            if not (isinstance(current_index, ScalarNode)
-                    and index_check == current_index.value):
+            if not (isinstance(current_index, ScalarNode) and index_check == current_index.value):
                 return
         elif isinstance(index_check, int) and not isinstance(index_check, bool):
             if index_check != current_index:
@@ -94,9 +91,11 @@ class BaseResolver:
                 if regexp.match(value):
                     return tag
             else:
-                self.echoerr('While resolving plain scalar', None,
-                        'expected floating-point value, integer, null or boolean, but got %r' % value,
-                        mark)
+                self.echoerr(
+                    'While resolving plain scalar', None,
+                    'expected floating-point value, integer, null or boolean, but got %r' % value,
+                    mark
+                )
                 return self.DEFAULT_SCALAR_TAG
         if kind is ScalarNode:
             return self.DEFAULT_SCALAR_TAG

View File

@ -48,9 +48,9 @@ class Scanner:
# input data to Unicode. It also adds NUL to the end. # input data to Unicode. It also adds NUL to the end.
# #
# Reader supports the following methods # Reader supports the following methods
# self.peek(i=0) # peek the next i-th character # self.peek(i=0) # peek the next i-th character
# self.prefix(l=1) # peek the next l characters # self.prefix(l=1) # peek the next l characters
# self.forward(l=1) # read the next l characters and move the pointer. # self.forward(l=1) # read the next l characters and move the pointer.
# Had we reached the end of the stream? # Had we reached the end of the stream?
self.done = False self.done = False
@ -83,7 +83,7 @@ class Scanner:
# Keep track of possible simple keys. This is a dictionary. The key # Keep track of possible simple keys. This is a dictionary. The key
# is `flow_level`; there can be no more that one possible simple key # is `flow_level`; there can be no more that one possible simple key
# for each level. The value is a SimpleKey record: # for each level. The value is a SimpleKey record:
# (token_number, index, line, column, mark) # (token_number, index, line, column, mark)
# A simple key may start with SCALAR(flow), '[', or '{' tokens. # A simple key may start with SCALAR(flow), '[', or '{' tokens.
self.possible_simple_keys = {} self.possible_simple_keys = {}
@ -179,9 +179,11 @@ class Scanner:
return self.fetch_plain() return self.fetch_plain()
# No? It's an error. Let's produce a nice error message. # No? It's an error. Let's produce a nice error message.
raise ScannerError("while scanning for the next token", None, raise ScannerError(
"found character %r that cannot start any token" % ch, "while scanning for the next token", None,
self.get_mark()) "found character %r that cannot start any token" % ch,
self.get_mark()
)
# Simple keys treatment. # Simple keys treatment.
@ -189,10 +191,10 @@ class Scanner:
# Return the number of the nearest possible simple key. Actually we # Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it # don't need to loop through the whole dictionary. We may replace it
# with the following code: # with the following code:
# if not self.possible_simple_keys: # if not self.possible_simple_keys:
# return None # return None
# return self.possible_simple_keys[ # return self.possible_simple_keys[
# min(self.possible_simple_keys.keys())].token_number # min(self.possible_simple_keys.keys())].token_number
min_token_number = None min_token_number = None
for level in self.possible_simple_keys: for level in self.possible_simple_keys:
key = self.possible_simple_keys[level] key = self.possible_simple_keys[level]
@ -214,15 +216,14 @@ class Scanner:
def save_possible_simple_key(self): def save_possible_simple_key(self):
# The next token may start a simple key. We check if it's possible # The next token may start a simple key. We check if it's possible
# and save its position. This function is called for # and save its position. This function is called for
# SCALAR(flow), '[', and '{'. # SCALAR(flow), '[', and '{'.
# The next token might be a simple key. Let's save it's number and # The next token might be a simple key. Let's save it's number and
# position. # position.
if self.allow_simple_key: if self.allow_simple_key:
self.remove_possible_simple_key() self.remove_possible_simple_key()
token_number = self.tokens_taken + len(self.tokens) token_number = self.tokens_taken + len(self.tokens)
key = SimpleKey(token_number, key = SimpleKey(token_number, self.index, self.line, self.column, self.get_mark())
self.index, self.line, self.column, self.get_mark())
self.possible_simple_keys[self.flow_level] = key self.possible_simple_keys[self.flow_level] = key
def remove_possible_simple_key(self): def remove_possible_simple_key(self):
@ -311,8 +312,7 @@ class Scanner:
# Add KEY. # Add KEY.
key = self.possible_simple_keys[self.flow_level] key = self.possible_simple_keys[self.flow_level]
del self.possible_simple_keys[self.flow_level] del self.possible_simple_keys[self.flow_level]
self.tokens.insert(key.token_number - self.tokens_taken, self.tokens.insert(key.token_number - self.tokens_taken, KeyToken(key.mark, key.mark))
KeyToken(key.mark, key.mark))
# There cannot be two simple keys one after another. # There cannot be two simple keys one after another.
self.allow_simple_key = False self.allow_simple_key = False
@ -423,15 +423,20 @@ class Scanner:
self.forward() self.forward()
for k in range(length): for k in range(length):
if self.peek(k) not in '0123456789ABCDEFabcdef': if self.peek(k) not in '0123456789ABCDEFabcdef':
raise ScannerError("while scanning a double-quoted scalar", start_mark, raise ScannerError(
"expected escape sequence of %d hexdecimal numbers, but found %r" % "while scanning a double-quoted scalar", start_mark,
(length, self.peek(k)), self.get_mark()) "expected escape sequence of %d hexdecimal numbers, but found %r" % (
length, self.peek(k)),
self.get_mark()
)
code = int(self.prefix(length), 16) code = int(self.prefix(length), 16)
chunks.append(chr(code)) chunks.append(chr(code))
self.forward(length) self.forward(length)
else: else:
raise ScannerError("while scanning a double-quoted scalar", start_mark, raise ScannerError(
"found unknown escape character %r" % ch, self.get_mark()) "while scanning a double-quoted scalar", start_mark,
("found unknown escape character %r" % ch), self.get_mark()
)
else: else:
return chunks return chunks
@ -445,11 +450,15 @@ class Scanner:
self.forward(length) self.forward(length)
ch = self.peek() ch = self.peek()
if ch == '\0': if ch == '\0':
raise ScannerError("while scanning a quoted scalar", start_mark, raise ScannerError(
"found unexpected end of stream", self.get_mark()) "while scanning a quoted scalar", start_mark,
"found unexpected end of stream", self.get_mark()
)
elif ch == '\n': elif ch == '\n':
raise ScannerError("while scanning a quoted scalar", start_mark, raise ScannerError(
"found unexpected line end", self.get_mark()) "while scanning a quoted scalar", start_mark,
"found unexpected line end", self.get_mark()
)
else: else:
chunks.append(whitespaces) chunks.append(whitespaces)
return chunks return chunks
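
The scanner hunks above mostly re-wrap long raise ScannerError(...) calls so that each argument gets its own line and the closing parenthesis is dedented back to the statement. A minimal standalone sketch of that layout, with ValueError and a dummy mark standing in for pyyaml's ScannerError and mark objects:

    def check_hex(prefix, length, get_mark=lambda: None):
        # Same wrapping style as the hunks above: one argument per line,
        # closing parenthesis dedented to the statement's indent.
        for k in range(length):
            if prefix[k] not in '0123456789ABCDEFabcdef':
                raise ValueError(
                    'while scanning a double-quoted scalar',
                    'expected escape sequence of %d hexadecimal numbers, but found %r' % (
                        length, prefix[k]),
                    get_mark()
                )
        return int(prefix[:length], 16)

    print(check_hex('0A3f', 4))  # 2623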


@ -4,19 +4,22 @@ class Token(object):
self.end_mark = end_mark self.end_mark = end_mark
def __repr__(self): def __repr__(self):
attributes = [key for key in self.__dict__ attributes = [
if not key.endswith('_mark')] key for key in self.__dict__
if not key.endswith('_mark')
]
attributes.sort() attributes.sort()
arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) arguments = ', '.join([
for key in attributes]) '%s=%r' % (key, getattr(self, key))
for key in attributes
])
return '%s(%s)' % (self.__class__.__name__, arguments) return '%s(%s)' % (self.__class__.__name__, arguments)
class StreamStartToken(Token): class StreamStartToken(Token):
id = '<stream start>' id = '<stream start>'
def __init__(self, start_mark=None, end_mark=None, def __init__(self, start_mark=None, end_mark=None, encoding=None):
encoding=None):
self.start_mark = start_mark self.start_mark = start_mark
self.end_mark = end_mark self.end_mark = end_mark
self.encoding = encoding self.encoding = encoding
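
The Token.__repr__ hunk reflows the comprehensions that build the representation from every attribute except the *_mark ones. A small runnable sketch of the same idea, using a made-up DemoToken rather than the vendored pyyaml class:

    class DemoToken(object):
        def __init__(self, value, start_mark=None, end_mark=None):
            self.value = value
            self.start_mark = start_mark
            self.end_mark = end_mark

        def __repr__(self):
            # Skip *_mark attributes and sort for a stable ordering.
            attributes = [
                key for key in self.__dict__
                if not key.endswith('_mark')
            ]
            attributes.sort()
            arguments = ', '.join([
                '%s=%r' % (key, getattr(self, key))
                for key in attributes
            ])
            return '%s(%s)' % (self.__class__.__name__, arguments)

    print(repr(DemoToken('x')))  # DemoToken(value='x')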


@ -90,13 +90,13 @@ class Renderer(object):
''' '''
def __init__(self, def __init__(self,
theme_config, theme_config,
local_themes, local_themes,
theme_kwargs, theme_kwargs,
colorscheme, colorscheme,
pl, pl,
ambiwidth=1, ambiwidth=1,
**options): **options):
self.__dict__.update(options) self.__dict__.update(options)
self.theme_config = theme_config self.theme_config = theme_config
theme_kwargs['pl'] = pl theme_kwargs['pl'] = pl
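
The Renderer signature above keeps one keyword parameter per line, aligned under the first one. A short illustration of that layout with a trimmed-down, hypothetical signature rather than powerline's real one:

    class DemoRenderer(object):
        # Long signatures keep one parameter per line, aligned under the
        # first parameter; names here are illustrative only.
        def __init__(self,
                     theme_config,
                     local_themes,
                     colorscheme,
                     ambiwidth=1,
                     **options):
            self.__dict__.update(options)
            self.theme_config = theme_config
            self.local_themes = local_themes
            self.colorscheme = colorscheme
            self.ambiwidth = ambiwidth

    r = DemoRenderer({}, {}, None, ambiwidth=2, term_truecolor=True)
    print(r.ambiwidth, r.term_truecolor)  # 2 True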


@ -154,12 +154,14 @@ class VimRenderer(Renderer):
hl_group['attr'].append('italic') hl_group['attr'].append('italic')
if attr & ATTR_UNDERLINE: if attr & ATTR_UNDERLINE:
hl_group['attr'].append('underline') hl_group['attr'].append('underline')
hl_group['name'] = ('Pl_' + hl_group['name'] = (
str(hl_group['ctermfg']) + '_' + 'Pl_'
str(hl_group['guifg']) + '_' + + str(hl_group['ctermfg']) + '_'
str(hl_group['ctermbg']) + '_' + + str(hl_group['guifg']) + '_'
str(hl_group['guibg']) + '_' + + str(hl_group['ctermbg']) + '_'
''.join(hl_group['attr'])) + str(hl_group['guibg']) + '_'
+ ''.join(hl_group['attr'])
)
self.hl_groups[(fg, bg, attr)] = hl_group self.hl_groups[(fg, bg, attr)] = hl_group
vim.command('hi {group} ctermfg={ctermfg} guifg={guifg} guibg={guibg} ctermbg={ctermbg} cterm={attr} gui={attr}'.format( vim.command('hi {group} ctermfg={ctermfg} guifg={guifg} guibg={guibg} ctermbg={ctermbg} cterm={attr} gui={attr}'.format(
group=hl_group['name'], group=hl_group['name'],
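
The highlight-group hunk switches to a parenthesised concatenation with one component per line and a leading '+'. A tiny self-contained version of that name builder (the sample values are made up):

    def hl_group_name(ctermfg, guifg, ctermbg, guibg, attrs):
        # Parenthesised concatenation lets each component sit on its own
        # line with a leading '+', as in the renderer hunk above.
        return (
            'Pl_'
            + str(ctermfg) + '_'
            + str(guifg) + '_'
            + str(ctermbg) + '_'
            + str(guibg) + '_'
            + ''.join(attrs)
        )

    print(hl_group_name(231, 'ffffff', 31, '0087af', ['bold']))
    # Pl_231_ffffff_31_0087af_bold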


@ -368,9 +368,11 @@ class WeatherSegment(ThreadedSegment):
# only in .update() # only in .update()
if not self.location: if not self.location:
location_data = json.loads(urllib_read('http://freegeoip.net/json/')) location_data = json.loads(urllib_read('http://freegeoip.net/json/'))
self.location = ','.join([location_data['city'], self.location = ','.join((
location_data['region_code'], location_data['city'],
location_data['country_code']]) location_data['region_code'],
location_data['country_code']
))
query_data = { query_data = {
'q': 'q':
'use "https://raw.githubusercontent.com/yql/yql-tables/master/weather/weather.bylocation.xml" as we;' 'use "https://raw.githubusercontent.com/yql/yql-tables/master/weather/weather.bylocation.xml" as we;'


@ -244,8 +244,10 @@ def file_directory(pl, segment_info, remove_scheme=True, shorten_user=True, shor
name = name[len(match.group(0)) + 1:] # Remove scheme and colon name = name[len(match.group(0)) + 1:] # Remove scheme and colon
file_directory = vim_funcs['fnamemodify'](name, ':h') file_directory = vim_funcs['fnamemodify'](name, ':h')
else: else:
file_directory = vim_funcs['fnamemodify'](name, (':~' if shorten_user else '') file_directory = vim_funcs['fnamemodify'](
+ (':.' if shorten_cwd else '') + ':h') name,
(':~' if shorten_user else '') + (':.' if shorten_cwd else '') + ':h'
)
if not file_directory: if not file_directory:
return None return None
if shorten_home and file_directory.startswith('/home/'): if shorten_home and file_directory.startswith('/home/'):
@ -507,7 +509,7 @@ def file_vcs_status(pl, segment_info, create_watcher):
ret.append({ ret.append({
'contents': status, 'contents': status,
'highlight_group': ['file_vcs_status_' + status, 'file_vcs_status'], 'highlight_group': ['file_vcs_status_' + status, 'file_vcs_status'],
}) })
return ret return ret


@ -38,8 +38,10 @@ class ShellPowerline(Powerline):
if not local_themes: if not local_themes:
return {} return {}
return dict(((key, {'config': self.load_theme_config(val)}) return dict((
for key, val in local_themes.items())) (key, {'config': self.load_theme_config(val)})
for key, val in local_themes.items()
))
def get_argparser(parser=None, *args, **kwargs): def get_argparser(parser=None, *args, **kwargs):
@ -49,8 +51,10 @@ def get_argparser(parser=None, *args, **kwargs):
p = parser(*args, **kwargs) p = parser(*args, **kwargs)
p.add_argument('ext', nargs=1, help='Extension: application for which powerline command is launched (usually `shell\' or `tmux\')') p.add_argument('ext', nargs=1, help='Extension: application for which powerline command is launched (usually `shell\' or `tmux\')')
p.add_argument('side', nargs='?', choices=('left', 'right', 'above', 'aboveleft'), help='Side: `left\' and `right\' represent left and right side respectively, `above\' emits lines that are supposed to be printed just above the prompt and `aboveleft\' is like concatenating `above\' with `left\' with the exception that only one Python instance is used in this case.') p.add_argument('side', nargs='?', choices=('left', 'right', 'above', 'aboveleft'), help='Side: `left\' and `right\' represent left and right side respectively, `above\' emits lines that are supposed to be printed just above the prompt and `aboveleft\' is like concatenating `above\' with `left\' with the exception that only one Python instance is used in this case.')
p.add_argument('-r', '--renderer_module', metavar='MODULE', type=str, p.add_argument(
help='Renderer module. Usually something like `.bash\' or `.zsh\', is supposed to be set only in shell-specific bindings file.') '-r', '--renderer_module', metavar='MODULE', type=str,
help='Renderer module. Usually something like `.bash\' or `.zsh\', is supposed to be set only in shell-specific bindings file.'
)
p.add_argument('-w', '--width', type=int, help='Maximum prompt width. Triggers truncation of some segments') p.add_argument('-w', '--width', type=int, help='Maximum prompt width. Triggers truncation of some segments')
p.add_argument('--last_exit_code', metavar='INT', type=int, help='Last exit code') p.add_argument('--last_exit_code', metavar='INT', type=int, help='Last exit code')
p.add_argument('--last_pipe_status', metavar='LIST', default='', type=lambda s: [int(status) for status in s.split()], help='Like above, but is supposed to contain space-separated array of statuses, representing exit statuses of commands in one pipe.') p.add_argument('--last_pipe_status', metavar='LIST', default='', type=lambda s: [int(status) for status in s.split()], help='Like above, but is supposed to contain space-separated array of statuses, representing exit statuses of commands in one pipe.')
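
ShellPowerline.get_local_themes keeps the dict-over-generator-expression shape, just spread across several lines. A runnable sketch with a hypothetical load_theme_config stand-in:

    def load_theme_config(name):
        # Hypothetical stand-in for ShellPowerline.load_theme_config.
        return {'name': name}

    local_themes = {'continuation': 'continuation', 'select': 'select'}

    # dict() over a parenthesised generator expression of (key, value) pairs.
    themes = dict((
        (key, {'config': load_theme_config(val)})
        for key, val in local_themes.items()
    ))
    print(themes['select'])  # {'config': {'name': 'select'}}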


@ -25,13 +25,13 @@ def new_empty_segment_line():
class Theme(object): class Theme(object):
def __init__(self, def __init__(self,
ext, ext,
theme_config, theme_config,
common_config, common_config,
pl, pl,
main_theme_config=None, main_theme_config=None,
run_once=False, run_once=False,
shutdown_event=None): shutdown_event=None):
self.dividers = theme_config['dividers'] self.dividers = theme_config['dividers']
self.dividers = dict(( self.dividers = dict((
(key, dict((k, u(v)) (key, dict((k, u(v))
@ -55,7 +55,7 @@ class Theme(object):
theme_configs.append(main_theme_config) theme_configs.append(main_theme_config)
get_segment = gen_segment_getter(pl, ext, common_config, theme_configs, theme_config.get('default_module')) get_segment = gen_segment_getter(pl, ext, common_config, theme_configs, theme_config.get('default_module'))
for segdict in itertools.chain((theme_config['segments'],), for segdict in itertools.chain((theme_config['segments'],),
theme_config['segments'].get('above', ())): theme_config['segments'].get('above', ())):
self.segments.append(new_empty_segment_line()) self.segments.append(new_empty_segment_line())
for side in ['left', 'right']: for side in ['left', 'right']:
for segment in segdict.get(side, []): for segment in segdict.get(side, []):


@ -74,8 +74,10 @@ class VimPowerline(Powerline):
# Note: themes with non-[a-zA-Z0-9_] names are impossible to override # Note: themes with non-[a-zA-Z0-9_] names are impossible to override
# (though as far as I know exists() won't throw). Won't fix, use proper # (though as far as I know exists() won't throw). Won't fix, use proper
# theme names. # theme names.
return _override_from(super(VimPowerline, self).load_theme_config(name), return _override_from(
'powerline_theme_overrides__' + name) super(VimPowerline, self).load_theme_config(name),
'powerline_theme_overrides__' + name
)
def get_local_themes(self, local_themes): def get_local_themes(self, local_themes):
self.get_matcher = gen_matcher_getter(self.ext, self.import_paths) self.get_matcher = gen_matcher_getter(self.ext, self.import_paths)
@ -83,9 +85,13 @@ class VimPowerline(Powerline):
if not local_themes: if not local_themes:
return {} return {}
return dict(((None if key == '__tabline__' else self.get_matcher(key), return dict((
{'config': self.load_theme_config(val)}) (
for key, val in local_themes.items())) (None if key == '__tabline__' else self.get_matcher(key)),
{'config': self.load_theme_config(val)}
)
for key, val in local_themes.items())
)
def get_config_paths(self): def get_config_paths(self):
try: try:
@ -167,8 +173,7 @@ class VimPowerline(Powerline):
@staticmethod @staticmethod
def do_pyeval(): def do_pyeval():
import __main__ import __main__
vim.command('return ' + json.dumps(eval(vim.eval('a:e'), vim.command('return ' + json.dumps(eval(vim.eval('a:e'), __main__.__dict__)))
__main__.__dict__)))
def setup_components(self, components): def setup_components(self, components):
if components is None: if components is None:
@ -208,10 +213,10 @@ def setup(pyeval=None, pycmd=None, can_replace_pyeval=True):
# pyeval() and vim.bindeval were both introduced in one patch # pyeval() and vim.bindeval were both introduced in one patch
if not hasattr(vim, 'bindeval') and can_replace_pyeval: if not hasattr(vim, 'bindeval') and can_replace_pyeval:
vim.command((''' vim.command(('''
function! PowerlinePyeval(e) function! PowerlinePyeval(e)
{pycmd} powerline.do_pyeval() {pycmd} powerline.do_pyeval()
endfunction endfunction
''').format(pycmd=pycmd)) ''').format(pycmd=pycmd))
pyeval = 'PowerlinePyeval' pyeval = 'PowerlinePyeval'
powerline = VimPowerline(pyeval) powerline = VimPowerline(pyeval)


@ -13,8 +13,10 @@ class Pl(object):
self.use_daemon_threads = True self.use_daemon_threads = True
for meth in ('error', 'warn', 'debug', 'exception'): for meth in ('error', 'warn', 'debug', 'exception'):
exec (('def {0}(self, msg, *args, **kwargs):\n' exec ((
' self.{0}s.append((kwargs.get("prefix") or self.prefix, msg, args, kwargs))\n').format(meth)) 'def {0}(self, msg, *args, **kwargs):\n'
' self.{0}s.append((kwargs.get("prefix") or self.prefix, msg, args, kwargs))\n'
).format(meth))
class Args(object): class Args(object):
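
The Pl test helper still generates its error/warn/debug recorders from one template via exec(...).format(meth); only the string layout changed. A self-contained sketch of that trick (DemoLogger is made up, not the real test class):

    class DemoLogger(object):
        def __init__(self):
            self.prefix = 'demo'
            for meth in ('error', 'warn', 'debug'):
                self.__dict__[meth + 's'] = []

        # Generate one recording method per level from a single template,
        # in the spirit of the exec/format trick above. exec in a class
        # body writes the defined function into the class namespace.
        for meth in ('error', 'warn', 'debug'):
            exec((
                'def {0}(self, msg, *args, **kwargs):\n'
                '    self.{0}s.append((kwargs.get("prefix") or self.prefix, msg, args, kwargs))\n'
            ).format(meth))

    pl = DemoLogger()
    pl.warn('segment {0} failed', 'foo')
    print(pl.warns)  # [('demo', 'segment {0} failed', ('foo',), {})]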


@ -29,8 +29,9 @@ class TestShell(TestCase):
def test_last_status(self): def test_last_status(self):
pl = Pl() pl = Pl()
segment_info = {'args': Args(last_exit_code=10)} segment_info = {'args': Args(last_exit_code=10)}
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
[{'contents': '10', 'highlight_group': 'exit_fail'}]) {'contents': '10', 'highlight_group': 'exit_fail'}
])
segment_info['args'].last_exit_code = 0 segment_info['args'].last_exit_code = 0
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None) self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_exit_code = None segment_info['args'].last_exit_code = None
@ -222,18 +223,23 @@ class TestCommon(TestCase):
branch = partial(common.branch, pl=pl, create_watcher=create_watcher) branch = partial(common.branch, pl=pl, create_watcher=create_watcher)
with replace_attr(common, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')): with replace_attr(common, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')):
with replace_attr(common, 'tree_status', lambda repo, pl: None): with replace_attr(common, 'tree_status', lambda repo, pl: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
[{'highlight_group': ['branch'], 'contents': 'tests'}]) {'highlight_group': ['branch'], 'contents': 'tests'}
self.assertEqual(branch(segment_info=segment_info, status_colors=True), ])
[{'contents': 'tests', 'highlight_group': ['branch_clean', 'branch']}]) self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'contents': 'tests', 'highlight_group': ['branch_clean', 'branch']}
])
with replace_attr(common, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')): with replace_attr(common, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')):
with replace_attr(common, 'tree_status', lambda repo, pl: 'D '): with replace_attr(common, 'tree_status', lambda repo, pl: 'D '):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
[{'highlight_group': ['branch'], 'contents': 'tests'}]) {'highlight_group': ['branch'], 'contents': 'tests'}
self.assertEqual(branch(segment_info=segment_info, status_colors=True), ])
[{'contents': 'tests', 'highlight_group': ['branch_dirty', 'branch']}]) self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
self.assertEqual(branch(segment_info=segment_info, status_colors=False), {'contents': 'tests', 'highlight_group': ['branch_dirty', 'branch']}
[{'highlight_group': ['branch'], 'contents': 'tests'}]) ])
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
{'highlight_group': ['branch'], 'contents': 'tests'}
])
with replace_attr(common, 'guess', lambda path, create_watcher: None): with replace_attr(common, 'guess', lambda path, create_watcher: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), None) self.assertEqual(branch(segment_info=segment_info, status_colors=False), None)
@ -322,8 +328,9 @@ class TestCommon(TestCase):
ose = OSError() ose = OSError()
ose.errno = 2 ose.errno = 2
cwd[0] = ose cwd[0] = ose
self.assertEqual(common.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), self.assertEqual(common.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
[{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_group': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}]) {'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_group': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
])
cwd[0] = OSError() cwd[0] = OSError()
self.assertRaises(OSError, common.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2) self.assertRaises(OSError, common.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
cwd[0] = ValueError() cwd[0] = ValueError()
@ -415,14 +422,16 @@ class TestCommon(TestCase):
pl = Pl() pl = Pl()
with replace_module_module(common, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)): with replace_module_module(common, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)):
with replace_attr(common, '_cpu_count', lambda: 2): with replace_attr(common, '_cpu_count', lambda: 2):
self.assertEqual(common.system_load(pl=pl), self.assertEqual(common.system_load(pl=pl), [
[{'contents': '7.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100}, {'contents': '7.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '3.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}, {'contents': '3.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
{'contents': '1.5', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}]) {'contents': '1.5', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}
self.assertEqual(common.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), ])
[{'contents': '8 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100}, self.assertEqual(common.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), [
{'contents': '4 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100}, {'contents': '8 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '2', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}]) {'contents': '4 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '2', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}
])
def test_cpu_load_percent(self): def test_cpu_load_percent(self):
pl = Pl() pl = Pl()
@ -700,10 +709,12 @@ class TestVim(TestCase):
pl = Pl() pl = Pl()
segment_info = vim_module._get_segment_info() segment_info = vim_module._get_segment_info()
self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), None) self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), None)
self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), [
[{'contents': '[No file]', 'highlight_group': ['file_name_no_file', 'file_name']}]) {'contents': '[No file]', 'highlight_group': ['file_name_no_file', 'file_name']}
self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), ])
[{'contents': 'X', 'highlight_group': ['file_name_no_file', 'file_name']}]) self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), [
{'contents': 'X', 'highlight_group': ['file_name_no_file', 'file_name']}
])
with vim_module._with('buffer', '/tmp/abc') as segment_info: with vim_module._with('buffer', '/tmp/abc') as segment_info:
self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), 'abc') self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), 'abc')
with vim_module._with('buffer', '/tmp/') as segment_info: with vim_module._with('buffer', '/tmp/') as segment_info:
@ -721,14 +732,17 @@ class TestVim(TestCase):
def test_file_opts(self): def test_file_opts(self):
pl = Pl() pl = Pl()
segment_info = vim_module._get_segment_info() segment_info = vim_module._get_segment_info()
self.assertEqual(vim.file_format(pl=pl, segment_info=segment_info), self.assertEqual(vim.file_format(pl=pl, segment_info=segment_info), [
[{'divider_highlight_group': 'background:divider', 'contents': 'unix'}]) {'divider_highlight_group': 'background:divider', 'contents': 'unix'}
self.assertEqual(vim.file_encoding(pl=pl, segment_info=segment_info), ])
[{'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}]) self.assertEqual(vim.file_encoding(pl=pl, segment_info=segment_info), [
{'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}
])
self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), None) self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), None)
with vim_module._with('bufoptions', filetype='python'): with vim_module._with('bufoptions', filetype='python'):
self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), [
[{'divider_highlight_group': 'background:divider', 'contents': 'python'}]) {'divider_highlight_group': 'background:divider', 'contents': 'python'}
])
def test_window_title(self): def test_window_title(self):
pl = Pl() pl = Pl()
@ -745,8 +759,9 @@ class TestVim(TestCase):
self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '1') self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '1')
vim_module._set_cursor(50, 0) vim_module._set_cursor(50, 0)
self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '50') self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '50')
self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), [
[{'contents': '50', 'highlight_group': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}]) {'contents': '50', 'highlight_group': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}
])
finally: finally:
vim_module._bw(segment_info['bufnr']) vim_module._bw(segment_info['bufnr'])
@ -768,8 +783,9 @@ class TestVim(TestCase):
segment_info['buffer'][0:-1] = [str(i) for i in range(99)] segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
vim_module._set_cursor(49, 0) vim_module._set_cursor(49, 0)
self.assertEqual(vim.position(pl=pl, segment_info=segment_info), '50%') self.assertEqual(vim.position(pl=pl, segment_info=segment_info), '50%')
self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), [
[{'contents': '50%', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 50.0}]) {'contents': '50%', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 50.0}
])
vim_module._set_cursor(0, 0) vim_module._set_cursor(0, 0)
self.assertEqual(vim.position(pl=pl, segment_info=segment_info), 'Top') self.assertEqual(vim.position(pl=pl, segment_info=segment_info), 'Top')
vim_module._set_cursor(97, 0) vim_module._set_cursor(97, 0)
@ -777,8 +793,9 @@ class TestVim(TestCase):
segment_info['buffer'][0:-1] = [str(i) for i in range(2)] segment_info['buffer'][0:-1] = [str(i) for i in range(2)]
vim_module._set_cursor(0, 0) vim_module._set_cursor(0, 0)
self.assertEqual(vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo') self.assertEqual(vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo')
self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), [
[{'contents': 'All', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 0.0}]) {'contents': 'All', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 0.0}
])
finally: finally:
vim_module._bw(segment_info['bufnr']) vim_module._bw(segment_info['bufnr'])
@ -805,16 +822,20 @@ class TestVim(TestCase):
with vim_module._with('buffer', '/foo') as segment_info: with vim_module._with('buffer', '/foo') as segment_info:
with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: None)): with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: None)):
with replace_attr(vim, 'tree_status', lambda repo, pl: None): with replace_attr(vim, 'tree_status', lambda repo, pl: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}]) {'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}
self.assertEqual(branch(segment_info=segment_info, status_colors=True), ])
[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_clean', 'branch'], 'contents': 'foo'}]) self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_clean', 'branch'], 'contents': 'foo'}
])
with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: 'DU')): with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: 'DU')):
with replace_attr(vim, 'tree_status', lambda repo, pl: 'DU'): with replace_attr(vim, 'tree_status', lambda repo, pl: 'DU'):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}]) {'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}
self.assertEqual(branch(segment_info=segment_info, status_colors=True), ])
[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_dirty', 'branch'], 'contents': 'foo'}]) self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_dirty', 'branch'], 'contents': 'foo'}
])
def test_file_vcs_status(self): def test_file_vcs_status(self):
pl = Pl() pl = Pl()
@ -822,8 +843,9 @@ class TestVim(TestCase):
file_vcs_status = partial(vim.file_vcs_status, pl=pl, create_watcher=create_watcher) file_vcs_status = partial(vim.file_vcs_status, pl=pl, create_watcher=create_watcher)
with vim_module._with('buffer', '/foo') as segment_info: with vim_module._with('buffer', '/foo') as segment_info:
with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: 'M')): with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
self.assertEqual(file_vcs_status(segment_info=segment_info), self.assertEqual(file_vcs_status(segment_info=segment_info), [
[{'highlight_group': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}]) {'highlight_group': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}
])
with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: None)): with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: None)):
self.assertEqual(file_vcs_status(segment_info=segment_info), None) self.assertEqual(file_vcs_status(segment_info=segment_info), None)
with vim_module._with('buffer', '/bar') as segment_info: with vim_module._with('buffer', '/bar') as segment_info:
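
The test hunks above pre-bind the arguments that never change (pl, create_watcher) with functools.partial, so each assertion only passes what varies. A toy illustration with a made-up segment function:

    from functools import partial

    def segment(pl, segment_info, status_colors=False):
        # Toy stand-in for a powerline segment function.
        return [{'contents': segment_info['name'], 'status_colors': status_colors}]

    branch = partial(segment, pl=None, segment_info={'name': 'tests'})
    print(branch(status_colors=True))
    # [{'contents': 'tests', 'status_colors': True}]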


@ -153,8 +153,10 @@ def _construct_result(r):
elif isinstance(r, list): elif isinstance(r, list):
return [_construct_result(i) for i in r] return [_construct_result(i) for i in r]
elif isinstance(r, dict): elif isinstance(r, dict):
return dict(((_construct_result(k), _construct_result(v)) return dict((
for k, v in r.items())) (_construct_result(k), _construct_result(v))
for k, v in r.items()
))
return r return r


@ -8,8 +8,10 @@ from colormath.color_diff import delta_e_cie2000
def get_lab(name, rgb): def get_lab(name, rgb):
rgb = sRGBColor(int(rgb[:2], 16), int(rgb[2:4], 16), int(rgb[4:6], 16), rgb = sRGBColor(
is_upscaled=True) int(rgb[:2], 16), int(rgb[2:4], 16), int(rgb[4:6], 16),
is_upscaled=True
)
lab = convert_color(rgb, LabColor) lab = convert_color(rgb, LabColor)
return name, lab return name, lab
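
get_lab above parses a hex colour and converts it to Lab via colormath. A hedged sketch of the same conversion, assuming the colormath package and the names appearing in the hunk (sRGBColor, LabColor, convert_color) are available:

    from colormath.color_objects import sRGBColor, LabColor
    from colormath.color_conversions import convert_color

    def hex_to_lab(rgb_hex):
        # '5f87af' -> LabColor; is_upscaled=True means 0-255 channel values.
        rgb = sRGBColor(
            int(rgb_hex[:2], 16), int(rgb_hex[2:4], 16), int(rgb_hex[4:6], 16),
            is_upscaled=True
        )
        return convert_color(rgb, LabColor)

    print(hex_to_lab('5f87af'))  # prints the LabColor representation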


@ -26,8 +26,10 @@ def num2(s):
def rgbint_to_lab(rgbint): def rgbint_to_lab(rgbint):
rgb = sRGBColor((rgbint >> 16) & 0xFF, (rgbint >> 8) & 0xFF, rgbint & 0xFF, rgb = sRGBColor(
is_upscaled=True) (rgbint >> 16) & 0xFF, (rgbint >> 8) & 0xFF, rgbint & 0xFF,
is_upscaled=True
)
return convert_color(rgb, LabColor) return convert_color(rgb, LabColor)
@ -52,8 +54,10 @@ def linear_gradient(start_value, stop_value, start_offset, stop_offset, offset):
def lab_gradient(slab, elab, soff, eoff, off): def lab_gradient(slab, elab, soff, eoff, off):
svals = slab.get_value_tuple() svals = slab.get_value_tuple()
evals = elab.get_value_tuple() evals = elab.get_value_tuple()
return LabColor(*[linear_gradient(start_value, end_value, soff, eoff, off) return LabColor(*[
for start_value, end_value in zip(svals, evals)]) linear_gradient(start_value, end_value, soff, eoff, off)
for start_value, end_value in zip(svals, evals)
])
def generate_gradient_function(DATA): def generate_gradient_function(DATA):
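
lab_gradient interpolates each Lab channel separately through linear_gradient, whose body sits outside the hunk shown; the sketch below assumes it is plain linear interpolation over an offset range, which may differ in detail from the real helper:

    def linear_gradient(start_value, stop_value, start_offset, stop_offset, offset):
        # Assumed behaviour: linear interpolation of one channel value
        # between start_offset and stop_offset.
        return start_value + (
            (offset - start_offset) * (stop_value - start_value)
            / (stop_offset - start_offset)
        )

    def lab_gradient_channels(svals, evals, soff, eoff, off):
        # Interpolate each Lab channel (L, a, b) independently.
        return [
            linear_gradient(start_value, end_value, soff, eoff, off)
            for start_value, end_value in zip(svals, evals)
        ]

    print(lab_gradient_channels((0.0, 10.0, 20.0), (100.0, 20.0, 0.0), 0, 100, 25))
    # [25.0, 12.5, 15.0]
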
@ -185,8 +189,10 @@ if __name__ == '__main__':
steps = compute_steps(args.gradient, args.weights) steps = compute_steps(args.gradient, args.weights)
data = [(weight, args.gradient[i - 1], args.gradient[i]) data = [
for weight, i in zip(steps, range(1, len(args.gradient)))] (weight, args.gradient[i - 1], args.gradient[i])
for weight, i in zip(steps, range(1, len(args.gradient)))
]
gr_func = generate_gradient_function(data) gr_func = generate_gradient_function(data)
gradient = [gr_func(y) for y in range(0, m)] gradient = [gr_func(y) for y in range(0, m)]
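
The data list pairs each weight with two adjacent gradient stops by zipping the step list against range(1, len(gradient)). A tiny worked example with made-up colours and weights:

    gradient = ['00ff00', 'ffff00', 'ff0000']
    steps = [50, 100]  # hypothetical cumulative weights, one per colour pair

    data = [
        (weight, gradient[i - 1], gradient[i])
        for weight, i in zip(steps, range(1, len(gradient)))
    ]
    print(data)
    # [(50, '00ff00', 'ffff00'), (100, 'ffff00', 'ff0000')]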