From ae92d83eae5142322ff20b9aa81eb53b0b363575 Mon Sep 17 00:00:00 2001
From: ZyX
Date: Fri, 15 Aug 2014 20:58:19 +0400
Subject: [PATCH] Fix powerline style

Specifically, I searched for all lines that are more than one tab off
compared to the previous line with BufGrep /\(^\t\+\)\S.*\n\1\t\t\+/ and
replaced them with something more appropriate. Most of the time this
resulted in a few more newlines, but there are cases where I used mixed
tabs/spaces indentation+alignment.
---
 .local.vimrc                             |   3 +
 powerline/__init__.py                    |  42 ++++---
 powerline/bindings/vim/__init__.py       |  17 ++-
 powerline/lib/inotify.py                 |   7 +-
 powerline/lib/shell.py                   |  11 +-
 powerline/lib/tree_watcher.py            |  14 ++-
 powerline/lib/vcs/git.py                 |  11 +-
 powerline/lint/__init__.py               | 148 ++++++++++++-----------
 powerline/lint/markedjson/composer.py    |  30 +++--
 powerline/lint/markedjson/constructor.py |  77 +++++++-----
 powerline/lint/markedjson/error.py       |  30 +++--
 powerline/lint/markedjson/events.py      |  24 ++--
 powerline/lint/markedjson/markedvalue.py |  12 +-
 powerline/lint/markedjson/nodes.py       |  30 +++--
 powerline/lint/markedjson/parser.py      |  78 +++++++-----
 powerline/lint/markedjson/reader.py      |  19 +--
 powerline/lint/markedjson/resolver.py    |  19 ++-
 powerline/lint/markedjson/scanner.py     |  59 +++++----
 powerline/lint/markedjson/tokens.py      |  15 ++-
 powerline/renderer.py                    |  14 +--
 powerline/renderers/vim.py               |  14 ++-
 powerline/segments/common.py             |   8 +-
 powerline/segments/vim/__init__.py       |   8 +-
 powerline/shell.py                       |  12 +-
 powerline/theme.py                       |  16 +--
 powerline/vim.py                         |  27 +++--
 tests/lib/__init__.py                    |   6 +-
 tests/test_segments.py                   | 118 ++++++++++--------
 tests/vim.py                             |   6 +-
 tools/colors_find.py                     |   6 +-
 tools/generate_gradients.py              |  18 ++-
 31 files changed, 516 insertions(+), 383 deletions(-)

diff --git a/.local.vimrc b/.local.vimrc
index edf53ee8..c8e1ef38 100644
--- a/.local.vimrc
+++ b/.local.vimrc
@@ -6,3 +6,6 @@
 " [2]: https://github.com/thinca/vim-localrc
 let g:syntastic_python_flake8_args = '--ignore=W191,E501,E128,W291,E126,E101'
 let b:syntastic_checkers = ['flake8']
+unlet! 
g:python_space_error_highlight +let g:pymode_syntax_indent_errors = 0 +let g:pymode_syntax_space_errors = 0 diff --git a/powerline/__init__.py b/powerline/__init__.py index 9587fb6c..f2a7a166 100644 --- a/powerline/__init__.py +++ b/powerline/__init__.py @@ -255,11 +255,13 @@ def finish_common_config(common_config): if sys.version_info < (3,): # `raise exception[0], None, exception[1]` is a SyntaxError in python-3* # Not using ('''…''') because this syntax does not work in python-2.6 - exec(('def reraise(exception):\n' - ' if type(exception) is tuple:\n' - ' raise exception[0], None, exception[1]\n' - ' else:\n' - ' raise exception\n')) + exec(( + 'def reraise(exception):\n' + ' if type(exception) is tuple:\n' + ' raise exception[0], None, exception[1]\n' + ' else:\n' + ' raise exception\n' + )) else: def reraise(exception): if type(exception) is tuple: @@ -302,13 +304,13 @@ class Powerline(object): ''' def __init__(self, - ext, - renderer_module=None, - run_once=False, - logger=None, - use_daemon_threads=True, - shutdown_event=None, - config_loader=None): + ext, + renderer_module=None, + run_once=False, + logger=None, + use_daemon_threads=True, + shutdown_event=None, + config_loader=None): self.ext = ext self.run_once = run_once self.logger = logger @@ -437,12 +439,16 @@ class Powerline(object): or self.ext_config.get('local_themes') != self.prev_ext_config.get('local_themes') ): self.renderer_options['local_themes'] = self.get_local_themes(self.ext_config.get('local_themes')) - load_colorscheme = (load_colorscheme - or not self.prev_ext_config - or self.prev_ext_config['colorscheme'] != self.ext_config['colorscheme']) - load_theme = (load_theme - or not self.prev_ext_config - or self.prev_ext_config['theme'] != self.ext_config['theme']) + load_colorscheme = ( + load_colorscheme + or not self.prev_ext_config + or self.prev_ext_config['colorscheme'] != self.ext_config['colorscheme'] + ) + load_theme = ( + load_theme + or not self.prev_ext_config + or self.prev_ext_config['theme'] != self.ext_config['theme'] + ) self.prev_ext_config = self.ext_config create_renderer = load_colors or load_colorscheme or load_theme or common_config_differs or ext_config_differs diff --git a/powerline/bindings/vim/__init__.py b/powerline/bindings/vim/__init__.py index d030bd43..f3f9783a 100644 --- a/powerline/bindings/vim/__init__.py +++ b/powerline/bindings/vim/__init__.py @@ -86,8 +86,9 @@ else: if not buffer or buffer.number == vim.current.buffer.number: return int(vim.eval('exists("b:{0}")'.format(varname))) else: - return int(vim.eval('has_key(getbufvar({0}, ""), {1})' - .format(buffer.number, varname))) + return int(vim.eval( + 'has_key(getbufvar({0}, ""), {1})'.format(buffer.number, varname) + )) def vim_getwinvar(segment_info, varname): # NOQA result = vim.eval('getwinvar({0}, "{1}")'.format(segment_info['winnr'], varname)) @@ -210,9 +211,15 @@ class VimEnviron(object): @staticmethod def __setitem__(key, value): - return vim.command('let $' + key + '="' - + value.replace('"', '\\"').replace('\\', '\\\\').replace('\n', '\\n').replace('\0', '') - + '"') + return vim.command( + 'let ${0}="{1}"'.format( + key, + value.replace('"', '\\"') + .replace('\\', '\\\\') + .replace('\n', '\\n') + .replace('\0', '') + ) + ) if sys.version_info < (3,): diff --git a/powerline/lib/inotify.py b/powerline/lib/inotify.py index c2b25444..48fe6ae7 100644 --- a/powerline/lib/inotify.py +++ b/powerline/lib/inotify.py @@ -97,9 +97,10 @@ class INotify(object): ONESHOT = 0x80000000 # Only send event once. 
# All events which a program can wait on. - ALL_EVENTS = (ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE | - OPEN | MOVED_FROM | MOVED_TO | CREATE | DELETE | - DELETE_SELF | MOVE_SELF) + ALL_EVENTS = ( + ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE | OPEN | + MOVED_FROM | MOVED_TO | CREATE | DELETE | DELETE_SELF | MOVE_SELF + ) # See CLOEXEC = 0x80000 diff --git a/powerline/lib/shell.py b/powerline/lib/shell.py index 4418c3d9..19b25c8b 100644 --- a/powerline/lib/shell.py +++ b/powerline/lib/shell.py @@ -82,8 +82,11 @@ except ImportError: # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) + return ( + os.path.exists(fn) + and os.access(fn, mode) + and not os.path.isdir(fn) + ) # If we're given a path with a directory part, look it up directly rather # than referring to PATH directories. This includes checking relative to the @@ -101,7 +104,7 @@ except ImportError: if sys.platform == "win32": # The current directory takes precedence on Windows. - if not os.curdir in path: + if os.curdir not in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. @@ -122,7 +125,7 @@ except ImportError: seen = set() for dir in path: normdir = os.path.normcase(dir) - if not normdir in seen: + if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) diff --git a/powerline/lib/tree_watcher.py b/powerline/lib/tree_watcher.py index c0ff4c3c..6a439dd7 100644 --- a/powerline/lib/tree_watcher.py +++ b/powerline/lib/tree_watcher.py @@ -94,13 +94,15 @@ class INotifyTreeWatcher(INotify): def add_watch(self, path): import ctypes bpath = path if isinstance(path, bytes) else path.encode(self.fenc) - wd = self._add_watch(self._inotify_fd, ctypes.c_char_p(bpath), - # Ignore symlinks and watch only directories - self.DONT_FOLLOW | self.ONLYDIR | + wd = self._add_watch( + self._inotify_fd, ctypes.c_char_p(bpath), + # Ignore symlinks and watch only directories + self.DONT_FOLLOW | self.ONLYDIR | - self.MODIFY | self.CREATE | self.DELETE | - self.MOVE_SELF | self.MOVED_FROM | self.MOVED_TO | - self.ATTRIB | self.DELETE_SELF) + self.MODIFY | self.CREATE | self.DELETE | + self.MOVE_SELF | self.MOVED_FROM | self.MOVED_TO | + self.ATTRIB | self.DELETE_SELF + ) if wd == -1: eno = ctypes.get_errno() if eno == errno.ENOTDIR: diff --git a/powerline/lib/vcs/git.py b/powerline/lib/vcs/git.py index 27a1e71a..42bdd321 100644 --- a/powerline/lib/vcs/git.py +++ b/powerline/lib/vcs/git.py @@ -139,13 +139,14 @@ try: untracked_column = 'U' continue - if status & (git.GIT_STATUS_WT_DELETED - | git.GIT_STATUS_WT_MODIFIED): + if status & (git.GIT_STATUS_WT_DELETED | git.GIT_STATUS_WT_MODIFIED): wt_column = 'D' - if status & (git.GIT_STATUS_INDEX_NEW - | git.GIT_STATUS_INDEX_MODIFIED - | git.GIT_STATUS_INDEX_DELETED): + if status & ( + git.GIT_STATUS_INDEX_NEW + | git.GIT_STATUS_INDEX_MODIFIED + | git.GIT_STATUS_INDEX_DELETED + ): index_column = 'I' r = wt_column + index_column + untracked_column return r if r != ' ' else None diff --git a/powerline/lint/__init__.py b/powerline/lint/__init__.py index 7ce152fb..995bc1f6 100644 --- a/powerline/lint/__init__.py +++ b/powerline/lint/__init__.py @@ -128,14 +128,16 @@ class Spec(object): def check_type(self, value, context_mark, data, context, echoerr, types): if type(value.value) not in types: - echoerr(context=self.cmsg.format(key=context_key(context)), - 
context_mark=context_mark, - problem='{0!r} must be a {1} instance, not {2}'.format( - value, - list_sep.join((t.__name__ for t in types)), - type(value.value).__name__ - ), - problem_mark=value.mark) + echoerr( + context=self.cmsg.format(key=context_key(context)), + context_mark=context_mark, + problem='{0!r} must be a {1} instance, not {2}'.format( + value, + list_sep.join((t.__name__ for t in types)), + type(value.value).__name__ + ), + problem_mark=value.mark + ) return False, True return True, False @@ -143,9 +145,9 @@ class Spec(object): proceed, echo, hadproblem = func(value, data, context, echoerr) if echo and hadproblem: echoerr(context=self.cmsg.format(key=context_key(context)), - context_mark=context_mark, - problem=msg_func(value), - problem_mark=value.mark) + context_mark=context_mark, + problem=msg_func(value), + problem_mark=value.mark) return proceed, hadproblem def check_list(self, value, context_mark, data, context, echoerr, item_func, msg_func): @@ -165,9 +167,9 @@ class Spec(object): proceed, echo, fhadproblem = item_func(item, data, context, echoerr) if echo and fhadproblem: echoerr(context=self.cmsg.format(key=context_key(context) + '/list item ' + unicode(i)), - context_mark=value.mark, - problem=msg_func(item), - problem_mark=item.mark) + context_mark=value.mark, + problem=msg_func(item), + problem_mark=item.mark) if fhadproblem: hadproblem = True if not proceed: @@ -376,9 +378,9 @@ class Spec(object): if not valspec.isoptional: hadproblem = True echoerr(context=self.cmsg.format(key=context_key(context)), - context_mark=None, - problem='required key is missing: {0}'.format(key), - problem_mark=value.mark) + context_mark=None, + problem='required key is missing: {0}'.format(key), + problem_mark=value.mark) for key in value.keys(): if key not in self.keys: for keyfunc, vali in self.uspecs: @@ -405,9 +407,9 @@ class Spec(object): hadproblem = True if self.ufailmsg: echoerr(context=self.cmsg.format(key=context_key(context)), - context_mark=None, - problem=self.ufailmsg(key), - problem_mark=key.mark) + context_mark=None, + problem=self.ufailmsg(key), + problem_mark=key.mark) return True, hadproblem @@ -435,19 +437,19 @@ def check_matcher_func(ext, match_name, data, context, echoerr): func = getattr(__import__(str(match_module), fromlist=[str(match_function)]), str(match_function)) except ImportError: echoerr(context='Error while loading matcher functions', - problem='failed to load module {0}'.format(match_module), - problem_mark=match_name.mark) + problem='failed to load module {0}'.format(match_module), + problem_mark=match_name.mark) return True, True except AttributeError: echoerr(context='Error while loading matcher functions', - problem='failed to load matcher function {0}'.format(match_function), - problem_mark=match_name.mark) + problem='failed to load matcher function {0}'.format(match_function), + problem_mark=match_name.mark) return True, True if not callable(func): echoerr(context='Error while loading matcher functions', - problem='loaded "function" {0} is not callable'.format(match_function), - problem_mark=match_name.mark) + problem='loaded "function" {0} is not callable'.format(match_function), + problem_mark=match_name.mark) return True, True if hasattr(func, 'func_code') and hasattr(func.func_code, 'co_argcount'): @@ -470,15 +472,15 @@ def check_ext(ext, data, context, echoerr): if ext not in data['lists']['exts']: hadproblem = True echoerr(context='Error while loading {0} extension configuration'.format(ext), - context_mark=ext.mark, - 
problem='extension configuration does not exist') + context_mark=ext.mark, + problem='extension configuration does not exist') else: for typ in ('themes', 'colorschemes'): if ext not in data['configs'][typ] and not data['configs']['top_' + typ]: hadproblem = True echoerr(context='Error while loading {0} extension configuration'.format(ext), - context_mark=ext.mark, - problem='{0} configuration does not exist'.format(typ)) + context_mark=ext.mark, + problem='{0} configuration does not exist'.format(typ)) else: hadsomedirs = True return hadsomedirs, hadproblem @@ -492,14 +494,16 @@ def check_config(d, theme, data, context, echoerr): ext = context[-3][0] if ext not in data['lists']['exts']: echoerr(context='Error while loading {0} extension configuration'.format(ext), - context_mark=ext.mark, - problem='extension configuration does not exist') + context_mark=ext.mark, + problem='extension configuration does not exist') return True, False, True - if ((ext not in data['configs'][d] or theme not in data['configs'][d][ext]) - and theme not in data['configs']['top_' + d]): + if ( + (ext not in data['configs'][d] or theme not in data['configs'][d][ext]) + and theme not in data['configs']['top_' + d] + ): echoerr(context='Error while loading {0} from {1} extension configuration'.format(d[:-1], ext), - problem='failed to find configuration file {0}/{1}/{2}.json'.format(d, ext, theme), - problem_mark=theme.mark) + problem='failed to find configuration file {0}/{1}/{2}.json'.format(d, ext, theme), + problem_mark=theme.mark) return True, False, True return True, False, False @@ -507,9 +511,9 @@ def check_config(d, theme, data, context, echoerr): def check_top_theme(theme, data, context, echoerr): if theme not in data['configs']['top_themes']: echoerr(context='Error while checking extension configuration (key {key})'.format(key=context_key(context)), - context_mark=context[-2][0].mark, - problem='failed to find top theme {0}'.format(theme), - problem_mark=theme.mark) + context_mark=context[-2][0].mark, + problem='failed to find top theme {0}'.format(theme), + problem_mark=theme.mark) return True, False, True return True, False, False @@ -778,8 +782,8 @@ def check_key_compatibility(segment, data, context, echoerr): if segment_type not in type_keys: echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)), - problem='found segment with unknown type {0}'.format(segment_type), - problem_mark=segment_type.mark) + problem='found segment with unknown type {0}'.format(segment_type), + problem_mark=segment_type.mark) return False, False, True hadproblem = False @@ -828,8 +832,8 @@ def check_segment_module(module, data, context, echoerr): if echoerr.logger.level >= logging.DEBUG: echoerr.logger.exception(e) echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)), - problem='failed to import module {0}'.format(module), - problem_mark=module.mark) + problem='failed to import module {0}'.format(module), + problem_mark=module.mark) return True, False, True return True, False, False @@ -878,19 +882,19 @@ def import_segment(name, data, context, echoerr, module=None): func = getattr(__import__(str(module), fromlist=[str(name)]), str(name)) except ImportError: echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)), - problem='failed to import module {0}'.format(module), - problem_mark=module.mark) + problem='failed to import module {0}'.format(module), + problem_mark=module.mark) return None except 
AttributeError: echoerr(context='Error while loading segment function (key {key})'.format(key=context_key(context)), - problem='failed to load function {0} from module {1}'.format(name, module), - problem_mark=name.mark) + problem='failed to load function {0} from module {1}'.format(name, module), + problem_mark=name.mark) return None if not callable(func): echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)), - problem='imported "function" {0} from module {1} is not callable'.format(name, module), - problem_mark=module.mark) + problem='imported "function" {0} from module {1} is not callable'.format(name, module), + problem_mark=module.mark) return None return func @@ -933,11 +937,15 @@ def check_segment_name(name, data, context, echoerr): if hl_groups: greg = re.compile(r'``([^`]+)``( \(gradient\))?') - hl_groups = [[greg.match(subs).groups() for subs in s.split(' or ')] - for s in (list_sep.join(hl_groups)).split(', ')] + hl_groups = [ + [greg.match(subs).groups() for subs in s.split(' or ')] + for s in (list_sep.join(hl_groups)).split(', ') + ] for required_pack in hl_groups: - rs = [hl_exists(hl_group, data, context, echoerr, allow_gradients=('force' if gradient else False)) - for hl_group, gradient in required_pack] + rs = [ + hl_exists(hl_group, data, context, echoerr, allow_gradients=('force' if gradient else False)) + for hl_group, gradient in required_pack + ] if all(rs): echoerr( context='Error while checking theme (key {key})'.format(key=context_key(context)), @@ -983,8 +991,8 @@ def check_segment_name(name, data, context, echoerr): and not any(((name in theme.get('segment_data', {})) for theme in data['top_themes'].values())) ): echoerr(context='Error while checking segments (key {key})'.format(key=context_key(context)), - problem='found useless use of name key (such name is not present in theme/segment_data)', - problem_mark=name.mark) + problem='found useless use of name key (such name is not present in theme/segment_data)', + problem_mark=name.mark) return True, False, False @@ -1025,14 +1033,14 @@ def hl_exists(hl_group, data, context, echoerr, allow_gradients=False): r.append(colorscheme) continue if allow_gradients == 'force' and not hadgradient: - echoerr( - context='Error while checking highlight group in theme (key {key})'.format( - key=context_key(context)), - context_mark=getattr(hl_group, 'mark', None), - problem='group {0} should have at least one gradient color, but it has no'.format(hl_group), - problem_mark=group_config.mark - ) - r.append(colorscheme) + echoerr( + context='Error while checking highlight group in theme (key {key})'.format( + key=context_key(context)), + context_mark=getattr(hl_group, 'mark', None), + problem='group {0} should have at least one gradient color, but it has no'.format(hl_group), + problem_mark=group_config.mark + ) + r.append(colorscheme) return r @@ -1109,8 +1117,8 @@ def check_segment_data_key(key, data, context, echoerr): else: if data['theme_type'] != 'top': echoerr(context='Error while checking segment data', - problem='found key {0} that cannot be associated with any segment'.format(key), - problem_mark=key.mark) + problem='found key {0} that cannot be associated with any segment'.format(key), + problem_mark=key.mark) return True, False, True return True, False, False @@ -1141,9 +1149,9 @@ def check_args_variant(func, args, data, context, echoerr): if not all_args >= present_args: echoerr(context='Error while checking segment arguments (key {key})'.format(key=context_key(context)), - 
context_mark=args.mark, - problem='found unknown keys: {0}'.format(list_sep.join(present_args - all_args)), - problem_mark=next(iter(present_args - all_args)).mark) + context_mark=args.mark, + problem='found unknown keys: {0}'.format(list_sep.join(present_args - all_args)), + problem_mark=next(iter(present_args - all_args)).mark) hadproblem = True if isinstance(func, ThreadedSegment): @@ -1179,8 +1187,8 @@ def check_args(get_functions, args, data, context, echoerr): new_echoerr.echo_all() else: echoerr(context='Error while checking segment arguments (key {key})'.format(key=context_key(context)), - context_mark=context[-2][1].mark, - problem='no suitable segments found') + context_mark=context[-2][1].mark, + problem='no suitable segments found') return True, False, hadproblem diff --git a/powerline/lint/markedjson/composer.py b/powerline/lint/markedjson/composer.py index 303e6f23..25e60109 100644 --- a/powerline/lint/markedjson/composer.py +++ b/powerline/lint/markedjson/composer.py @@ -38,9 +38,12 @@ class Composer: # Ensure that the stream contains no more documents. if not self.check_event(StreamEndEvent): event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) + raise ComposerError( + "expected a single document in the stream", + document.start_mark, + "but found another document", + event.start_mark + ) # Drop the STREAM-END event. self.get_event() @@ -75,8 +78,7 @@ class Composer: tag = event.tag if tag is None or tag == '!': tag = self.resolve(ScalarNode, event.value, event.implicit, event.start_mark) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) + node = ScalarNode(tag, event.value, event.start_mark, event.end_mark, style=event.style) return node def compose_sequence_node(self): @@ -84,9 +86,7 @@ class Composer: tag = start_event.tag if tag is None or tag == '!': tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) + node = SequenceNode(tag, [], start_event.start_mark, None, flow_style=start_event.flow_style) index = 0 while not self.check_event(SequenceEndEvent): node.value.append(self.compose_node(node, index)) @@ -100,17 +100,15 @@ class Composer: tag = start_event.tag if tag is None or tag == '!': tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) + node = MappingNode(tag, [], start_event.start_mark, None, flow_style=start_event.flow_style) while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() + # key_event = self.peek_event() item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) + # if item_key in node.value: + # raise ComposerError("while composing a mapping", start_event.start_mark, + # "found duplicate key", key_event.start_mark) item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value + # node.value[item_key] = item_value node.value.append((item_key, item_value)) end_event = self.get_event() node.end_mark = end_event.end_mark diff --git a/powerline/lint/markedjson/constructor.py b/powerline/lint/markedjson/constructor.py index bdc5c6e3..f887d9e2 100644 --- a/powerline/lint/markedjson/constructor.py +++ 
b/powerline/lint/markedjson/constructor.py @@ -95,39 +95,53 @@ class BaseConstructor: @marked def construct_scalar(self, node): if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) + raise ConstructorError( + None, None, + "expected a scalar node, but found %s" % node.id, + node.start_mark + ) return node.value def construct_sequence(self, node, deep=False): if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] + raise ConstructorError( + None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark + ) + return [ + self.construct_object(child, deep=deep) + for child in node.value + ] @marked def construct_mapping(self, node, deep=False): if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) + raise ConstructorError( + None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark + ) mapping = {} for key_node, value_node in node.value: key = self.construct_object(key_node, deep=deep) if not isinstance(key, collections.Hashable): - self.echoerr('While constructing a mapping', node.start_mark, - 'found unhashable key', key_node.start_mark) + self.echoerr( + 'While constructing a mapping', node.start_mark, + 'found unhashable key', key_node.start_mark + ) continue elif type(key.value) != unicode: - self.echoerr('Error while constructing a mapping', node.start_mark, - 'found key that is not a string', key_node.start_mark) + self.echoerr( + 'Error while constructing a mapping', node.start_mark, + 'found key that is not a string', key_node.start_mark + ) continue elif key in mapping: - self.echoerr('Error while constructing a mapping', node.start_mark, - 'found duplicate key', key_node.start_mark) + self.echoerr( + 'Error while constructing a mapping', node.start_mark, + 'found duplicate key', key_node.start_mark + ) continue value = self.construct_object(value_node, deep=deep) mapping[key] = value @@ -135,7 +149,7 @@ class BaseConstructor: @classmethod def add_constructor(cls, tag, constructor): - if not 'yaml_constructors' in cls.__dict__: + if 'yaml_constructors' not in cls.__dict__: cls.yaml_constructors = cls.yaml_constructors.copy() cls.yaml_constructors[tag] = constructor @@ -162,19 +176,24 @@ class Constructor(BaseConstructor): submerge = [] for subnode in value_node.value: if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) + raise ConstructorError( + "while constructing a mapping", + node.start_mark, + "expected a mapping for merging, but found %s" % subnode.id, + subnode.start_mark + ) self.flatten_mapping(subnode) submerge.append(subnode.value) submerge.reverse() for value in submerge: merge.extend(value) else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) + raise ConstructorError( + "while constructing a mapping", + node.start_mark, + ("expected a mapping or list of mappings for merging, but found %s" % value_node.id), + value_node.start_mark + ) elif key_node.tag == 'tag:yaml.org,2002:value': key_node.tag 
= 'tag:yaml.org,2002:str' index += 1 @@ -237,9 +256,11 @@ class Constructor(BaseConstructor): data.update(value) def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag, - node.start_mark) + raise ConstructorError( + None, None, + "could not determine a constructor for the tag %r" % node.tag, + node.start_mark + ) Constructor.add_constructor( diff --git a/powerline/lint/markedjson/error.py b/powerline/lint/markedjson/error.py index 781b912b..1e1c7214 100644 --- a/powerline/lint/markedjson/error.py +++ b/powerline/lint/markedjson/error.py @@ -53,13 +53,15 @@ class Mark: break snippet = [self.buffer[start:self.pointer], self.buffer[self.pointer], self.buffer[self.pointer + 1:end]] snippet = [strtrans(s) for s in snippet] - return (' ' * indent + head + ''.join(snippet) + tail + '\n' - + ' ' * (indent + len(head) + len(snippet[0])) + '^') + return ( + ' ' * indent + head + ''.join(snippet) + tail + '\n' + + ' ' * (indent + len(head) + len(snippet[0])) + '^' + ) def __str__(self): snippet = self.get_snippet() - where = (" in \"%s\", line %d, column %d" - % (self.name, self.line + 1, self.column + 1)) + where = (" in \"%s\", line %d, column %d" % ( + self.name, self.line + 1, self.column + 1)) if snippet is not None: where += ":\n" + snippet if type(where) is str: @@ -77,11 +79,15 @@ def format_error(context=None, context_mark=None, problem=None, problem_mark=Non lines = [] if context is not None: lines.append(context) - if context_mark is not None \ - and (problem is None or problem_mark is None - or context_mark.name != problem_mark.name - or context_mark.line != problem_mark.line - or context_mark.column != problem_mark.column): + if ( + context_mark is not None + and ( + problem is None or problem_mark is None + or context_mark.name != problem_mark.name + or context_mark.line != problem_mark.line + or context_mark.column != problem_mark.column + ) + ): lines.append(str(context_mark)) if problem is not None: lines.append(problem) @@ -93,7 +99,5 @@ def format_error(context=None, context_mark=None, problem=None, problem_mark=Non class MarkedError(Exception): - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - Exception.__init__(self, format_error(context, context_mark, problem, - problem_mark, note)) + def __init__(self, context=None, context_mark=None, problem=None, problem_mark=None, note=None): + Exception.__init__(self, format_error(context, context_mark, problem, problem_mark, note)) diff --git a/powerline/lint/markedjson/events.py b/powerline/lint/markedjson/events.py index 47e2667f..587c5ae4 100644 --- a/powerline/lint/markedjson/events.py +++ b/powerline/lint/markedjson/events.py @@ -7,10 +7,14 @@ class Event(object): self.end_mark = end_mark def __repr__(self): - attributes = [key for key in ['implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) + attributes = [ + key for key in ['implicit', 'value'] + if hasattr(self, key) + ] + arguments = ', '.join([ + '%s=%r' % (key, getattr(self, key)) + for key in attributes + ]) return '%s(%s)' % (self.__class__.__name__, arguments) @@ -21,8 +25,7 @@ class NodeEvent(Event): class CollectionStartEvent(NodeEvent): - def __init__(self, implicit, start_mark=None, end_mark=None, - flow_style=None): + def __init__(self, implicit, start_mark=None, end_mark=None, flow_style=None): self.tag = None self.implicit = implicit self.start_mark = 
start_mark @@ -49,8 +52,7 @@ class StreamEndEvent(Event): class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): + def __init__(self, start_mark=None, end_mark=None, explicit=None, version=None, tags=None): self.start_mark = start_mark self.end_mark = end_mark self.explicit = explicit @@ -59,8 +61,7 @@ class DocumentStartEvent(Event): class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): + def __init__(self, start_mark=None, end_mark=None, explicit=None): self.start_mark = start_mark self.end_mark = end_mark self.explicit = explicit @@ -71,8 +72,7 @@ class AliasEvent(NodeEvent): class ScalarEvent(NodeEvent): - def __init__(self, implicit, value, - start_mark=None, end_mark=None, style=None): + def __init__(self, implicit, value, start_mark=None, end_mark=None, style=None): self.tag = None self.implicit = implicit self.value = value diff --git a/powerline/lint/markedjson/markedvalue.py b/powerline/lint/markedjson/markedvalue.py index 457b66f6..6c619c56 100644 --- a/powerline/lint/markedjson/markedvalue.py +++ b/powerline/lint/markedjson/markedvalue.py @@ -110,11 +110,15 @@ def gen_marked_value(value, mark, use_special_classes=True): elif func not in set(('__init__', '__new__', '__getattribute__')): if func in set(('__eq__',)): # HACK to make marked dictionaries always work - exec (('def {0}(self, *args):\n' - ' return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])').format(func)) + exec (( + 'def {0}(self, *args):\n' + ' return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])' + ).format(func)) else: - exec (('def {0}(self, *args, **kwargs):\n' - ' return self.value.{0}(*args, **kwargs)\n').format(func)) + exec (( + 'def {0}(self, *args, **kwargs):\n' + ' return self.value.{0}(*args, **kwargs)\n' + ).format(func)) classcache[value.__class__] = Marked return Marked(value, mark) diff --git a/powerline/lint/markedjson/nodes.py b/powerline/lint/markedjson/nodes.py index 11ebb3ea..9325a64c 100644 --- a/powerline/lint/markedjson/nodes.py +++ b/powerline/lint/markedjson/nodes.py @@ -7,18 +7,18 @@ class Node(object): def __repr__(self): value = self.value - #if isinstance(value, list): - # if len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' % len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) + # if isinstance(value, list): + # if len(value) == 0: + # value = '' + # elif len(value) == 1: + # value = '<1 item>' + # else: + # value = '<%d items>' % len(value) + # else: + # if len(value) > 75: + # value = repr(value[:70]+u' ... 
') + # else: + # value = repr(value) value = repr(value) return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) @@ -26,8 +26,7 @@ class Node(object): class ScalarNode(Node): id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): + def __init__(self, tag, value, start_mark=None, end_mark=None, style=None): self.tag = tag self.value = value self.start_mark = start_mark @@ -36,8 +35,7 @@ class ScalarNode(Node): class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): + def __init__(self, tag, value, start_mark=None, end_mark=None, flow_style=None): self.tag = tag self.value = value self.start_mark = start_mark diff --git a/powerline/lint/markedjson/parser.py b/powerline/lint/markedjson/parser.py index 998de6db..aa5f7ade 100644 --- a/powerline/lint/markedjson/parser.py +++ b/powerline/lint/markedjson/parser.py @@ -58,8 +58,7 @@ class Parser: def parse_stream_start(self): # Parse the stream start. token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) + event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding) # Prepare the next state. self.state = self.parse_implicit_document_start @@ -86,9 +85,10 @@ class Parser: # Parse an explicit document. if not self.check_token(StreamEndToken): token = self.peek_token() - self.echoerr(None, None, - "expected '', but found %r" % token.id, - token.start_mark) + self.echoerr( + None, None, + ("expected '', but found %r" % token.id), token.start_mark + ) return StreamEndEvent(token.start_mark, token.end_mark) else: # Parse the end of the stream. @@ -127,24 +127,23 @@ class Parser: implicit = (True, False) else: implicit = (False, True) - event = ScalarEvent(implicit, token.value, - start_mark, end_mark, style=token.style) + event = ScalarEvent(implicit, token.value, start_mark, end_mark, style=token.style) self.state = self.states.pop() elif self.check_token(FlowSequenceStartToken): end_mark = self.peek_token().end_mark - event = SequenceStartEvent(implicit, - start_mark, end_mark, flow_style=True) + event = SequenceStartEvent(implicit, start_mark, end_mark, flow_style=True) self.state = self.parse_flow_sequence_first_entry elif self.check_token(FlowMappingStartToken): end_mark = self.peek_token().end_mark - event = MappingStartEvent(implicit, - start_mark, end_mark, flow_style=True) + event = MappingStartEvent(implicit, start_mark, end_mark, flow_style=True) self.state = self.parse_flow_mapping_first_key else: token = self.peek_token() - raise ParserError("while parsing a flow node", start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) + raise ParserError( + "while parsing a flow node", start_mark, + "expected the node content, but found %r" % token.id, + token.start_mark + ) return event def parse_flow_sequence_first_entry(self): @@ -159,12 +158,16 @@ class Parser: self.get_token() if self.check_token(FlowSequenceEndToken): token = self.peek_token() - self.echoerr("While parsing a flow sequence", self.marks[-1], - "expected sequence value, but got %r" % token.id, token.start_mark) + self.echoerr( + "While parsing a flow sequence", self.marks[-1], + ("expected sequence value, but got %r" % token.id), token.start_mark + ) else: token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a 
flow sequence", self.marks[-1], + ("expected ',' or ']', but got %r" % token.id), token.start_mark + ) if not self.check_token(FlowSequenceEndToken): self.states.append(self.parse_flow_sequence_entry) @@ -192,22 +195,27 @@ class Parser: self.get_token() if self.check_token(FlowMappingEndToken): token = self.peek_token() - self.echoerr("While parsing a flow mapping", self.marks[-1], - "expected mapping key, but got %r" % token.id, token.start_mark) + self.echoerr( + "While parsing a flow mapping", self.marks[-1], + ("expected mapping key, but got %r" % token.id), token.start_mark + ) else: token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a flow mapping", self.marks[-1], + ("expected ',' or '}', but got %r" % token.id), token.start_mark + ) if self.check_token(KeyToken): token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): + if not self.check_token(ValueToken, FlowEntryToken, FlowMappingEndToken): self.states.append(self.parse_flow_mapping_value) return self.parse_node() else: token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected value, but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a flow mapping", self.marks[-1], + ("expected value, but got %r" % token.id), token.start_mark + ) elif not self.check_token(FlowMappingEndToken): token = self.peek_token() expect_key = self.check_token(ValueToken, FlowEntryToken) @@ -216,12 +224,16 @@ class Parser: expect_key = self.check_token(ValueToken) if expect_key: - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected string key, but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a flow mapping", self.marks[-1], + ("expected string key, but got %r" % token.id), token.start_mark + ) else: token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ':', but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a flow mapping", self.marks[-1], + ("expected ':', but got %r" % token.id), token.start_mark + ) token = self.get_token() event = MappingEndEvent(token.start_mark, token.end_mark) self.state = self.states.pop() @@ -236,5 +248,7 @@ class Parser: return self.parse_node() token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected mapping value, but got %r" % token.id, token.start_mark) + raise ParserError( + "while parsing a flow mapping", self.marks[-1], + ("expected mapping value, but got %r" % token.id), token.start_mark + ) diff --git a/powerline/lint/markedjson/reader.py b/powerline/lint/markedjson/reader.py index f59605ee..32f5d7b6 100644 --- a/powerline/lint/markedjson/reader.py +++ b/powerline/lint/markedjson/reader.py @@ -89,9 +89,11 @@ class Reader(object): match = NON_PRINTABLE.search(data) if match: self.update_pointer(match.start()) - raise ReaderError('while reading from stream', None, - 'found special characters which are not allowed', - Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer)) + raise ReaderError( + 'while reading from stream', None, + 'found special characters which are not allowed', + Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer) + ) def update(self, length): if self.raw_buffer is None: @@ -102,8 +104,7 @@ class 
Reader(object): if not self.eof: self.update_raw() try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) + data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof) except UnicodeDecodeError as exc: character = self.raw_buffer[exc.start] position = self.stream_pointer - len(self.raw_buffer) + exc.start @@ -112,9 +113,11 @@ class Reader(object): self.full_buffer += data + '<' + str(ord(character)) + '>' self.raw_buffer = self.raw_buffer[converted:] self.update_pointer(exc.start - 1) - raise ReaderError('while reading from stream', None, - 'found character #x%04x that cannot be decoded by UTF-8 codec' % ord(character), - Mark(self.name, self.line, self.column, self.full_buffer, position)) + raise ReaderError( + 'while reading from stream', None, + 'found character #x%04x that cannot be decoded by UTF-8 codec' % ord(character), + Mark(self.name, self.line, self.column, self.full_buffer, position) + ) self.buffer += data self.full_buffer += data self.raw_buffer = self.raw_buffer[converted:] diff --git a/powerline/lint/markedjson/resolver.py b/powerline/lint/markedjson/resolver.py index e453f61e..0196c70b 100644 --- a/powerline/lint/markedjson/resolver.py +++ b/powerline/lint/markedjson/resolver.py @@ -24,7 +24,7 @@ class BaseResolver: @classmethod def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: + if 'yaml_implicit_resolvers' not in cls.__dict__: cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() if first is None: first = [None] @@ -39,8 +39,7 @@ class BaseResolver: if current_node: depth = len(self.resolver_prefix_paths) for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): + if self.check_resolver_prefix(depth, path, kind, current_node, current_index): if len(path) > depth: prefix_paths.append((path, kind)) else: @@ -60,8 +59,7 @@ class BaseResolver: self.resolver_exact_paths.pop() self.resolver_prefix_paths.pop() - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): + def check_resolver_prefix(self, depth, path, kind, current_node, current_index): node_check, index_check = path[depth - 1] if isinstance(node_check, str): if current_node.tag != node_check: @@ -75,8 +73,7 @@ class BaseResolver: and current_index is None): return if isinstance(index_check, str): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): + if not (isinstance(current_index, ScalarNode) and index_check == current_index.value): return elif isinstance(index_check, int) and not isinstance(index_check, bool): if index_check != current_index: @@ -94,9 +91,11 @@ class BaseResolver: if regexp.match(value): return tag else: - self.echoerr('While resolving plain scalar', None, - 'expected floating-point value, integer, null or boolean, but got %r' % value, - mark) + self.echoerr( + 'While resolving plain scalar', None, + 'expected floating-point value, integer, null or boolean, but got %r' % value, + mark + ) return self.DEFAULT_SCALAR_TAG if kind is ScalarNode: return self.DEFAULT_SCALAR_TAG diff --git a/powerline/lint/markedjson/scanner.py b/powerline/lint/markedjson/scanner.py index 2183f651..e4defc26 100644 --- a/powerline/lint/markedjson/scanner.py +++ b/powerline/lint/markedjson/scanner.py @@ -48,9 +48,9 @@ class Scanner: # input data to Unicode. It also adds NUL to the end. 
# # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. + # self.peek(i=0) # peek the next i-th character + # self.prefix(l=1) # peek the next l characters + # self.forward(l=1) # read the next l characters and move the pointer. # Had we reached the end of the stream? self.done = False @@ -83,7 +83,7 @@ class Scanner: # Keep track of possible simple keys. This is a dictionary. The key # is `flow_level`; there can be no more that one possible simple key # for each level. The value is a SimpleKey record: - # (token_number, index, line, column, mark) + # (token_number, index, line, column, mark) # A simple key may start with SCALAR(flow), '[', or '{' tokens. self.possible_simple_keys = {} @@ -179,9 +179,11 @@ class Scanner: return self.fetch_plain() # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" % ch, - self.get_mark()) + raise ScannerError( + "while scanning for the next token", None, + "found character %r that cannot start any token" % ch, + self.get_mark() + ) # Simple keys treatment. @@ -189,10 +191,10 @@ class Scanner: # Return the number of the nearest possible simple key. Actually we # don't need to loop through the whole dictionary. We may replace it # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number + # if not self.possible_simple_keys: + # return None + # return self.possible_simple_keys[ + # min(self.possible_simple_keys.keys())].token_number min_token_number = None for level in self.possible_simple_keys: key = self.possible_simple_keys[level] @@ -214,15 +216,14 @@ class Scanner: def save_possible_simple_key(self): # The next token may start a simple key. We check if it's possible # and save its position. This function is called for - # SCALAR(flow), '[', and '{'. + # SCALAR(flow), '[', and '{'. # The next token might be a simple key. Let's save it's number and # position. if self.allow_simple_key: self.remove_possible_simple_key() token_number = self.tokens_taken + len(self.tokens) - key = SimpleKey(token_number, - self.index, self.line, self.column, self.get_mark()) + key = SimpleKey(token_number, self.index, self.line, self.column, self.get_mark()) self.possible_simple_keys[self.flow_level] = key def remove_possible_simple_key(self): @@ -311,8 +312,7 @@ class Scanner: # Add KEY. key = self.possible_simple_keys[self.flow_level] del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number - self.tokens_taken, - KeyToken(key.mark, key.mark)) + self.tokens.insert(key.token_number - self.tokens_taken, KeyToken(key.mark, key.mark)) # There cannot be two simple keys one after another. 
self.allow_simple_key = False @@ -423,15 +423,20 @@ class Scanner: self.forward() for k in range(length): if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k)), self.get_mark()) + raise ScannerError( + "while scanning a double-quoted scalar", start_mark, + "expected escape sequence of %d hexdecimal numbers, but found %r" % ( + length, self.peek(k)), + self.get_mark() + ) code = int(self.prefix(length), 16) chunks.append(chr(code)) self.forward(length) else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch, self.get_mark()) + raise ScannerError( + "while scanning a double-quoted scalar", start_mark, + ("found unknown escape character %r" % ch), self.get_mark() + ) else: return chunks @@ -445,11 +450,15 @@ class Scanner: self.forward(length) ch = self.peek() if ch == '\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) + raise ScannerError( + "while scanning a quoted scalar", start_mark, + "found unexpected end of stream", self.get_mark() + ) elif ch == '\n': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected line end", self.get_mark()) + raise ScannerError( + "while scanning a quoted scalar", start_mark, + "found unexpected line end", self.get_mark() + ) else: chunks.append(whitespaces) return chunks diff --git a/powerline/lint/markedjson/tokens.py b/powerline/lint/markedjson/tokens.py index 8c5b38c8..15b1836c 100644 --- a/powerline/lint/markedjson/tokens.py +++ b/powerline/lint/markedjson/tokens.py @@ -4,19 +4,22 @@ class Token(object): self.end_mark = end_mark def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] + attributes = [ + key for key in self.__dict__ + if not key.endswith('_mark') + ] attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) + arguments = ', '.join([ + '%s=%r' % (key, getattr(self, key)) + for key in attributes + ]) return '%s(%s)' % (self.__class__.__name__, arguments) class StreamStartToken(Token): id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): + def __init__(self, start_mark=None, end_mark=None, encoding=None): self.start_mark = start_mark self.end_mark = end_mark self.encoding = encoding diff --git a/powerline/renderer.py b/powerline/renderer.py index 63bc1454..ec84e198 100644 --- a/powerline/renderer.py +++ b/powerline/renderer.py @@ -90,13 +90,13 @@ class Renderer(object): ''' def __init__(self, - theme_config, - local_themes, - theme_kwargs, - colorscheme, - pl, - ambiwidth=1, - **options): + theme_config, + local_themes, + theme_kwargs, + colorscheme, + pl, + ambiwidth=1, + **options): self.__dict__.update(options) self.theme_config = theme_config theme_kwargs['pl'] = pl diff --git a/powerline/renderers/vim.py b/powerline/renderers/vim.py index f4c2bf74..50586194 100644 --- a/powerline/renderers/vim.py +++ b/powerline/renderers/vim.py @@ -154,12 +154,14 @@ class VimRenderer(Renderer): hl_group['attr'].append('italic') if attr & ATTR_UNDERLINE: hl_group['attr'].append('underline') - hl_group['name'] = ('Pl_' + - str(hl_group['ctermfg']) + '_' + - str(hl_group['guifg']) + '_' + - str(hl_group['ctermbg']) + '_' + - str(hl_group['guibg']) + '_' + - ''.join(hl_group['attr'])) + hl_group['name'] = ( + 
+				'Pl_' +
+				str(hl_group['ctermfg']) + '_' +
+				str(hl_group['guifg']) + '_' +
+				str(hl_group['ctermbg']) + '_' +
+				str(hl_group['guibg']) + '_' +
+				''.join(hl_group['attr'])
+			)
 			self.hl_groups[(fg, bg, attr)] = hl_group
 			vim.command('hi {group} ctermfg={ctermfg} guifg={guifg} guibg={guibg} ctermbg={ctermbg} cterm={attr} gui={attr}'.format(
 				group=hl_group['name'],
diff --git a/powerline/segments/common.py b/powerline/segments/common.py
index eaef499f..da07c7a0 100644
--- a/powerline/segments/common.py
+++ b/powerline/segments/common.py
@@ -368,9 +368,11 @@ class WeatherSegment(ThreadedSegment):
 		# only in .update()
 		if not self.location:
 			location_data = json.loads(urllib_read('http://freegeoip.net/json/'))
-			self.location = ','.join([location_data['city'],
-					location_data['region_code'],
-					location_data['country_code']])
+			self.location = ','.join((
+				location_data['city'],
+				location_data['region_code'],
+				location_data['country_code']
+			))
 		query_data = {
 			'q':
 				'use "https://raw.githubusercontent.com/yql/yql-tables/master/weather/weather.bylocation.xml" as we;'
diff --git a/powerline/segments/vim/__init__.py b/powerline/segments/vim/__init__.py
index b8ebb286..4a1c4706 100644
--- a/powerline/segments/vim/__init__.py
+++ b/powerline/segments/vim/__init__.py
@@ -244,8 +244,10 @@ def file_directory(pl, segment_info, remove_scheme=True, shorten_user=True, shor
 		name = name[len(match.group(0)) + 1:]  # Remove scheme and colon
 		file_directory = vim_funcs['fnamemodify'](name, ':h')
 	else:
-		file_directory = vim_funcs['fnamemodify'](name, (':~' if shorten_user else '')
-				+ (':.' if shorten_cwd else '') + ':h')
+		file_directory = vim_funcs['fnamemodify'](
+			name,
+			(':~' if shorten_user else '') + (':.' if shorten_cwd else '') + ':h'
+		)
 	if not file_directory:
 		return None
 	if shorten_home and file_directory.startswith('/home/'):
@@ -507,7 +509,7 @@ def file_vcs_status(pl, segment_info, create_watcher):
 				ret.append({
 					'contents': status,
 					'highlight_group': ['file_vcs_status_' + status, 'file_vcs_status'],
-					})
+				})
 			return ret


diff --git a/powerline/shell.py b/powerline/shell.py
index 25d07258..2d4152d2 100644
--- a/powerline/shell.py
+++ b/powerline/shell.py
@@ -38,8 +38,10 @@ class ShellPowerline(Powerline):
 		if not local_themes:
 			return {}

-		return dict(((key, {'config': self.load_theme_config(val)})
-				for key, val in local_themes.items()))
+		return dict((
+			(key, {'config': self.load_theme_config(val)})
+			for key, val in local_themes.items()
+		))


 def get_argparser(parser=None, *args, **kwargs):
@@ -49,8 +51,10 @@
 	p = parser(*args, **kwargs)
 	p.add_argument('ext', nargs=1, help='Extension: application for which powerline command is launched (usually `shell\' or `tmux\')')
 	p.add_argument('side', nargs='?', choices=('left', 'right', 'above', 'aboveleft'), help='Side: `left\' and `right\' represent left and right side respectively, `above\' emits lines that are supposed to be printed just above the prompt and `aboveleft\' is like concatenating `above\' with `left\' with the exception that only one Python instance is used in this case.')
-	p.add_argument('-r', '--renderer_module', metavar='MODULE', type=str,
-			help='Renderer module. Usually something like `.bash\' or `.zsh\', is supposed to be set only in shell-specific bindings file.')
+	p.add_argument(
+		'-r', '--renderer_module', metavar='MODULE', type=str,
+		help='Renderer module. Usually something like `.bash\' or `.zsh\', is supposed to be set only in shell-specific bindings file.'
+	)
 	p.add_argument('-w', '--width', type=int, help='Maximum prompt with. Triggers truncation of some segments')
 	p.add_argument('--last_exit_code', metavar='INT', type=int, help='Last exit code')
 	p.add_argument('--last_pipe_status', metavar='LIST', default='', type=lambda s: [int(status) for status in s.split()], help='Like above, but is supposed to contain space-separated array of statuses, representing exit statuses of commands in one pipe.')
diff --git a/powerline/theme.py b/powerline/theme.py
index d03b0a06..378f8495 100644
--- a/powerline/theme.py
+++ b/powerline/theme.py
@@ -25,13 +25,13 @@ def new_empty_segment_line():

 class Theme(object):
 	def __init__(self,
-				ext,
-				theme_config,
-				common_config,
-				pl,
-				main_theme_config=None,
-				run_once=False,
-				shutdown_event=None):
+	             ext,
+	             theme_config,
+	             common_config,
+	             pl,
+	             main_theme_config=None,
+	             run_once=False,
+	             shutdown_event=None):
 		self.dividers = theme_config['dividers']
 		self.dividers = dict((
 			(key, dict((k, u(v))
@@ -55,7 +55,7 @@ class Theme(object):
 			theme_configs.append(main_theme_config)
 		get_segment = gen_segment_getter(pl, ext, common_config, theme_configs, theme_config.get('default_module'))
 		for segdict in itertools.chain((theme_config['segments'],),
-				theme_config['segments'].get('above', ())):
+		                               theme_config['segments'].get('above', ())):
 			self.segments.append(new_empty_segment_line())
 			for side in ['left', 'right']:
 				for segment in segdict.get(side, []):
diff --git a/powerline/vim.py b/powerline/vim.py
index 1e83b2ce..2676ac29 100644
--- a/powerline/vim.py
+++ b/powerline/vim.py
@@ -74,8 +74,10 @@ class VimPowerline(Powerline):
 		# Note: themes with non-[a-zA-Z0-9_] names are impossible to override
 		# (though as far as I know exists() won’t throw). Won’t fix, use proper
 		# theme names.
-		return _override_from(super(VimPowerline, self).load_theme_config(name),
-				'powerline_theme_overrides__' + name)
+		return _override_from(
+			super(VimPowerline, self).load_theme_config(name),
+			'powerline_theme_overrides__' + name
+		)

 	def get_local_themes(self, local_themes):
 		self.get_matcher = gen_matcher_getter(self.ext, self.import_paths)
@@ -83,9 +85,13 @@ class VimPowerline(Powerline):
 		if not local_themes:
 			return {}

-		return dict(((None if key == '__tabline__' else self.get_matcher(key),
-					{'config': self.load_theme_config(val)})
-				for key, val in local_themes.items()))
+		return dict((
+			(
+				(None if key == '__tabline__' else self.get_matcher(key)),
+				{'config': self.load_theme_config(val)}
+			)
+			for key, val in local_themes.items())
+		)

 	def get_config_paths(self):
 		try:
@@ -167,8 +173,7 @@ class VimPowerline(Powerline):
 	@staticmethod
 	def do_pyeval():
 		import __main__
-		vim.command('return ' + json.dumps(eval(vim.eval('a:e'),
-				__main__.__dict__)))
+		vim.command('return ' + json.dumps(eval(vim.eval('a:e'), __main__.__dict__)))

 	def setup_components(self, components):
 		if components is None:
@@ -208,10 +213,10 @@ def setup(pyeval=None, pycmd=None, can_replace_pyeval=True):
 	# pyeval() and vim.bindeval were both introduced in one patch
 	if not hasattr(vim, 'bindeval') and can_replace_pyeval:
 		vim.command(('''
-	function! PowerlinePyeval(e)
-		{pycmd} powerline.do_pyeval()
-	endfunction
-	''').format(pycmd=pycmd))
+			function! PowerlinePyeval(e)
+				{pycmd} powerline.do_pyeval()
+			endfunction
+		''').format(pycmd=pycmd))
 		pyeval = 'PowerlinePyeval'

 	powerline = VimPowerline(pyeval)
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
index 8447ab2f..ff340982 100644
--- a/tests/lib/__init__.py
+++ b/tests/lib/__init__.py
@@ -13,8 +13,10 @@ class Pl(object):
 		self.use_daemon_threads = True

 		for meth in ('error', 'warn', 'debug', 'exception'):
-			exec (('def {0}(self, msg, *args, **kwargs):\n'
-					'	self.{0}s.append((kwargs.get("prefix") or self.prefix, msg, args, kwargs))\n').format(meth))
+			exec ((
+				'def {0}(self, msg, *args, **kwargs):\n'
+				'	self.{0}s.append((kwargs.get("prefix") or self.prefix, msg, args, kwargs))\n'
+			).format(meth))


 class Args(object):
diff --git a/tests/test_segments.py b/tests/test_segments.py
index 107d982c..60d05574 100644
--- a/tests/test_segments.py
+++ b/tests/test_segments.py
@@ -29,8 +29,9 @@ class TestShell(TestCase):
 	def test_last_status(self):
 		pl = Pl()
 		segment_info = {'args': Args(last_exit_code=10)}
-		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info),
-				[{'contents': '10', 'highlight_group': 'exit_fail'}])
+		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
+			{'contents': '10', 'highlight_group': 'exit_fail'}
+		])
 		segment_info['args'].last_exit_code = 0
 		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
 		segment_info['args'].last_exit_code = None
@@ -222,18 +223,23 @@ class TestCommon(TestCase):
 		branch = partial(common.branch, pl=pl, create_watcher=create_watcher)
 		with replace_attr(common, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')):
 			with replace_attr(common, 'tree_status', lambda repo, pl: None):
-				self.assertEqual(branch(segment_info=segment_info, status_colors=False),
-						[{'highlight_group': ['branch'], 'contents': 'tests'}])
-				self.assertEqual(branch(segment_info=segment_info, status_colors=True),
-						[{'contents': 'tests', 'highlight_group': ['branch_clean', 'branch']}])
+				self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
+					{'highlight_group': ['branch'], 'contents': 'tests'}
+				])
+				self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
+					{'contents': 'tests', 'highlight_group': ['branch_clean', 'branch']}
+				])
 		with replace_attr(common, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')):
 			with replace_attr(common, 'tree_status', lambda repo, pl: 'D '):
-				self.assertEqual(branch(segment_info=segment_info, status_colors=False),
-						[{'highlight_group': ['branch'], 'contents': 'tests'}])
-				self.assertEqual(branch(segment_info=segment_info, status_colors=True),
-						[{'contents': 'tests', 'highlight_group': ['branch_dirty', 'branch']}])
-				self.assertEqual(branch(segment_info=segment_info, status_colors=False),
-						[{'highlight_group': ['branch'], 'contents': 'tests'}])
+				self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
+					{'highlight_group': ['branch'], 'contents': 'tests'}
+				])
+				self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
+					{'contents': 'tests', 'highlight_group': ['branch_dirty', 'branch']}
+				])
+				self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
+					{'highlight_group': ['branch'], 'contents': 'tests'}
+				])
 		with replace_attr(common, 'guess', lambda path, create_watcher: None):
 			self.assertEqual(branch(segment_info=segment_info, status_colors=False), None)

@@ -322,8 +328,9 @@ class TestCommon(TestCase):
 			ose = OSError()
 			ose.errno = 2
 			cwd[0] = ose
-			self.assertEqual(common.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2),
-					[{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_group': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}])
+			self.assertEqual(common.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
+				{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_group': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
+			])
 			cwd[0] = OSError()
 			self.assertRaises(OSError, common.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
 			cwd[0] = ValueError()
@@ -415,14 +422,16 @@ class TestCommon(TestCase):
 		pl = Pl()
 		with replace_module_module(common, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)):
 			with replace_attr(common, '_cpu_count', lambda: 2):
-				self.assertEqual(common.system_load(pl=pl),
-						[{'contents': '7.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
-						{'contents': '3.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
-						{'contents': '1.5', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}])
-				self.assertEqual(common.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1),
-						[{'contents': '8 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
-						{'contents': '4 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
-						{'contents': '2', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}])
+				self.assertEqual(common.system_load(pl=pl), [
+					{'contents': '7.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
+					{'contents': '3.5 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
+					{'contents': '1.5', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}
+				])
+				self.assertEqual(common.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), [
+					{'contents': '8 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
+					{'contents': '4 ', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
+					{'contents': '2', 'highlight_group': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}
+				])

 	def test_cpu_load_percent(self):
 		pl = Pl()
@@ -700,10 +709,12 @@ class TestVim(TestCase):
 		pl = Pl()
 		segment_info = vim_module._get_segment_info()
 		self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), None)
-		self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True),
-				[{'contents': '[No file]', 'highlight_group': ['file_name_no_file', 'file_name']}])
-		self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'),
-				[{'contents': 'X', 'highlight_group': ['file_name_no_file', 'file_name']}])
+		self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), [
+			{'contents': '[No file]', 'highlight_group': ['file_name_no_file', 'file_name']}
+		])
+		self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), [
+			{'contents': 'X', 'highlight_group': ['file_name_no_file', 'file_name']}
+		])
 		with vim_module._with('buffer', '/tmp/abc') as segment_info:
 			self.assertEqual(vim.file_name(pl=pl, segment_info=segment_info), 'abc')
 		with vim_module._with('buffer', '/tmp/’’') as segment_info:
@@ -721,14 +732,17 @@ class TestVim(TestCase):
 	def test_file_opts(self):
 		pl = Pl()
 		segment_info = vim_module._get_segment_info()
-		self.assertEqual(vim.file_format(pl=pl, segment_info=segment_info),
-				[{'divider_highlight_group': 'background:divider', 'contents': 'unix'}])
-		self.assertEqual(vim.file_encoding(pl=pl, segment_info=segment_info),
-				[{'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}])
+		self.assertEqual(vim.file_format(pl=pl, segment_info=segment_info), [
+			{'divider_highlight_group': 'background:divider', 'contents': 'unix'}
+		])
+		self.assertEqual(vim.file_encoding(pl=pl, segment_info=segment_info), [
+			{'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}
+		])
 		self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), None)
 		with vim_module._with('bufoptions', filetype='python'):
-			self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info),
-					[{'divider_highlight_group': 'background:divider', 'contents': 'python'}])
+			self.assertEqual(vim.file_type(pl=pl, segment_info=segment_info), [
+				{'divider_highlight_group': 'background:divider', 'contents': 'python'}
+			])

 	def test_window_title(self):
 		pl = Pl()
@@ -745,8 +759,9 @@ class TestVim(TestCase):
 			self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '1')
 			vim_module._set_cursor(50, 0)
 			self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info), '50')
-			self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info, gradient=True),
-					[{'contents': '50', 'highlight_group': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}])
+			self.assertEqual(vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), [
+				{'contents': '50', 'highlight_group': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}
+			])
 		finally:
 			vim_module._bw(segment_info['bufnr'])

@@ -768,8 +783,9 @@ class TestVim(TestCase):
 			segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
 			vim_module._set_cursor(49, 0)
 			self.assertEqual(vim.position(pl=pl, segment_info=segment_info), '50%')
-			self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True),
-					[{'contents': '50%', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 50.0}])
+			self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), [
+				{'contents': '50%', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 50.0}
+			])
 			vim_module._set_cursor(0, 0)
 			self.assertEqual(vim.position(pl=pl, segment_info=segment_info), 'Top')
 			vim_module._set_cursor(97, 0)
@@ -777,8 +793,9 @@ class TestVim(TestCase):
 			segment_info['buffer'][0:-1] = [str(i) for i in range(2)]
 			vim_module._set_cursor(0, 0)
 			self.assertEqual(vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo')
-			self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True),
-					[{'contents': 'All', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 0.0}])
+			self.assertEqual(vim.position(pl=pl, segment_info=segment_info, gradient=True), [
+				{'contents': 'All', 'highlight_group': ['position_gradient', 'position'], 'gradient_level': 0.0}
+			])
 		finally:
 			vim_module._bw(segment_info['bufnr'])

@@ -805,16 +822,20 @@ class TestVim(TestCase):
 		with vim_module._with('buffer', '/foo') as segment_info:
 			with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: None)):
 				with replace_attr(vim, 'tree_status', lambda repo, pl: None):
-					self.assertEqual(branch(segment_info=segment_info, status_colors=False),
-							[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}])
-					self.assertEqual(branch(segment_info=segment_info, status_colors=True),
-							[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_clean', 'branch'], 'contents': 'foo'}])
+					self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
+						{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}
+					])
+					self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
+						{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_clean', 'branch'], 'contents': 'foo'}
+					])
 			with replace_attr(vim, 'guess', get_dummy_guess(status=lambda: 'DU')):
 				with replace_attr(vim, 'tree_status', lambda repo, pl: 'DU'):
-					self.assertEqual(branch(segment_info=segment_info, status_colors=False),
-							[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}])
-					self.assertEqual(branch(segment_info=segment_info, status_colors=True),
-							[{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_dirty', 'branch'], 'contents': 'foo'}])
+					self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
+						{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch'], 'contents': 'foo'}
+					])
+					self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
+						{'divider_highlight_group': 'branch:divider', 'highlight_group': ['branch_dirty', 'branch'], 'contents': 'foo'}
+					])

 	def test_file_vcs_status(self):
 		pl = Pl()
@@ -822,8 +843,9 @@ class TestVim(TestCase):
 		file_vcs_status = partial(vim.file_vcs_status, pl=pl, create_watcher=create_watcher)
 		with vim_module._with('buffer', '/foo') as segment_info:
 			with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
-				self.assertEqual(file_vcs_status(segment_info=segment_info),
-						[{'highlight_group': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}])
+				self.assertEqual(file_vcs_status(segment_info=segment_info), [
+					{'highlight_group': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}
+				])
 			with replace_attr(vim, 'guess', get_dummy_guess(status=lambda file: None)):
 				self.assertEqual(file_vcs_status(segment_info=segment_info), None)
 		with vim_module._with('buffer', '/bar') as segment_info:
diff --git a/tests/vim.py b/tests/vim.py
index 29da4415..f7e0f703 100644
--- a/tests/vim.py
+++ b/tests/vim.py
@@ -153,8 +153,10 @@ def _construct_result(r):
 	elif isinstance(r, list):
 		return [_construct_result(i) for i in r]
 	elif isinstance(r, dict):
-		return dict(((_construct_result(k), _construct_result(v))
-				for k, v in r.items()))
+		return dict((
+			(_construct_result(k), _construct_result(v))
+			for k, v in r.items()
+		))
 	return r


diff --git a/tools/colors_find.py b/tools/colors_find.py
index cf1ba1c5..cdc01e10 100755
--- a/tools/colors_find.py
+++ b/tools/colors_find.py
@@ -8,8 +8,10 @@ from colormath.color_diff import delta_e_cie2000


 def get_lab(name, rgb):
-	rgb = sRGBColor(int(rgb[:2], 16), int(rgb[2:4], 16), int(rgb[4:6], 16),
-			is_upscaled=True)
+	rgb = sRGBColor(
+		int(rgb[:2], 16), int(rgb[2:4], 16), int(rgb[4:6], 16),
+		is_upscaled=True
+	)
 	lab = convert_color(rgb, LabColor)
 	return name, lab

diff --git a/tools/generate_gradients.py b/tools/generate_gradients.py
index a667f301..b3c94615 100755
--- a/tools/generate_gradients.py
+++ b/tools/generate_gradients.py
@@ -26,8 +26,10 @@ def num2(s):


 def rgbint_to_lab(rgbint):
-	rgb = sRGBColor((rgbint >> 16) & 0xFF, (rgbint >> 8) & 0xFF, rgbint & 0xFF,
-			is_upscaled=True)
+	rgb = sRGBColor(
+		(rgbint >> 16) & 0xFF, (rgbint >> 8) & 0xFF, rgbint & 0xFF,
+		is_upscaled=True
+	)
 	return convert_color(rgb, LabColor)


@@ -52,8 +54,10 @@ def linear_gradient(start_value, stop_value, start_offset, stop_offset, offset):
 def lab_gradient(slab, elab, soff, eoff, off):
 	svals = slab.get_value_tuple()
 	evals = elab.get_value_tuple()
-	return LabColor(*[linear_gradient(start_value, end_value, soff, eoff, off)
-			for start_value, end_value in zip(svals, evals)])
+	return LabColor(*[
+		linear_gradient(start_value, end_value, soff, eoff, off)
+		for start_value, end_value in zip(svals, evals)
+	])


 def generate_gradient_function(DATA):
@@ -185,8 +189,10 @@ if __name__ == '__main__':

 	steps = compute_steps(args.gradient, args.weights)

-	data = [(weight, args.gradient[i - 1], args.gradient[i])
-			for weight, i in zip(steps, range(1, len(args.gradient)))]
+	data = [
+		(weight, args.gradient[i - 1], args.gradient[i])
+		for weight, i in zip(steps, range(1, len(args.gradient)))
+	]
 	gr_func = generate_gradient_function(data)
 	gradient = [gr_func(y) for y in range(0, m)]