Use pip to install python packages.
This commit is contained in:
parent
c086fd3289
commit
13499f429e
125 changed files with 19 additions and 17208 deletions
|
@ -1,25 +0,0 @@
|
||||||
#!/usr/bin/python
# vim:fileencoding=utf-8:noet
'''Powerline prompt and statusline script.'''
import sys
import os

try:
    from powerline.shell import ShellPowerline, get_argparser
except ImportError:
    # Running from a source checkout rather than an installed package:
    # add the repository root (two levels up from this script) to
    # sys.path and retry.  NOTE: the redundant `import os` that used to
    # live here is removed — os is already imported at module top.
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    from powerline.shell import ShellPowerline, get_argparser  # NOQA

if __name__ == '__main__':
    # Parse shell-side arguments (width, side, ...) and render a single
    # prompt line; run_once=True because this process renders one prompt
    # and exits.
    args = get_argparser(description=__doc__).parse_args()
    powerline = ShellPowerline(args, run_once=True)
    rendered = powerline.render(
        width=args.width,
        side=args.side,
        segment_info={'args': args, 'environ': os.environ},
    )
    try:
        sys.stdout.write(rendered)
    except UnicodeEncodeError:
        # Terminal encoding cannot represent the output directly; fall
        # back to writing raw UTF-8 bytes (Python 2 behavior).
        sys.stdout.write(rendered.encode('utf-8'))
|
|
|
@ -1,14 +0,0 @@
|
||||||
#!/usr/bin/python
# vim:fileencoding=utf-8:noet
'''Powerline configuration checker.'''
import argparse
from powerline.lint import check
import sys


# Built at module level so the parser is available on import as well as
# when run as a script.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-p', '--config_path', metavar='PATH')

if __name__ == '__main__':
    cli_args = parser.parse_args()
    # check()'s result is used directly as the process exit status.
    sys.exit(check(cli_args.config_path))
|
|
|
@ -1,325 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
|
|
||||||
def warn(msg):
    """Print a diagnostic message prefixed with the script name.

    FIX: the original used the Python-2-only ``print 'a ', msg``
    statement; this version concatenates into one string so it works
    under both Python 2 and 3 and emits exactly one separating space.
    """
    print('[powerline-zsh] ' + msg)
|
|
||||||
|
|
||||||
|
|
||||||
class Color:
    """xterm-256 palette codes used by the prompt segments.

    http://www.calmar.ws/vim/color-output.png is a handy chart of the
    256-color palette values.
    """

    # Current-working-directory segment.
    PATH_BG = 237       # dark grey
    PATH_FG = 250       # light grey
    CWD_FG = 254        # nearly-white grey
    SEPARATOR_FG = 244

    # Version-control segment, clean vs. dirty working tree.
    REPO_CLEAN_BG = 148  # a light green color
    REPO_CLEAN_FG = 0    # black
    REPO_DIRTY_BG = 161  # pink/red
    REPO_DIRTY_FG = 15   # white

    # Trailing '$' indicator, keyed on the previous command's status.
    CMD_PASSED_BG = 236
    CMD_PASSED_FG = 15
    CMD_FAILED_BG = 161
    CMD_FAILED_FG = 15

    # Subversion change-count segment.
    SVN_CHANGES_BG = 148
    SVN_CHANGES_FG = 22  # dark green

    # Active-virtualenv segment.
    VIRTUAL_ENV_BG = 35  # a mid-tone green
    VIRTUAL_ENV_FG = 22
|
|
||||||
|
|
||||||
|
|
||||||
class Powerline:
    """Accumulates prompt segments and renders them as one zsh string."""

    # Separator glyphs per mode: 'patched' expects a powerline-patched
    # font, 'compatible' falls back to plain Unicode triangles.
    symbols = {
        'compatible': {
            'separator': u'\u25B6',
            'separator_thin': u'\u276F'
        },
        'patched': {
            'separator': u'\u2B80',
            'separator_thin': u'\u2B81'
        },
        'default': {
            'separator': '⮀',
            'separator_thin': '⮁'
        }
    }
    LSQESCRSQ = '\\[\\e%s\\]'
    # zsh escapes that reset foreground and background colors.
    reset = ' %f%k'

    def __init__(self, mode='default'):
        glyphs = Powerline.symbols[mode]
        self.separator = glyphs['separator']
        self.separator_thin = glyphs['separator_thin']
        self.segments = []

    def color(self, prefix, code):
        # '38' selects foreground, '48' background (the ANSI SGR
        # prefixes); anything else yields None.  '%%' survives as a
        # literal '%' for zsh's %F{n}/%K{n} prompt escapes.
        if prefix == '38':
            return '%%F{%s}' % code
        if prefix == '48':
            return '%%K{%s}' % code

    def fgcolor(self, code):
        return self.color('38', code)

    def bgcolor(self, code):
        return self.color('48', code)

    def append(self, segment):
        self.segments.append(segment)

    def draw(self):
        # Each segment is drawn knowing its successor (the separator is
        # painted over the next segment's background); the final segment
        # is paired with None.
        followers = self.segments[1:] + [None]
        rendered = [seg.draw(self, nxt) for seg, nxt in zip(self.segments, followers)]
        return ''.join(rendered) + self.reset
|
|
||||||
|
|
||||||
|
|
||||||
class Segment:
    """One colored chunk of the prompt plus its trailing separator."""

    def __init__(self, powerline, content, fg, bg, separator=None, separator_fg=None):
        self.powerline = powerline
        self.content = content
        self.fg = fg
        self.bg = bg
        # Default to the powerline-wide separator glyph, and color the
        # separator with this segment's own background.
        self.separator = separator or powerline.separator
        self.separator_fg = separator_fg or bg

    def draw(self, powerline, next_segment=None):
        # The separator is painted over the *next* segment's background
        # so adjacent segments blend; the last one fades into the reset
        # escape codes instead.
        if next_segment is None:
            tail = powerline.reset
        else:
            tail = powerline.bgcolor(next_segment.bg)
        pieces = (
            powerline.fgcolor(self.fg),
            powerline.bgcolor(self.bg),
            self.content,
            tail,
            powerline.fgcolor(self.separator_fg),
            self.separator,
        )
        return ''.join(pieces)
|
|
||||||
|
|
||||||
|
|
||||||
def add_cwd_segment(powerline, cwd, maxdepth, cwd_only=False):
    """Append segments showing the working directory path.

    FIXES: the original unconditionally overwrote the ``cwd`` parameter
    with $PWD (so callers could never control what was shown), crashed
    with a TypeError when $HOME was unset (``cwd.find(None)``), and
    indexed ``cwd[0]`` without guarding against an empty string.

    cwd       -- directory to display; falls back to $PWD when None.
    maxdepth  -- maximum path components shown before eliding with '⋯'.
    cwd_only  -- when True, show only the deepest directory name.
    """
    home = os.getenv('HOME')
    if cwd is None:
        cwd = os.getenv('PWD')  # fall back to the shell's idea of cwd

    # Abbreviate the home directory prefix to '~'.
    if home and cwd.startswith(home):
        cwd = cwd.replace(home, '~', 1)

    if cwd.startswith('/'):
        cwd = cwd[1:]

    names = cwd.split('/')
    if len(names) > maxdepth:
        # Keep the first two and the last maxdepth-2 components, with an
        # ellipsis marker in between.
        names = names[:2] + ['⋯ '] + names[2 - maxdepth:]

    if not cwd_only:
        for component in names[:-1]:
            powerline.append(Segment(powerline, ' %s ' % component, Color.PATH_FG, Color.PATH_BG, powerline.separator_thin, Color.SEPARATOR_FG))
    # The deepest directory is highlighted with a brighter foreground.
    powerline.append(Segment(powerline, ' %s ' % names[-1], Color.CWD_FG, Color.PATH_BG))
|
|
||||||
|
|
||||||
|
|
||||||
def get_hg_status():
    """Run ``hg status`` and summarize the working-tree state.

    Returns a (has_modified_files, has_untracked_files,
    has_missing_files) tuple of booleans, classified from the first
    character of each status line ('?' untracked, '!' missing, anything
    else counts as modified).
    """
    modified = False
    untracked = False
    missing = False
    out = subprocess.Popen(['hg', 'status'], stdout=subprocess.PIPE).communicate()[0]
    for status_line in out.split('\n'):
        if not status_line:
            continue
        marker = status_line[0]
        if marker == '?':
            untracked = True
        elif marker == '!':
            missing = True
        else:
            modified = True
    return modified, untracked, missing
|
|
||||||
|
|
||||||
|
|
||||||
def add_hg_segment(powerline, cwd):
    """Append a Mercurial branch segment.

    Returns False when not inside an hg repository (no branch name),
    True after appending.  A dirty tree switches to the 'dirty' colors
    and appends '+' (untracked) and/or '!' (missing) markers.
    """
    branch = os.popen('hg branch 2> /dev/null').read().rstrip()
    if not branch:
        return False

    fg = Color.REPO_CLEAN_FG
    bg = Color.REPO_CLEAN_BG
    modified, untracked, missing = get_hg_status()
    if modified or untracked or missing:
        fg = Color.REPO_DIRTY_FG
        bg = Color.REPO_DIRTY_BG
        flags = ''
        if untracked:
            flags += '+'
        if missing:
            flags += '!'
        if flags:
            branch += ' ' + flags

    powerline.append(Segment(powerline, ' %s ' % branch, fg, bg))
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def get_git_status():
    """Parse ``git status -unormal`` output for prompt state.

    Returns (has_pending_commits, has_untracked_files, origin_position,
    detached_head, current_branch).  origin_position is e.g. ' 2⇡' when
    ahead of the remote.  NOTE(review): detection matches the English
    porcelain phrases of older git releases; newer git words these lines
    differently, so this is best-effort.

    FIX: the ahead/behind pattern is now a raw string — the original
    ``"...(\\d+)..."`` relied on Python passing unknown escapes through,
    which raises invalid-escape warnings on modern interpreters.
    """
    has_pending_commits = True
    has_untracked_files = False
    detached_head = False
    origin_position = ""
    current_branch = ''
    output = subprocess.Popen(['git', 'status', '-unormal'], stdout=subprocess.PIPE).communicate()[0]
    for line in output.split('\n'):
        origin_status = re.findall(r"Your branch is (ahead|behind).*?(\d+) comm", line)
        if len(origin_status) > 0:
            origin_position = " %d" % int(origin_status[0][1])
            if origin_status[0][0] == 'behind':
                origin_position += '⇣'
            if origin_status[0][0] == 'ahead':
                origin_position += '⇡'

        if line.find('nothing to commit (working directory clean)') >= 0:
            has_pending_commits = False
        if line.find('Untracked files') >= 0:
            has_untracked_files = True
        if line.find('Not currently on any branch') >= 0:
            detached_head = True
        if line.find('On branch') >= 0:
            current_branch = re.findall('On branch ([^ ]+)', line)[0]
    return has_pending_commits, has_untracked_files, origin_position, detached_head, current_branch
|
|
||||||
|
|
||||||
|
|
||||||
def add_git_segment(powerline, cwd):
    """Append a git branch segment.

    Returns False when not inside a git repository, True after
    appending a segment.  Dirty trees use the 'dirty' colors; untracked
    files add ' +'; ahead/behind counts are suffixed to the name.
    """
    # Detect a repo by looking for the '* branch' line of `git branch`.
    p1 = subprocess.Popen(['git', 'branch'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p2 = subprocess.Popen(['grep', '-e', '\\*'], stdin=p1.stdout, stdout=subprocess.PIPE)
    output = p2.communicate()[0].strip()
    if len(output) == 0:
        return False

    has_pending_commits, has_untracked_files, origin_position, detached_head, current_branch = get_git_status()

    if len(current_branch) > 0:
        branch = current_branch
    elif detached_head:
        # Detached HEAD: describe the commit and mark it with ((...)).
        describe = subprocess.Popen(['git', 'describe', '--all', '--contains', '--abbrev=4', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        branch = '((' + describe.communicate()[0].strip() + '))'
    else:
        # BUG FIX: this branch used to `return 'master'`, which reported
        # success (truthy) without ever appending a segment, silently
        # dropping the branch from the prompt.  Fall through and render
        # the assumed name instead.
        branch = 'master'

    branch += origin_position

    if has_untracked_files:
        branch += ' +'

    bg = Color.REPO_CLEAN_BG
    fg = Color.REPO_CLEAN_FG

    if has_pending_commits:
        bg = Color.REPO_DIRTY_BG
        fg = Color.REPO_DIRTY_FG

    powerline.append(Segment(powerline, ' %s ' % branch, fg, bg))
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def add_svn_segment(powerline, cwd):
    """Append a Subversion change-count segment.

    Returns None when ``cwd`` has no .svn directory, False when the svn
    command cannot be run, True otherwise.  The segment shows how many
    paths `svn status` reports as changed.
    """
    if not os.path.exists(os.path.join(cwd, '.svn')):
        return
    # `svn status` first-column codes counted as changes:
    #   ' ' none, 'A' added, 'C' conflicted, 'D' deleted, 'I' ignored,
    #   'M' modified, 'R' replaced, 'X' unversioned external dir,
    #   '?' not under version control, '!' missing/incomplete,
    #   '~' obstructed by an item of a different kind.
    #TODO: Color segment based on above status codes
    try:
        svn_proc = subprocess.Popen(['svn', 'status'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        grep_proc = subprocess.Popen(['grep', '-c', '^[ACDIMRX\\!\\~]'], stdin=svn_proc.stdout, stdout=subprocess.PIPE)
        count = grep_proc.communicate()[0].strip()
        if len(count) > 0 and int(count) > 0:
            powerline.append(Segment(powerline, ' %s ' % count.strip(), Color.SVN_CHANGES_FG, Color.SVN_CHANGES_BG))
    except OSError:
        return False
    except subprocess.CalledProcessError:
        return False
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def add_repo_segment(powerline, cwd):
    """Append a VCS segment by trying git, svn, then hg detectors.

    Stops at the first detector reporting success.  Detector failures
    (missing binary -> OSError, failed command ->
    CalledProcessError) are deliberately swallowed so the prompt still
    renders outside any repository.

    BUG FIXES: the original passed the module-global ``p`` to each
    detector instead of the ``powerline`` parameter (breaking any
    non-script caller), and its loop variable shadowed this function's
    own name.
    """
    for detector in (add_git_segment, add_svn_segment, add_hg_segment):
        try:
            if detector(powerline, cwd):
                return
        except subprocess.CalledProcessError:
            pass
        except OSError:
            pass
|
|
||||||
|
|
||||||
|
|
||||||
def add_virtual_env_segment(powerline, cwd):
    """Append the active virtualenv's name.

    Returns False when $VIRTUAL_ENV is not set, True after appending.
    """
    venv_path = os.getenv("VIRTUAL_ENV")
    if venv_path is None:
        return False
    # Only the environment's directory name is shown, not the full path.
    venv_name = os.path.basename(venv_path)
    powerline.append(Segment(powerline, ' %s ' % venv_name,
                             Color.VIRTUAL_ENV_FG, Color.VIRTUAL_ENV_BG))
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def add_root_indicator(powerline, error):
    """Append the trailing '$' segment.

    ``error`` is the previous command's exit status (int or numeric
    string); nonzero switches to the 'failed' colors.
    """
    if int(error) != 0:
        fg = Color.CMD_FAILED_FG
        bg = Color.CMD_FAILED_BG
    else:
        fg = Color.CMD_PASSED_FG
        bg = Color.CMD_PASSED_BG
    powerline.append(Segment(powerline, ' $ ', fg, bg))
|
|
||||||
|
|
||||||
|
|
||||||
def get_valid_cwd():
    """Determine a usable working directory, surviving its deletion.

    Falls back to $PWD when os.getcwd() fails (the directory was removed
    under us), then climbs toward the root until an existing directory
    is found and chdirs there.  Returns the original (possibly stale)
    path so the prompt still shows where the user believes they are.
    Exits the process when no valid ancestor can be entered.

    FIXES: the two bare ``except:`` clauses (which also swallowed
    SystemExit/KeyboardInterrupt) now catch OSError, and the "invalid
    directory" warning only fires when we actually had to climb out of
    a deleted directory — previously it printed on every prompt.
    """
    try:
        cwd = os.getcwd()
    except OSError:
        cwd = os.getenv('PWD')  # This is where the OS thinks we are
    parts = cwd.split(os.sep)
    up = cwd
    # Drop trailing components until the path exists.
    while parts and not os.path.exists(up):
        parts.pop()
        up = os.sep.join(parts)
    try:
        os.chdir(up)
    except OSError:
        warn("Your current directory is invalid.")
        sys.exit(1)
    if up != cwd:
        warn("Your current directory is invalid. Lowest valid directory: " + up)
    return cwd
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Script arguments: the exit status of the previous command (passed
    # by the zsh hook) and an optional flag to show only the deepest
    # directory component.
    cli = argparse.ArgumentParser()
    cli.add_argument('--cwd-only', action="store_true")
    cli.add_argument('prev_error', nargs='?', default=0)
    opts = cli.parse_args()

    # NOTE: the global must stay named `p` — add_repo_segment reads it.
    p = Powerline(mode='default')
    cwd = get_valid_cwd()
    add_virtual_env_segment(p, cwd)
    #p.append(Segment(' \\u ', 250, 240))
    #p.append(Segment(' \\h ', 250, 238))
    add_cwd_segment(p, cwd, 5, opts.cwd_only)
    add_repo_segment(p, cwd)
    add_root_indicator(p, opts.prev_error)
    sys.stdout.write(p.draw())
|
|
||||||
|
|
||||||
# vim: set expandtab:
|
|
|
@ -1,63 +0,0 @@
|
||||||
Metadata-Version: 1.0
|
|
||||||
Name: Powerline
|
|
||||||
Version: beta
|
|
||||||
Summary: The ultimate statusline/prompt utility.
|
|
||||||
Home-page: https://github.com/Lokaltog/powerline
|
|
||||||
Author: Kim Silkebækken
|
|
||||||
Author-email: kim.silkebaekken+vim@gmail.com
|
|
||||||
License: UNKNOWN
|
|
||||||
Description: Powerline
|
|
||||||
=========
|
|
||||||
|
|
||||||
:Author: Kim Silkebækken (kim.silkebaekken+vim@gmail.com)
|
|
||||||
:Source: https://github.com/Lokaltog/powerline
|
|
||||||
:Version: beta
|
|
||||||
:Build status:
|
|
||||||
.. image:: https://api.travis-ci.org/Lokaltog/powerline.png?branch=develop
|
|
||||||
:target: `travis-build-status`_
|
|
||||||
:alt: Build status
|
|
||||||
|
|
||||||
This is the upcoming version of Powerline, implemented in Python. The
|
|
||||||
project is currently in a stable beta and almost ready for release.
|
|
||||||
|
|
||||||
* Consult the `documentation
|
|
||||||
<https://powerline.readthedocs.org/en/latest/>`_ for more information and
|
|
||||||
installation instructions.
|
|
||||||
* Check out `powerline-fonts <https://github.com/Lokaltog/powerline-fonts>`_
|
|
||||||
for pre-patched versions of popular coding fonts.
|
|
||||||
|
|
||||||
.. _travis-build-status: https://travis-ci.org/Lokaltog/powerline
|
|
||||||
|
|
||||||
Screenshots
|
|
||||||
-----------
|
|
||||||
|
|
||||||
Vim statusline
|
|
||||||
^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
**Mode-dependent highlighting**
|
|
||||||
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-mode-normal.png
|
|
||||||
:alt: Normal mode
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-mode-insert.png
|
|
||||||
:alt: Insert mode
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-mode-visual.png
|
|
||||||
:alt: Visual mode
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-mode-replace.png
|
|
||||||
:alt: Replace mode
|
|
||||||
|
|
||||||
**Automatic truncation of segments in small windows**
|
|
||||||
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-truncate1.png
|
|
||||||
:alt: Truncation illustration
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-truncate2.png
|
|
||||||
:alt: Truncation illustration
|
|
||||||
* .. image:: https://raw.github.com/Lokaltog/powerline/develop/docs/source/_static/img/pl-truncate3.png
|
|
||||||
:alt: Truncation illustration
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
The font in the screenshots is `Pragmata Pro`_ by Fabrizio Schiavi.
|
|
||||||
|
|
||||||
.. _`Pragmata Pro`: http://www.fsd.it/fonts/pragmatapro.htm
|
|
||||||
|
|
||||||
Platform: UNKNOWN
|
|
|
@ -1,98 +0,0 @@
|
||||||
MANIFEST.in
|
|
||||||
README.rst
|
|
||||||
Powerline.egg-info/PKG-INFO
|
|
||||||
Powerline.egg-info/SOURCES.txt
|
|
||||||
Powerline.egg-info/dependency_links.txt
|
|
||||||
Powerline.egg-info/not-zip-safe
|
|
||||||
Powerline.egg-info/requires.txt
|
|
||||||
Powerline.egg-info/top_level.txt
|
|
||||||
powerline/__init__.py
|
|
||||||
powerline/colorscheme.py
|
|
||||||
powerline/ipython.py
|
|
||||||
powerline/matcher.py
|
|
||||||
powerline/renderer.py
|
|
||||||
powerline/segment.py
|
|
||||||
powerline/shell.py
|
|
||||||
powerline/theme.py
|
|
||||||
powerline/vim.py
|
|
||||||
powerline/bindings/__init__.py
|
|
||||||
powerline/bindings/awesome/powerline-awesome.py
|
|
||||||
powerline/bindings/awesome/powerline.lua
|
|
||||||
powerline/bindings/bash/powerline.sh
|
|
||||||
powerline/bindings/ipython/__init__.py
|
|
||||||
powerline/bindings/ipython/post_0_11.py
|
|
||||||
powerline/bindings/ipython/pre_0_11.py
|
|
||||||
powerline/bindings/qtile/__init__.py
|
|
||||||
powerline/bindings/qtile/widget.py
|
|
||||||
powerline/bindings/tmux/powerline.conf
|
|
||||||
powerline/bindings/vim/__init__.py
|
|
||||||
powerline/bindings/vim/plugin/powerline.vim
|
|
||||||
powerline/bindings/zsh/__init__.py
|
|
||||||
powerline/bindings/zsh/powerline.zsh
|
|
||||||
powerline/config_files/colors.json
|
|
||||||
powerline/config_files/config.json
|
|
||||||
powerline/config_files/colorschemes/ipython/default.json
|
|
||||||
powerline/config_files/colorschemes/shell/default.json
|
|
||||||
powerline/config_files/colorschemes/shell/solarized.json
|
|
||||||
powerline/config_files/colorschemes/tmux/default.json
|
|
||||||
powerline/config_files/colorschemes/vim/default.json
|
|
||||||
powerline/config_files/colorschemes/vim/solarized.json
|
|
||||||
powerline/config_files/colorschemes/wm/default.json
|
|
||||||
powerline/config_files/themes/ipython/in.json
|
|
||||||
powerline/config_files/themes/ipython/in2.json
|
|
||||||
powerline/config_files/themes/ipython/out.json
|
|
||||||
powerline/config_files/themes/ipython/rewrite.json
|
|
||||||
powerline/config_files/themes/shell/default.json
|
|
||||||
powerline/config_files/themes/shell/default_leftonly.json
|
|
||||||
powerline/config_files/themes/tmux/default.json
|
|
||||||
powerline/config_files/themes/vim/cmdwin.json
|
|
||||||
powerline/config_files/themes/vim/default.json
|
|
||||||
powerline/config_files/themes/vim/help.json
|
|
||||||
powerline/config_files/themes/vim/quickfix.json
|
|
||||||
powerline/config_files/themes/wm/default.json
|
|
||||||
powerline/lib/__init__.py
|
|
||||||
powerline/lib/config.py
|
|
||||||
powerline/lib/file_watcher.py
|
|
||||||
powerline/lib/humanize_bytes.py
|
|
||||||
powerline/lib/inotify.py
|
|
||||||
powerline/lib/memoize.py
|
|
||||||
powerline/lib/monotonic.py
|
|
||||||
powerline/lib/threaded.py
|
|
||||||
powerline/lib/tree_watcher.py
|
|
||||||
powerline/lib/url.py
|
|
||||||
powerline/lib/vcs/__init__.py
|
|
||||||
powerline/lib/vcs/bzr.py
|
|
||||||
powerline/lib/vcs/git.py
|
|
||||||
powerline/lib/vcs/mercurial.py
|
|
||||||
powerline/lint/__init__.py
|
|
||||||
powerline/lint/inspect.py
|
|
||||||
powerline/lint/markedjson/__init__.py
|
|
||||||
powerline/lint/markedjson/composer.py
|
|
||||||
powerline/lint/markedjson/constructor.py
|
|
||||||
powerline/lint/markedjson/error.py
|
|
||||||
powerline/lint/markedjson/events.py
|
|
||||||
powerline/lint/markedjson/loader.py
|
|
||||||
powerline/lint/markedjson/markedvalue.py
|
|
||||||
powerline/lint/markedjson/nodes.py
|
|
||||||
powerline/lint/markedjson/parser.py
|
|
||||||
powerline/lint/markedjson/reader.py
|
|
||||||
powerline/lint/markedjson/resolver.py
|
|
||||||
powerline/lint/markedjson/scanner.py
|
|
||||||
powerline/lint/markedjson/tokens.py
|
|
||||||
powerline/matchers/__init__.py
|
|
||||||
powerline/matchers/vim.py
|
|
||||||
powerline/renderers/__init__.py
|
|
||||||
powerline/renderers/bash_prompt.py
|
|
||||||
powerline/renderers/ipython.py
|
|
||||||
powerline/renderers/pango_markup.py
|
|
||||||
powerline/renderers/shell.py
|
|
||||||
powerline/renderers/tmux.py
|
|
||||||
powerline/renderers/vim.py
|
|
||||||
powerline/renderers/zsh_prompt.py
|
|
||||||
powerline/segments/__init__.py
|
|
||||||
powerline/segments/common.py
|
|
||||||
powerline/segments/ipython.py
|
|
||||||
powerline/segments/shell.py
|
|
||||||
powerline/segments/vim.py
|
|
||||||
scripts/powerline
|
|
||||||
scripts/powerline-lint
|
|
|
@ -1 +0,0 @@
|
||||||
|
|
|
@ -1,159 +0,0 @@
|
||||||
../powerline/shell.py
|
|
||||||
../powerline/colorscheme.py
|
|
||||||
../powerline/ipython.py
|
|
||||||
../powerline/theme.py
|
|
||||||
../powerline/vim.py
|
|
||||||
../powerline/__init__.py
|
|
||||||
../powerline/matcher.py
|
|
||||||
../powerline/renderer.py
|
|
||||||
../powerline/segment.py
|
|
||||||
../powerline/matchers/vim.py
|
|
||||||
../powerline/matchers/__init__.py
|
|
||||||
../powerline/bindings/__init__.py
|
|
||||||
../powerline/lint/__init__.py
|
|
||||||
../powerline/lint/inspect.py
|
|
||||||
../powerline/segments/shell.py
|
|
||||||
../powerline/segments/ipython.py
|
|
||||||
../powerline/segments/common.py
|
|
||||||
../powerline/segments/vim.py
|
|
||||||
../powerline/segments/__init__.py
|
|
||||||
../powerline/renderers/shell.py
|
|
||||||
../powerline/renderers/ipython.py
|
|
||||||
../powerline/renderers/bash_prompt.py
|
|
||||||
../powerline/renderers/zsh_prompt.py
|
|
||||||
../powerline/renderers/vim.py
|
|
||||||
../powerline/renderers/__init__.py
|
|
||||||
../powerline/renderers/pango_markup.py
|
|
||||||
../powerline/renderers/tmux.py
|
|
||||||
../powerline/lib/memoize.py
|
|
||||||
../powerline/lib/monotonic.py
|
|
||||||
../powerline/lib/config.py
|
|
||||||
../powerline/lib/humanize_bytes.py
|
|
||||||
../powerline/lib/__init__.py
|
|
||||||
../powerline/lib/inotify.py
|
|
||||||
../powerline/lib/url.py
|
|
||||||
../powerline/lib/threaded.py
|
|
||||||
../powerline/lib/file_watcher.py
|
|
||||||
../powerline/lib/tree_watcher.py
|
|
||||||
../powerline/bindings/ipython/post_0_11.py
|
|
||||||
../powerline/bindings/ipython/pre_0_11.py
|
|
||||||
../powerline/bindings/ipython/__init__.py
|
|
||||||
../powerline/bindings/qtile/__init__.py
|
|
||||||
../powerline/bindings/qtile/widget.py
|
|
||||||
../powerline/bindings/vim/__init__.py
|
|
||||||
../powerline/bindings/zsh/__init__.py
|
|
||||||
../powerline/lint/markedjson/composer.py
|
|
||||||
../powerline/lint/markedjson/constructor.py
|
|
||||||
../powerline/lint/markedjson/loader.py
|
|
||||||
../powerline/lint/markedjson/nodes.py
|
|
||||||
../powerline/lint/markedjson/resolver.py
|
|
||||||
../powerline/lint/markedjson/scanner.py
|
|
||||||
../powerline/lint/markedjson/reader.py
|
|
||||||
../powerline/lint/markedjson/__init__.py
|
|
||||||
../powerline/lint/markedjson/parser.py
|
|
||||||
../powerline/lint/markedjson/markedvalue.py
|
|
||||||
../powerline/lint/markedjson/events.py
|
|
||||||
../powerline/lint/markedjson/tokens.py
|
|
||||||
../powerline/lint/markedjson/error.py
|
|
||||||
../powerline/lib/vcs/git.py
|
|
||||||
../powerline/lib/vcs/mercurial.py
|
|
||||||
../powerline/lib/vcs/__init__.py
|
|
||||||
../powerline/lib/vcs/bzr.py
|
|
||||||
../powerline/config_files/colors.json
|
|
||||||
../powerline/config_files/config.json
|
|
||||||
../powerline/config_files/colorschemes/ipython/default.json
|
|
||||||
../powerline/config_files/colorschemes/shell/default.json
|
|
||||||
../powerline/config_files/colorschemes/shell/solarized.json
|
|
||||||
../powerline/config_files/colorschemes/tmux/default.json
|
|
||||||
../powerline/config_files/colorschemes/vim/default.json
|
|
||||||
../powerline/config_files/colorschemes/vim/solarized.json
|
|
||||||
../powerline/config_files/colorschemes/wm/default.json
|
|
||||||
../powerline/config_files/themes/ipython/in.json
|
|
||||||
../powerline/config_files/themes/ipython/in2.json
|
|
||||||
../powerline/config_files/themes/ipython/out.json
|
|
||||||
../powerline/config_files/themes/ipython/rewrite.json
|
|
||||||
../powerline/config_files/themes/shell/default.json
|
|
||||||
../powerline/config_files/themes/shell/default_leftonly.json
|
|
||||||
../powerline/config_files/themes/tmux/default.json
|
|
||||||
../powerline/config_files/themes/vim/cmdwin.json
|
|
||||||
../powerline/config_files/themes/vim/default.json
|
|
||||||
../powerline/config_files/themes/vim/help.json
|
|
||||||
../powerline/config_files/themes/vim/quickfix.json
|
|
||||||
../powerline/config_files/themes/wm/default.json
|
|
||||||
../powerline/bindings/awesome/powerline-awesome.py
|
|
||||||
../powerline/bindings/awesome/powerline.lua
|
|
||||||
../powerline/bindings/bash/powerline.sh
|
|
||||||
../powerline/bindings/tmux/powerline.conf
|
|
||||||
../powerline/bindings/vim/plugin/powerline.vim
|
|
||||||
../powerline/bindings/zsh/powerline.zsh
|
|
||||||
../powerline/shell.pyc
|
|
||||||
../powerline/colorscheme.pyc
|
|
||||||
../powerline/ipython.pyc
|
|
||||||
../powerline/theme.pyc
|
|
||||||
../powerline/vim.pyc
|
|
||||||
../powerline/__init__.pyc
|
|
||||||
../powerline/matcher.pyc
|
|
||||||
../powerline/renderer.pyc
|
|
||||||
../powerline/segment.pyc
|
|
||||||
../powerline/matchers/vim.pyc
|
|
||||||
../powerline/matchers/__init__.pyc
|
|
||||||
../powerline/bindings/__init__.pyc
|
|
||||||
../powerline/lint/__init__.pyc
|
|
||||||
../powerline/lint/inspect.pyc
|
|
||||||
../powerline/segments/shell.pyc
|
|
||||||
../powerline/segments/ipython.pyc
|
|
||||||
../powerline/segments/common.pyc
|
|
||||||
../powerline/segments/vim.pyc
|
|
||||||
../powerline/segments/__init__.pyc
|
|
||||||
../powerline/renderers/shell.pyc
|
|
||||||
../powerline/renderers/ipython.pyc
|
|
||||||
../powerline/renderers/bash_prompt.pyc
|
|
||||||
../powerline/renderers/zsh_prompt.pyc
|
|
||||||
../powerline/renderers/vim.pyc
|
|
||||||
../powerline/renderers/__init__.pyc
|
|
||||||
../powerline/renderers/pango_markup.pyc
|
|
||||||
../powerline/renderers/tmux.pyc
|
|
||||||
../powerline/lib/memoize.pyc
|
|
||||||
../powerline/lib/monotonic.pyc
|
|
||||||
../powerline/lib/config.pyc
|
|
||||||
../powerline/lib/humanize_bytes.pyc
|
|
||||||
../powerline/lib/__init__.pyc
|
|
||||||
../powerline/lib/inotify.pyc
|
|
||||||
../powerline/lib/url.pyc
|
|
||||||
../powerline/lib/threaded.pyc
|
|
||||||
../powerline/lib/file_watcher.pyc
|
|
||||||
../powerline/lib/tree_watcher.pyc
|
|
||||||
../powerline/bindings/ipython/post_0_11.pyc
|
|
||||||
../powerline/bindings/ipython/pre_0_11.pyc
|
|
||||||
../powerline/bindings/ipython/__init__.pyc
|
|
||||||
../powerline/bindings/qtile/__init__.pyc
|
|
||||||
../powerline/bindings/qtile/widget.pyc
|
|
||||||
../powerline/bindings/vim/__init__.pyc
|
|
||||||
../powerline/bindings/zsh/__init__.pyc
|
|
||||||
../powerline/lint/markedjson/composer.pyc
|
|
||||||
../powerline/lint/markedjson/constructor.pyc
|
|
||||||
../powerline/lint/markedjson/loader.pyc
|
|
||||||
../powerline/lint/markedjson/nodes.pyc
|
|
||||||
../powerline/lint/markedjson/resolver.pyc
|
|
||||||
../powerline/lint/markedjson/scanner.pyc
|
|
||||||
../powerline/lint/markedjson/reader.pyc
|
|
||||||
../powerline/lint/markedjson/__init__.pyc
|
|
||||||
../powerline/lint/markedjson/parser.pyc
|
|
||||||
../powerline/lint/markedjson/markedvalue.pyc
|
|
||||||
../powerline/lint/markedjson/events.pyc
|
|
||||||
../powerline/lint/markedjson/tokens.pyc
|
|
||||||
../powerline/lint/markedjson/error.pyc
|
|
||||||
../powerline/lib/vcs/git.pyc
|
|
||||||
../powerline/lib/vcs/mercurial.pyc
|
|
||||||
../powerline/lib/vcs/__init__.pyc
|
|
||||||
../powerline/lib/vcs/bzr.pyc
|
|
||||||
../powerline/bindings/awesome/powerline-awesome.pyc
|
|
||||||
./
|
|
||||||
SOURCES.txt
|
|
||||||
PKG-INFO
|
|
||||||
not-zip-safe
|
|
||||||
dependency_links.txt
|
|
||||||
top_level.txt
|
|
||||||
requires.txt
|
|
||||||
../../../../bin/powerline
|
|
||||||
../../../../bin/powerline-lint
|
|
|
@ -1,4 +0,0 @@
|
||||||
|
|
||||||
|
|
||||||
[docs]
|
|
||||||
Sphinx
|
|
|
@ -1 +0,0 @@
|
||||||
powerline
|
|
|
@ -1,282 +0,0 @@
|
||||||
Metadata-Version: 1.1
|
|
||||||
Name: jedi
|
|
||||||
Version: 0.5b5
|
|
||||||
Summary: An autocompletion tool for Python that can be used for text editors.
|
|
||||||
Home-page: https://github.com/davidhalter/jedi
|
|
||||||
Author: David Halter
|
|
||||||
Author-email: davidhalter88@gmail.com
|
|
||||||
License: LGPLv3
|
|
||||||
Description: ########################################
|
|
||||||
Jedi - an awesome Python auto-completion
|
|
||||||
########################################
|
|
||||||
|
|
||||||
.. image:: https://secure.travis-ci.org/davidhalter/jedi.png?branch=master
|
|
||||||
:target: http://travis-ci.org/davidhalter/jedi
|
|
||||||
:alt: Travis-CI build status
|
|
||||||
|
|
||||||
**beta testing**
|
|
||||||
|
|
||||||
*If you have any comments or feature requests, please tell me! I really want to
|
|
||||||
know, what you think about Jedi.*
|
|
||||||
|
|
||||||
Jedi is an autocompletion tool for Python. It works. With and without syntax
|
|
||||||
errors. Sometimes it sucks, but that's normal in dynamic languages. But it
|
|
||||||
sucks less than other tools. It understands almost all of the basic Python
|
|
||||||
syntax elements including many builtins.
|
|
||||||
|
|
||||||
Jedi suports two different goto functions and has support for renaming.
|
|
||||||
Probably it will also have some support for refactoring in the future.
|
|
||||||
|
|
||||||
Jedi uses a very simple interface to connect with IDE's. As an reference, there
|
|
||||||
is a VIM implementation, which uses Jedi's autocompletion. However, I encourage
|
|
||||||
you to use Jedi in your IDEs. Start writing plugins! If there are problems with
|
|
||||||
licensing, just contact me.
|
|
||||||
|
|
||||||
At the moment Jedi can be used as a
|
|
||||||
`VIM-Plugin <http://github.com/davidhalter/jedi-vim>`_. So, if you want to test
|
|
||||||
Jedi for now, you'll have to use VIM. But there are new plugins emerging:
|
|
||||||
|
|
||||||
- `Emacs-Plugin <https://github.com/tkf/emacs-jedi>`_
|
|
||||||
- `Sublime-Plugin <https://github.com/svaiter/SublimeJEDI>`_ **Under construction**
|
|
||||||
|
|
||||||
Here are some pictures:
|
|
||||||
|
|
||||||
.. image:: https://github.com/davidhalter/jedi/raw/master/screenshot_complete.png
|
|
||||||
|
|
||||||
Completion for almost anything (Ctrl+Space).
|
|
||||||
|
|
||||||
.. image:: https://github.com/davidhalter/jedi/raw/master/screenshot_function.png
|
|
||||||
|
|
||||||
Display of function/class bodies, docstrings.
|
|
||||||
|
|
||||||
.. image:: https://github.com/davidhalter/jedi/raw/master/screenshot_pydoc.png
|
|
||||||
|
|
||||||
Pydoc support (with highlighting, Shift+k).
|
|
||||||
|
|
||||||
There is also support for goto and renaming.
|
|
||||||
|
|
||||||
Get the latest from `github <http://github.com/davidhalter/jedi>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Installation
|
|
||||||
============
|
|
||||||
|
|
||||||
You can either include Jedi as a submodule in your text editor plugin (like
|
|
||||||
jedi-vim_ does it by default), or you
|
|
||||||
can install Jedi systemwide.
|
|
||||||
|
|
||||||
The preferred way to install the Jedi library into your system is by using
|
|
||||||
pip_::
|
|
||||||
|
|
||||||
sudo pip install jedi
|
|
||||||
|
|
||||||
If you want to install the current development version::
|
|
||||||
|
|
||||||
sudo pip install -e git://github.com/davidhalter/jedi.git#egg=jedi
|
|
||||||
|
|
||||||
Note: This just installs the Jedi library, not the editor plugins. For
|
|
||||||
information about how to make it work with your editor, refer to the
|
|
||||||
corresponding documentation.
|
|
||||||
|
|
||||||
|
|
||||||
Support
|
|
||||||
=======
|
|
||||||
|
|
||||||
Jedi supports Python 2.5 up to 3.x. There is just one code base, for both
|
|
||||||
Python 2 and 3.
|
|
||||||
Jedi supports many of the widely used Python features:
|
|
||||||
|
|
||||||
- builtin functions/classes support
|
|
||||||
- complex module / function / class structures
|
|
||||||
- ignores syntax and indentation errors
|
|
||||||
- multiple returns / yields
|
|
||||||
- tuple assignments / array indexing / dictionary indexing
|
|
||||||
- exceptions / with-statement
|
|
||||||
- \*args / \*\*kwargs
|
|
||||||
- decorators
|
|
||||||
- descriptors -> property / staticmethod / classmethod
|
|
||||||
- closures
|
|
||||||
- generators (yield statement) / iterators
|
|
||||||
- support for some magic methods: ``__call__``, ``__iter__``, ``__next__``,
|
|
||||||
``__get__``, ``__getitem__``, ``__init__``
|
|
||||||
- support for list.append, set.add, list.extend, etc.
|
|
||||||
- (nested) list comprehensions / ternary expressions
|
|
||||||
- relative imports
|
|
||||||
- ``getattr()`` / ``__getattr__`` / ``__getattribute__``
|
|
||||||
- function annotations (py3k feature, are ignored right now, but being parsed.
|
|
||||||
I don't know what to do with them.)
|
|
||||||
- class decorators (py3k feature, are being ignored too, until I find a use
|
|
||||||
case, that doesn't work with Jedi)
|
|
||||||
- simple/usual ``sys.path`` modifications
|
|
||||||
- ``isinstance`` checks for if/while/assert
|
|
||||||
- virtualenv support
|
|
||||||
- infer function arguments with sphinx (and other) docstrings
|
|
||||||
|
|
||||||
However, it does not yet support (and probably will in future versions, because
|
|
||||||
they are on my todo list):
|
|
||||||
|
|
||||||
- manipulations of instances outside the instance variables, without using
|
|
||||||
functions
|
|
||||||
|
|
||||||
It does not support (and most probably will not in future versions):
|
|
||||||
|
|
||||||
- metaclasses (how could an auto-completion ever support this)
|
|
||||||
- ``setattr()``, ``__import__()``
|
|
||||||
- Writing to some dicts: ``globals()``, ``locals()``, ``object.__dict__``
|
|
||||||
- evaluate ``if`` / ``while``
|
|
||||||
|
|
||||||
|
|
||||||
Caveats
|
|
||||||
=======
|
|
||||||
|
|
||||||
This framework should work for both Python 2/3. However, some things were just
|
|
||||||
not as *pythonic* in Python 2 as things should be. To keep things simple, some
|
|
||||||
things have been held back:
|
|
||||||
|
|
||||||
- Classes: Always Python 3 like, therefore all classes inherit from ``object``.
|
|
||||||
- Generators: No ``next`` method. The ``__next__`` method is used instead.
|
|
||||||
- Exceptions are only looked at in the form of ``Exception as e``, no comma!
|
|
||||||
|
|
||||||
Syntax errors and other strange stuff, that is defined differently in the
|
|
||||||
Python language, may lead to undefined behaviour of the completion. Jedi is
|
|
||||||
**NOT** a Python compiler, that tries to correct you. It is a tool that wants
|
|
||||||
to help you. But **YOU** have to know Python, not Jedi.
|
|
||||||
|
|
||||||
Importing ``numpy`` can be quite slow sometimes, as well as loading the builtins
|
|
||||||
the first time. If you want to speed it up, you could write import hooks in
|
|
||||||
jedi, which preloads this stuff. However, once loaded, this is not a problem
|
|
||||||
anymore. The same is true for huge modules like ``PySide``, ``wx``, etc.
|
|
||||||
|
|
||||||
Security is an important issue for Jedi. Therefore no Python code is executed.
|
|
||||||
As long as you write pure python, everything is evaluated statically. But: If
|
|
||||||
you use builtin modules (`c_builtin`) there is no other option than to execute
|
|
||||||
those modules. However: Execute isn't that critical (as e.g. in pythoncomplete,
|
|
||||||
which used to execute *every* import!), because it means one import and no
|
|
||||||
more. So basically the only dangerous thing is using the import itself. If your
|
|
||||||
`c_builtin` uses some strange initializations, it might be dangerous. But if it
|
|
||||||
does you're screwed anyways, because eventualy you're going to execute your
|
|
||||||
code, which executes the import.
|
|
||||||
|
|
||||||
|
|
||||||
A little history
|
|
||||||
================
|
|
||||||
|
|
||||||
The Star Wars Jedi are awesome. My Jedi software tries to imitate a little bit
|
|
||||||
of the precognition the Jedi have. There is even an awesome `scene
|
|
||||||
<http://www.youtube.com/watch?v=5BDO3pyavOY>`_ of Monty Python Jedi's :-).
|
|
||||||
|
|
||||||
But actually the name hasn't so much to do with Star Wars. It's part of my
|
|
||||||
second name.
|
|
||||||
|
|
||||||
After I explained Guido van Rossum, how some parts of my auto-completion work,
|
|
||||||
he said (we drank a beer or two):
|
|
||||||
|
|
||||||
*Oh, that worries me*
|
|
||||||
|
|
||||||
When it's finished, I hope he'll like it :-)
|
|
||||||
|
|
||||||
I actually started Jedi, because there were no good solutions available for
|
|
||||||
VIM. Most auto-completions just didn't work well. The only good solution was
|
|
||||||
PyCharm. I just like my good old VIM. Rope was never really intended to be an
|
|
||||||
auto-completion (and also I really hate project folders for my Python scripts).
|
|
||||||
It's more of a refactoring suite. So I decided to do my own version of a
|
|
||||||
completion, which would execute non-dangerous code. But I soon realized, that
|
|
||||||
this wouldn't work. So I built an extremely recursive thing which understands
|
|
||||||
many of Python's key features.
|
|
||||||
|
|
||||||
By the way, I really tried to program it as understandable as possible. But I
|
|
||||||
think understanding it might need quite some time, because of its recursive
|
|
||||||
nature.
|
|
||||||
|
|
||||||
|
|
||||||
API-Design for IDEs
|
|
||||||
===================
|
|
||||||
|
|
||||||
If you want to set up an IDE with Jedi, you need to ``import jedi``. You should
|
|
||||||
have the following objects available:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script(source, line, column, source_path)
|
|
||||||
|
|
||||||
``source`` would be the source of your python file/script, separated by new
|
|
||||||
lines. ``line`` is the current line you want to perform actions on (starting
|
|
||||||
with line #1 as the first line). ``column`` represents the current
|
|
||||||
column/indent of the cursor (starting with zero). ``source_path`` should be the
|
|
||||||
path of your file in the file system.
|
|
||||||
|
|
||||||
It returns a script object that contains the relevant information for the other
|
|
||||||
functions to work without params.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script().complete
|
|
||||||
|
|
||||||
Returns ``api.Completion`` objects. Those objects have got
|
|
||||||
informations about the completions. More than just names.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script().goto
|
|
||||||
|
|
||||||
Similar to complete. The returned ``api.Definition`` objects contain
|
|
||||||
information about the definitions found.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script().get_definition
|
|
||||||
|
|
||||||
Mostly used for tests. Like goto, but follows statements and imports and
|
|
||||||
doesn't break there. You probably don't want to use this function. It's
|
|
||||||
mostly for testing.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script().related_names
|
|
||||||
|
|
||||||
Returns all names that point to the definition of the name under the
|
|
||||||
cursor. This is also very useful for refactoring (renaming).
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
Script().get_in_function_call
|
|
||||||
|
|
||||||
Get the ``Function`` object of the call you're currently in, e.g.: ``abs(``
|
|
||||||
with the cursor at the end would return the builtin ``abs`` function.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
NotFoundError
|
|
||||||
|
|
||||||
If you use the goto function and no valid identifier (name) is at the
|
|
||||||
place of the cursor (position). It will raise this exception.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
set_debug_function
|
|
||||||
|
|
||||||
Sets a callback function for ``debug.py``. This function is called with
|
|
||||||
multiple text objects, in python 3 you could insert ``print``.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
settings
|
|
||||||
|
|
||||||
Access to the ``settings.py`` module. The settings are described there.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
.. _jedi-vim: http://github.com/davidhalter/jedi-vim
|
|
||||||
.. _pip: http://www.pip-installer.org/
|
|
||||||
|
|
||||||
Keywords: python completion refactoring vim
|
|
||||||
Platform: any
|
|
||||||
Classifier: Development Status :: 4 - Beta
|
|
||||||
Classifier: Environment :: Plugins
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
|
|
||||||
Classifier: Operating System :: OS Independent
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
||||||
Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
|
|
||||||
Classifier: Topic :: Utilities
|
|
|
@ -1,31 +0,0 @@
|
||||||
AUTHORS.txt
|
|
||||||
LICENSE.txt
|
|
||||||
MANIFEST.in
|
|
||||||
README.rst
|
|
||||||
setup.cfg
|
|
||||||
setup.py
|
|
||||||
jedi/__init__.py
|
|
||||||
jedi/_compatibility.py
|
|
||||||
jedi/api.py
|
|
||||||
jedi/api_classes.py
|
|
||||||
jedi/builtin.py
|
|
||||||
jedi/debug.py
|
|
||||||
jedi/docstrings.py
|
|
||||||
jedi/dynamic.py
|
|
||||||
jedi/evaluate.py
|
|
||||||
jedi/helpers.py
|
|
||||||
jedi/imports.py
|
|
||||||
jedi/keywords.py
|
|
||||||
jedi/modules.py
|
|
||||||
jedi/parsing.py
|
|
||||||
jedi/settings.py
|
|
||||||
jedi.egg-info/PKG-INFO
|
|
||||||
jedi.egg-info/SOURCES.txt
|
|
||||||
jedi.egg-info/dependency_links.txt
|
|
||||||
jedi.egg-info/top_level.txt
|
|
||||||
jedi/mixin/_functools.pym
|
|
||||||
jedi/mixin/_sre.pym
|
|
||||||
jedi/mixin/_weakref.pym
|
|
||||||
jedi/mixin/builtins.pym
|
|
||||||
jedi/mixin/datetime.pym
|
|
||||||
jedi/mixin/posix.pym
|
|
|
@ -1,41 +0,0 @@
|
||||||
../jedi/builtin.py
|
|
||||||
../jedi/evaluate.py
|
|
||||||
../jedi/debug.py
|
|
||||||
../jedi/helpers.py
|
|
||||||
../jedi/settings.py
|
|
||||||
../jedi/keywords.py
|
|
||||||
../jedi/api_classes.py
|
|
||||||
../jedi/api.py
|
|
||||||
../jedi/modules.py
|
|
||||||
../jedi/__init__.py
|
|
||||||
../jedi/parsing.py
|
|
||||||
../jedi/docstrings.py
|
|
||||||
../jedi/_compatibility.py
|
|
||||||
../jedi/imports.py
|
|
||||||
../jedi/dynamic.py
|
|
||||||
../jedi/mixin/builtins.pym
|
|
||||||
../jedi/mixin/posix.pym
|
|
||||||
../jedi/mixin/_functools.pym
|
|
||||||
../jedi/mixin/_weakref.pym
|
|
||||||
../jedi/mixin/_sre.pym
|
|
||||||
../jedi/mixin/datetime.pym
|
|
||||||
../jedi/builtin.pyc
|
|
||||||
../jedi/evaluate.pyc
|
|
||||||
../jedi/debug.pyc
|
|
||||||
../jedi/helpers.pyc
|
|
||||||
../jedi/settings.pyc
|
|
||||||
../jedi/keywords.pyc
|
|
||||||
../jedi/api_classes.pyc
|
|
||||||
../jedi/api.pyc
|
|
||||||
../jedi/modules.pyc
|
|
||||||
../jedi/__init__.pyc
|
|
||||||
../jedi/parsing.pyc
|
|
||||||
../jedi/docstrings.pyc
|
|
||||||
../jedi/_compatibility.pyc
|
|
||||||
../jedi/imports.pyc
|
|
||||||
../jedi/dynamic.pyc
|
|
||||||
./
|
|
||||||
SOURCES.txt
|
|
||||||
PKG-INFO
|
|
||||||
dependency_links.txt
|
|
||||||
top_level.txt
|
|
|
@ -1 +0,0 @@
|
||||||
jedi
|
|
|
@ -1,16 +0,0 @@
|
||||||
import sys
|
|
||||||
|
|
||||||
# python imports are hell sometimes. Especially the combination of relative
|
|
||||||
# imports and circular imports... Just avoid it:
|
|
||||||
sys.path.insert(0, __path__[0])
|
|
||||||
|
|
||||||
from .api import Script, NotFoundError, set_debug_function
|
|
||||||
from . import settings
|
|
||||||
|
|
||||||
from . import api
|
|
||||||
|
|
||||||
__doc__ = api.__doc__
|
|
||||||
|
|
||||||
del api
|
|
||||||
|
|
||||||
sys.path.pop(0)
|
|
|
@ -1,169 +0,0 @@
|
||||||
"""
|
|
||||||
This is a compatibility module, to make it possible to use jedi also with older
|
|
||||||
python versions.
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
|
|
||||||
is_py3k = sys.hexversion >= 0x03000000
|
|
||||||
|
|
||||||
is_py25 = sys.hexversion < 0x02060000
|
|
||||||
|
|
||||||
# next was defined in python 2.6, in python 3 obj.next won't be possible
|
|
||||||
# anymore
|
|
||||||
try:
|
|
||||||
next = next
|
|
||||||
except NameError:
|
|
||||||
_raiseStopIteration = object()
|
|
||||||
|
|
||||||
def next(iterator, default=_raiseStopIteration):
|
|
||||||
if not hasattr(iterator, 'next'):
|
|
||||||
raise TypeError("not an iterator")
|
|
||||||
try:
|
|
||||||
return iterator.next()
|
|
||||||
except StopIteration:
|
|
||||||
if default is _raiseStopIteration:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
return default
|
|
||||||
|
|
||||||
# ast module was defined in python 2.6
|
|
||||||
try:
|
|
||||||
from ast import literal_eval
|
|
||||||
except ImportError:
|
|
||||||
literal_eval = eval
|
|
||||||
|
|
||||||
|
|
||||||
# properties in 2.5
|
|
||||||
try:
|
|
||||||
property.setter
|
|
||||||
except AttributeError:
|
|
||||||
class property(property):
|
|
||||||
def __init__(self, fget, *args, **kwargs):
|
|
||||||
self.__doc__ = fget.__doc__
|
|
||||||
super(property, self).__init__(fget, *args, **kwargs)
|
|
||||||
|
|
||||||
def setter(self, fset):
|
|
||||||
cls_ns = sys._getframe(1).f_locals
|
|
||||||
for k, v in cls_ns.iteritems():
|
|
||||||
if v == self:
|
|
||||||
propname = k
|
|
||||||
break
|
|
||||||
cls_ns[propname] = property(self.fget, fset,
|
|
||||||
self.fdel, self.__doc__)
|
|
||||||
return cls_ns[propname]
|
|
||||||
else:
|
|
||||||
property = property
|
|
||||||
|
|
||||||
# unicode function
|
|
||||||
try:
|
|
||||||
unicode = unicode
|
|
||||||
except NameError:
|
|
||||||
unicode = str
|
|
||||||
|
|
||||||
if is_py3k:
|
|
||||||
utf8 = lambda s: s
|
|
||||||
else:
|
|
||||||
utf8 = lambda s: s.decode('utf-8')
|
|
||||||
|
|
||||||
utf8.__doc__ = """
|
|
||||||
Decode a raw string into unicode object. Do nothing in Python 3.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# exec function
|
|
||||||
if is_py3k:
|
|
||||||
def exec_function(source, global_map):
|
|
||||||
exec(source, global_map)
|
|
||||||
else:
|
|
||||||
eval(compile("""def exec_function(source, global_map):
|
|
||||||
exec source in global_map """, 'blub', 'exec'))
|
|
||||||
|
|
||||||
# StringIO (Python 2.5 has no io module), so use io only for py3k
|
|
||||||
try:
|
|
||||||
from StringIO import StringIO
|
|
||||||
except ImportError:
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
# hasattr function used because python
|
|
||||||
if is_py3k:
|
|
||||||
hasattr = hasattr
|
|
||||||
else:
|
|
||||||
def hasattr(obj, name):
|
|
||||||
try:
|
|
||||||
getattr(obj, name)
|
|
||||||
return True
|
|
||||||
except AttributeError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
class Python3Method(object):
|
|
||||||
def __init__(self, func):
|
|
||||||
self.func = func
|
|
||||||
|
|
||||||
def __get__(self, obj, objtype):
|
|
||||||
if obj is None:
|
|
||||||
return lambda *args, **kwargs: self.func(*args, **kwargs)
|
|
||||||
else:
|
|
||||||
return lambda *args, **kwargs: self.func(obj, *args, **kwargs)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# the python3 way
|
|
||||||
from functools import reduce
|
|
||||||
except ImportError:
|
|
||||||
reduce = reduce
|
|
||||||
|
|
||||||
|
|
||||||
def use_metaclass(meta, *bases):
|
|
||||||
""" Create a class with a metaclass. """
|
|
||||||
if not bases:
|
|
||||||
bases = (object,)
|
|
||||||
return meta("HackClass", bases, {})
|
|
||||||
|
|
||||||
try:
|
|
||||||
from inspect import cleandoc
|
|
||||||
except ImportError:
|
|
||||||
# python 2.5 doesn't have this method
|
|
||||||
import string
|
|
||||||
|
|
||||||
def cleandoc(doc):
|
|
||||||
"""Clean up indentation from docstrings.
|
|
||||||
|
|
||||||
Any whitespace that can be uniformly removed from the second line
|
|
||||||
onwards is removed."""
|
|
||||||
try:
|
|
||||||
lines = string.split(string.expandtabs(doc), '\n')
|
|
||||||
except UnicodeError:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
# Find minimum indentation of any non-blank lines after first line.
|
|
||||||
margin = sys.maxint
|
|
||||||
for line in lines[1:]:
|
|
||||||
content = len(string.lstrip(line))
|
|
||||||
if content:
|
|
||||||
indent = len(line) - content
|
|
||||||
margin = min(margin, indent)
|
|
||||||
# Remove indentation.
|
|
||||||
if lines:
|
|
||||||
lines[0] = lines[0].lstrip()
|
|
||||||
if margin < sys.maxint:
|
|
||||||
for i in range(1, len(lines)):
|
|
||||||
lines[i] = lines[i][margin:]
|
|
||||||
# Remove any trailing or leading blank lines.
|
|
||||||
while lines and not lines[-1]:
|
|
||||||
lines.pop()
|
|
||||||
while lines and not lines[0]:
|
|
||||||
lines.pop(0)
|
|
||||||
return string.join(lines, '\n')
|
|
||||||
|
|
||||||
if is_py25:
|
|
||||||
# adds the `itertools.chain.from_iterable` constructor
|
|
||||||
import itertools
|
|
||||||
|
|
||||||
class chain(itertools.chain):
|
|
||||||
@staticmethod
|
|
||||||
def from_iterable(iterables):
|
|
||||||
# chain.from_iterable(['ABC', 'DEF']) --> A B C D E F
|
|
||||||
for it in iterables:
|
|
||||||
for element in it:
|
|
||||||
yield element
|
|
||||||
itertools.chain = chain
|
|
||||||
del chain
|
|
|
@ -1,457 +0,0 @@
|
||||||
"""
|
|
||||||
Jedi is an autocompletion library for Python. It offers additonal
|
|
||||||
services such as goto / get_definition / pydoc support /
|
|
||||||
get_in_function_call / related names.
|
|
||||||
|
|
||||||
To give you a simple exmple how you can use the jedi library,
|
|
||||||
here is an exmple for the autocompletion feature:
|
|
||||||
|
|
||||||
>>> import jedi
|
|
||||||
>>> source = '''import json; json.l'''
|
|
||||||
>>> script = jedi.Script(source, 1, 19, '')
|
|
||||||
>>> script
|
|
||||||
<jedi.api.Script at 0x7f6d40f3db90>
|
|
||||||
>>> completions = script.complete()
|
|
||||||
>>> completions
|
|
||||||
[<Completion: load>, <Completion: loads>]
|
|
||||||
>>> completions[0].complete
|
|
||||||
'oad'
|
|
||||||
>>> completions[0].word
|
|
||||||
'load'
|
|
||||||
|
|
||||||
As you see Jedi is pretty simple and allows you to concentrate
|
|
||||||
writing a good text editor, while still having very good IDE features
|
|
||||||
for Python.
|
|
||||||
"""
|
|
||||||
from __future__ import with_statement
|
|
||||||
__all__ = ['Script', 'NotFoundError', 'set_debug_function']
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
import parsing
|
|
||||||
import dynamic
|
|
||||||
import imports
|
|
||||||
import evaluate
|
|
||||||
import modules
|
|
||||||
import debug
|
|
||||||
import settings
|
|
||||||
import keywords
|
|
||||||
import helpers
|
|
||||||
import builtin
|
|
||||||
import api_classes
|
|
||||||
|
|
||||||
from _compatibility import next, unicode
|
|
||||||
|
|
||||||
|
|
||||||
class NotFoundError(Exception):
|
|
||||||
""" A custom error to avoid catching the wrong exceptions """
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Script(object):
|
|
||||||
"""
|
|
||||||
A Script is the base for a completion, goto or whatever call.
|
|
||||||
|
|
||||||
:param source: The source code of the current file
|
|
||||||
:type source: string
|
|
||||||
:param line: The line to complete in.
|
|
||||||
:type line: int
|
|
||||||
:param col: The column to complete in.
|
|
||||||
:type col: int
|
|
||||||
:param source_path: The path in the os, the current module is in.
|
|
||||||
:type source_path: string or None
|
|
||||||
:param source_encoding: encoding for decoding `source`, when it
|
|
||||||
is not a `unicode` object.
|
|
||||||
:type source_encoding: string
|
|
||||||
"""
|
|
||||||
def __init__(self, source, line, column, source_path,
|
|
||||||
source_encoding='utf-8'):
|
|
||||||
debug.reset_time()
|
|
||||||
try:
|
|
||||||
source = unicode(source, source_encoding, 'replace')
|
|
||||||
# Use 'replace' over 'ignore' to hold code structure.
|
|
||||||
except TypeError: # `source` is already a unicode object
|
|
||||||
pass
|
|
||||||
self.pos = line, column
|
|
||||||
self.module = modules.ModuleWithCursor(source_path, source=source,
|
|
||||||
position=self.pos)
|
|
||||||
self.source_path = source_path
|
|
||||||
debug.speed('init')
|
|
||||||
|
|
||||||
@property
|
|
||||||
def parser(self):
|
|
||||||
""" The lazy parser """
|
|
||||||
return self.module.parser
|
|
||||||
|
|
||||||
def complete(self):
|
|
||||||
"""
|
|
||||||
An auto completer for python files.
|
|
||||||
|
|
||||||
:return: list of Completion objects, sorted by name and __ comes last.
|
|
||||||
:rtype: list
|
|
||||||
"""
|
|
||||||
def follow_imports_if_possible(name):
|
|
||||||
# TODO remove this, or move to another place (not used)
|
|
||||||
par = name.parent
|
|
||||||
if isinstance(par, parsing.Import) and not \
|
|
||||||
isinstance(self.parser.user_stmt, parsing.Import):
|
|
||||||
new = imports.ImportPath(par).follow(is_goto=True)
|
|
||||||
# Only remove the old entry if a new one has been found.
|
|
||||||
#print par, new, par.parent
|
|
||||||
if new:
|
|
||||||
try:
|
|
||||||
return new
|
|
||||||
except AttributeError: # .name undefined
|
|
||||||
pass
|
|
||||||
return [name]
|
|
||||||
|
|
||||||
|
|
||||||
debug.speed('complete start')
|
|
||||||
path = self.module.get_path_until_cursor()
|
|
||||||
path, dot, like = self._get_completion_parts(path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
scopes = list(self._prepare_goto(path, True))
|
|
||||||
except NotFoundError:
|
|
||||||
scopes = []
|
|
||||||
scope_generator = evaluate.get_names_for_scope(
|
|
||||||
self.parser.user_scope, self.pos)
|
|
||||||
completions = []
|
|
||||||
for scope, name_list in scope_generator:
|
|
||||||
for c in name_list:
|
|
||||||
completions.append((c, scope))
|
|
||||||
else:
|
|
||||||
completions = []
|
|
||||||
debug.dbg('possible scopes', scopes)
|
|
||||||
for s in scopes:
|
|
||||||
if s.isinstance(evaluate.Function):
|
|
||||||
names = s.get_magic_method_names()
|
|
||||||
else:
|
|
||||||
if isinstance(s, imports.ImportPath):
|
|
||||||
if like == 'import':
|
|
||||||
l = self.module.get_line(self.pos[0])[:self.pos[1]]
|
|
||||||
if not l.endswith('import import'):
|
|
||||||
continue
|
|
||||||
names = s.get_defined_names(on_import_stmt=True)
|
|
||||||
else:
|
|
||||||
names = s.get_defined_names()
|
|
||||||
|
|
||||||
for c in names:
|
|
||||||
completions.append((c, s))
|
|
||||||
|
|
||||||
if not dot: # named_params have no dots
|
|
||||||
call_def = self.get_in_function_call()
|
|
||||||
if call_def:
|
|
||||||
if not call_def.module.is_builtin():
|
|
||||||
for p in call_def.params:
|
|
||||||
completions.append((p.get_name(), p))
|
|
||||||
|
|
||||||
# Do the completion if there is no path before and no import stmt.
|
|
||||||
if (not scopes or not isinstance(scopes[0], imports.ImportPath)) \
|
|
||||||
and not path:
|
|
||||||
# add keywords
|
|
||||||
bs = builtin.Builtin.scope
|
|
||||||
completions += ((k, bs) for k in keywords.get_keywords(
|
|
||||||
all=True))
|
|
||||||
|
|
||||||
needs_dot = not dot and path
|
|
||||||
|
|
||||||
comps = []
|
|
||||||
for c, s in set(completions):
|
|
||||||
n = c.names[-1]
|
|
||||||
if settings.case_insensitive_completion \
|
|
||||||
and n.lower().startswith(like.lower()) \
|
|
||||||
or n.startswith(like):
|
|
||||||
if not evaluate.filter_private_variable(s,
|
|
||||||
self.parser.user_stmt, n):
|
|
||||||
new = api_classes.Completion(c, needs_dot,
|
|
||||||
len(like), s)
|
|
||||||
comps.append(new)
|
|
||||||
|
|
||||||
debug.speed('complete end')
|
|
||||||
|
|
||||||
return sorted(comps, key=lambda x: (x.word.startswith('__'),
|
|
||||||
x.word.startswith('_'),
|
|
||||||
x.word.lower()))
|
|
||||||
|
|
||||||
def _prepare_goto(self, goto_path, is_like_search=False):
|
|
||||||
""" Base for complete, goto and get_definition. Basically it returns
|
|
||||||
the resolved scopes under cursor. """
|
|
||||||
debug.dbg('start: %s in %s' % (goto_path, self.parser.scope))
|
|
||||||
|
|
||||||
user_stmt = self.parser.user_stmt
|
|
||||||
debug.speed('parsed')
|
|
||||||
if not user_stmt and len(goto_path.split('\n')) > 1:
|
|
||||||
# If the user_stmt is not defined and the goto_path is multi line,
|
|
||||||
# something's strange. Most probably the backwards tokenizer
|
|
||||||
# matched to much.
|
|
||||||
return []
|
|
||||||
|
|
||||||
if isinstance(user_stmt, parsing.Import):
|
|
||||||
scopes = [self._get_on_import_stmt(is_like_search)[0]]
|
|
||||||
else:
|
|
||||||
# just parse one statement, take it and evaluate it
|
|
||||||
stmt = self._get_under_cursor_stmt(goto_path)
|
|
||||||
scopes = evaluate.follow_statement(stmt)
|
|
||||||
return scopes
|
|
||||||
|
|
||||||
def _get_under_cursor_stmt(self, cursor_txt):
|
|
||||||
r = parsing.PyFuzzyParser(cursor_txt, self.source_path, no_docstr=True)
|
|
||||||
try:
|
|
||||||
stmt = r.module.statements[0]
|
|
||||||
except IndexError:
|
|
||||||
raise NotFoundError()
|
|
||||||
stmt.start_pos = self.pos
|
|
||||||
stmt.parent = self.parser.user_scope
|
|
||||||
return stmt
|
|
||||||
|
|
||||||
def get_definition(self):
|
|
||||||
"""
|
|
||||||
Returns the definitions of a the path under the cursor. This is
|
|
||||||
not a goto function! This follows complicated paths and returns the
|
|
||||||
end, not the first definition.
|
|
||||||
The big difference of goto and get_definition is that goto doesn't
|
|
||||||
follow imports and statements.
|
|
||||||
Multiple objects may be returned, because Python itself is a dynamic
|
|
||||||
language, which means depending on an option you can have two different
|
|
||||||
versions of a function.
|
|
||||||
|
|
||||||
:return: list of Definition objects, which are basically scopes.
|
|
||||||
:rtype: list
|
|
||||||
"""
|
|
||||||
def resolve_import_paths(scopes):
|
|
||||||
for s in scopes.copy():
|
|
||||||
if isinstance(s, imports.ImportPath):
|
|
||||||
scopes.remove(s)
|
|
||||||
scopes.update(resolve_import_paths(set(s.follow())))
|
|
||||||
return scopes
|
|
||||||
|
|
||||||
goto_path = self.module.get_path_under_cursor()
|
|
||||||
|
|
||||||
context = self.module.get_context()
|
|
||||||
if next(context) in ('class', 'def'):
|
|
||||||
scopes = set([self.module.parser.user_scope])
|
|
||||||
elif not goto_path:
|
|
||||||
op = self.module.get_operator_under_cursor()
|
|
||||||
scopes = set([keywords.get_operator(op, self.pos)] if op else [])
|
|
||||||
else:
|
|
||||||
scopes = set(self._prepare_goto(goto_path))
|
|
||||||
|
|
||||||
scopes = resolve_import_paths(scopes)
|
|
||||||
|
|
||||||
# add keywords
|
|
||||||
scopes |= keywords.get_keywords(string=goto_path, pos=self.pos)
|
|
||||||
|
|
||||||
d = set([api_classes.Definition(s) for s in scopes
|
|
||||||
if not isinstance(s, imports.ImportPath._GlobalNamespace)])
|
|
||||||
return sorted(d, key=lambda x: (x.module_path, x.start_pos))
|
|
||||||
|
|
||||||
def goto(self):
|
|
||||||
"""
|
|
||||||
Returns the first definition found by goto. This means: It doesn't
|
|
||||||
follow imports and statements.
|
|
||||||
Multiple objects may be returned, because Python itself is a dynamic
|
|
||||||
language, which means depending on an option you can have two different
|
|
||||||
versions of a function.
|
|
||||||
|
|
||||||
:return: list of Definition objects, which are basically scopes.
|
|
||||||
"""
|
|
||||||
d = [api_classes.Definition(d) for d in set(self._goto()[0])]
|
|
||||||
return sorted(d, key=lambda x: (x.module_path, x.start_pos))
|
|
||||||
|
|
||||||
def _goto(self, add_import_name=False):
    """
    Used for goto and related_names.

    :param add_import_name: TODO add description
        (when True and the cursor is on an import, the import's own
        defined name is appended to the results as well).
    :return: tuple ``(definitions, search_name)`` where ``definitions`` is
        a set/list of parser objects and ``search_name`` is the string
        being looked up.
    """
    def follow_inexistent_imports(defs):
        """ Imports can be generated, e.g. following
        `multiprocessing.dummy` generates an import dummy in the
        multiprocessing module. The Import doesn't exist -> follow.
        """
        definitions = set(defs)
        for d in defs:
            # a (0, 0) start position marks a generated (non-existent)
            # import, which must be followed to its real target
            if isinstance(d.parent, parsing.Import) \
                    and d.start_pos == (0, 0):
                i = imports.ImportPath(d.parent).follow(is_goto=True)
                definitions.remove(d)
                definitions |= follow_inexistent_imports(i)
        return definitions

    goto_path = self.module.get_path_under_cursor()
    context = self.module.get_context()
    if next(context) in ('class', 'def'):
        # cursor is on the name of a class/function definition itself
        user_scope = self.parser.user_scope
        definitions = set([user_scope.name])
        search_name = str(user_scope.name)
    elif isinstance(self.parser.user_stmt, parsing.Import):
        # cursor is somewhere inside an import statement
        s, name_part = self._get_on_import_stmt()
        try:
            definitions = [s.follow(is_goto=True)[0]]
        except IndexError:
            definitions = []
        search_name = str(name_part)

        if add_import_name:
            import_name = self.parser.user_stmt.get_defined_names()
            # imports have only one name
            if name_part == import_name[0].names[-1]:
                definitions.append(import_name[0])
    else:
        # ordinary statement: evaluate the path under the cursor
        stmt = self._get_under_cursor_stmt(goto_path)
        defs, search_name = evaluate.goto(stmt)
        definitions = follow_inexistent_imports(defs)
    return definitions, search_name
|
|
||||||
|
|
||||||
def related_names(self, additional_module_paths=None):
    """
    Returns `dynamic.RelatedName` objects, which contain all names, that
    are defined by the same variable, function, class or import.
    This function can be used either to show all the usages of a variable
    or for renaming purposes.

    TODO implement additional_module_paths

    :param additional_module_paths: currently unused; kept for interface
        compatibility (defaults to an empty list).
    :return: list of RelatedName objects, sorted by (module_path,
        start_pos), descending.
    """
    # BUGFIX: the previous signature used a mutable default argument
    # (`additional_module_paths=[]`); replaced with the None-sentinel
    # idiom without changing behavior for callers.
    if additional_module_paths is None:
        additional_module_paths = []

    user_stmt = self.parser.user_stmt
    definitions, search_name = self._goto(add_import_name=True)
    if isinstance(user_stmt, parsing.Statement) \
            and self.pos < user_stmt.get_assignment_calls().start_pos:
        # the search_name might be before `=`
        definitions = [v for v in user_stmt.set_vars
                       if str(v) == search_name]
    if not isinstance(user_stmt, parsing.Import):
        # import case is looked at with add_import_name option
        definitions = dynamic.related_name_add_import_modules(definitions,
                                                              search_name)

    # collect the modules the definitions live in; the user's own module
    # always participates in the search
    module = set([d.get_parent_until() for d in definitions])
    module.add(self.parser.module)
    names = dynamic.related_names(definitions, search_name, module)

    # the definitions themselves count as related names, too
    for d in set(definitions):
        if isinstance(d, parsing.Module):
            # modules are their own name object here
            names.append(api_classes.RelatedName(d, d))
        else:
            names.append(api_classes.RelatedName(d.names[0], d))

    return sorted(set(names), key=lambda x: (x.module_path, x.start_pos),
                  reverse=True)
|
|
||||||
|
|
||||||
def get_in_function_call(self):
    """
    Return the function, that the cursor is in, e.g.:
    >>> isinstance(| # | <-- cursor is here

    This would return the `isinstance` function. In contrary:
    >>> isinstance()| # | <-- cursor is here

    This would return `None`.

    :return: an `api_classes.CallDef` for the enclosing call, or None.
    """
    def check_user_stmt(user_stmt):
        # Locate the call and the argument index under the cursor inside
        # a statement; returns (None, 0) if the statement is unusable.
        if user_stmt is None \
                or not isinstance(user_stmt, parsing.Statement):
            return None, 0
        # copy so that scanning cannot mutate the cached parse tree
        ass = helpers.fast_parent_copy(user_stmt.get_assignment_calls())

        call, index, stop = helpers.scan_array_for_pos(ass, self.pos)
        return call, index

    def check_cache():
        """ Do the parsing with a part parser, therefore reduce ressource
        costs.
        TODO this is not working with multi-line docstrings, improve.
        """
        if self.source_path is None:
            return None, 0

        try:
            timestamp, parser = builtin.CachedModule.cache[
                self.source_path]
        except KeyError:
            return None, 0
        part_parser = self.module.get_part_parser()
        user_stmt = part_parser.user_stmt
        call, index = check_user_stmt(user_stmt)
        if call:
            old_stmt = parser.module.get_statement_for_position(self.pos)
            if old_stmt is None:
                return None, 0
            old_call, old_index = check_user_stmt(old_stmt)
            if old_call:
                # compare repr because that should definitely be the same.
                # Otherwise the whole thing is out of sync.
                if repr(old_call) == repr(call):
                    # return the index of the part_parser
                    return old_call, index
            return None, 0
        else:
            # no call found in the part parse -> signal caller to give up
            raise NotFoundError()

    debug.speed('func_call start')
    try:
        call, index = check_cache()
    except NotFoundError:
        return None
    debug.speed('func_call parsed')

    if call is None:
        # This is a backup, if the above is not successful.
        user_stmt = self.parser.user_stmt
        call, index = check_user_stmt(user_stmt)
        if call is None:
            return None

    debug.speed('func_call user_stmt')
    # temporarily relax the dynamic-evaluation limits while following
    with helpers.scale_speed_settings(settings.scale_get_in_function_call):
        origins = evaluate.follow_call(call)
    debug.speed('func_call followed')

    if len(origins) == 0:
        return None
    # just take entry zero, because we need just one.
    executable = origins[0]

    return api_classes.CallDef(executable, index, call)
|
|
||||||
|
|
||||||
def _get_on_import_stmt(self, is_like_search=False):
    """ Resolve the user statement, if it is an import. Only resolve the
    parts until the user position. """
    user_stmt = self.parser.user_stmt
    cur_name_part = None
    # number of dotted parts at/after the cursor, minus one
    kill_count = -1
    for import_name in user_stmt.get_all_import_names():
        # an alias is not part of the dotted path itself
        if user_stmt.alias == import_name:
            continue
        for part in import_name.names:
            if part.end_pos >= self.pos:
                if not cur_name_part:
                    cur_name_part = part
                kill_count += 1

    import_path = imports.ImportPath(user_stmt, is_like_search,
                                     kill_count=kill_count,
                                     direct_resolve=True)
    return import_path, cur_name_part
|
|
||||||
|
|
||||||
def _get_completion_parts(self, path):
|
|
||||||
"""
|
|
||||||
Returns the parts for the completion
|
|
||||||
:return: tuple - (path, dot, like)
|
|
||||||
"""
|
|
||||||
match = re.match(r'^(.*?)(\.|)(\w?[\w\d]*)$', path, flags=re.S)
|
|
||||||
return match.groups()
|
|
||||||
|
|
||||||
def __del__(self):
    # Drop the global evaluation caches when the Script is garbage
    # collected, so stale results don't leak into the next run.
    evaluate.clear_caches()
|
|
||||||
|
|
||||||
|
|
||||||
def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,
                       notices=True, speed=True):
    """
    You can define a callback debug function to get all the debug messages.

    :param func_cb: The callback function for debug messages, with n params.
    :param warnings: enable warning messages.
    :param notices: enable notice messages.
    :param speed: enable speed/timing messages.
    """
    # configure which message categories are emitted, then install the sink
    debug.enable_warning = warnings
    debug.enable_notice = notices
    debug.enable_speed = speed
    debug.debug_function = func_cb
|
|
|
@ -1,311 +0,0 @@
|
||||||
""" The classes returned by the api """
|
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
|
||||||
|
|
||||||
import settings
|
|
||||||
import evaluate
|
|
||||||
import imports
|
|
||||||
import parsing
|
|
||||||
import keywords
|
|
||||||
|
|
||||||
|
|
||||||
class BaseDefinition(object):
    """Base class for the objects returned by the API (completions,
    definitions, related names).

    Wraps an internal parser object (scope/name/keyword) and exposes
    position and module-path information about it.
    """

    # map private/OS-specific stdlib module names to their public names
    _mapping = {'posixpath': 'os.path',
                'riscospath': 'os.path',
                'ntpath': 'os.path',
                'os2emxpath': 'os.path',
                'macpath': 'os.path',
                'genericpath': 'os.path',
                '_io': 'io',
                '__builtin__': '',
                'builtins': '',
                }

    # same idea, but for full dotted paths
    _tuple_mapping = dict((tuple(k.split('.')), v) for (k, v) in {
        'argparse._ActionsContainer': 'argparse.ArgumentParser',
        '_sre.SRE_Match': 're.MatchObject',
        '_sre.SRE_Pattern': 're.RegexObject',
    }.items())

    def __init__(self, definition, start_pos):
        """
        :param definition: the wrapped parser object (scope/name/keyword).
        :param start_pos: (line, column) tuple of the definition's start.
        """
        self.start_pos = start_pos
        self.definition = definition
        self.is_keyword = isinstance(definition, keywords.Keyword)

        # generate a path to the definition
        self.module_path = str(definition.get_parent_until().path)

    @property
    def type(self):
        """Return the class name of the wrapped definition (e.g. 'Function')."""
        # generate the type
        stripped = self.definition
        if isinstance(self.definition, evaluate.InstanceElement):
            stripped = self.definition.var
        return type(stripped).__name__

    @property
    def path(self):
        """Return the list of parent name objects leading to this definition."""
        path = []
        if not isinstance(self.definition, keywords.Keyword):
            par = self.definition
            while par is not None:
                try:
                    path.insert(0, par.name)
                except AttributeError:
                    # some parents (e.g. statements) have no name
                    pass
                par = par.parent
        return path

    @property
    def module_name(self):
        """Return the module name derived from `module_path`.

        e.g. ``.../os.py`` -> ``os`` and ``.../pkg/__init__.py`` -> ``pkg``.
        """
        path = self.module_path
        sep = os.path.sep
        # BUGFIX: the separator must be regex-escaped (it is a backslash on
        # Windows, which previously produced a wrong/invalid pattern) and
        # the '.py' dot must be a literal dot, not a wildcard.
        p = re.sub(r'^.*?([\w\d]+)(%s__init__)?\.py$' % re.escape(sep),
                   r'\1', path)
        return p

    def in_builtin_module(self):
        """Return True if the definition lives outside a ``.py`` file."""
        return not self.module_path.endswith('.py')

    @property
    def line_nr(self):
        """Return the (1-based) line of the definition's start position."""
        return self.start_pos[0]

    @property
    def column(self):
        """Return the column of the definition's start position."""
        return self.start_pos[1]

    @property
    def doc(self):
        """ Return a document string for this completion object. """
        try:
            return self.definition.doc
        except AttributeError:
            return self.raw_doc

    @property
    def raw_doc(self):
        """ Returns the raw docstring `__doc__` for any object """
        try:
            return str(self.definition.docstr)
        except AttributeError:
            return ''

    @property
    def description(self):
        """Return a plain string description of the wrapped definition."""
        return str(self.definition)

    @property
    def full_name(self):
        """
        Returns the dotted path to a certain class/function, see #61.
        """
        path = [str(p) for p in self.path]
        # TODO add further checks, the mapping should only occur on stdlib.
        try:
            path[0] = self._mapping[path[0]]
        except KeyError:
            pass
        for key, repl in self._tuple_mapping.items():
            if tuple(path[:len(key)]) == key:
                path = [repl] + path[len(key):]

        # an empty first element means the module prefix is suppressed
        return '.'.join(path if path[0] else path[1:])

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, self.description)
|
|
||||||
|
|
||||||
|
|
||||||
class Completion(BaseDefinition):
    """ `Completion` objects are returned from `Script.complete`. Providing
    some useful functions for IDE's. """
    def __init__(self, name, needs_dot, like_name_length, base):
        """
        :param name: the parser name object being completed.
        :param needs_dot: whether a leading '.' must be inserted.
        :param like_name_length: length of the part already typed.
        :param base: the object this completion is an attribute of.
        """
        super(Completion, self).__init__(name.parent, name.start_pos)

        self.name = name
        self.needs_dot = needs_dot
        self.like_name_length = like_name_length
        self.base = base

        # cache for `follow_definition`
        self._followed_definitions = None

    @property
    def complete(self):
        """ Delivers the rest of the word, e.g. completing `isinstance`
        >>> isinstan

        would return the string 'ce'. It also adds additional stuff, depending
        on your `settings.py`
        """
        dot = '.' if self.needs_dot else ''
        append = ''
        if settings.add_bracket_after_function \
                and self.type == 'Function':
            append = '('

        if settings.add_dot_after_module:
            if isinstance(self.base, parsing.Module):
                append += '.'
        if isinstance(self.base, parsing.Param):
            append += '='
        return dot + self.name.names[-1][self.like_name_length:] + append

    @property
    def word(self):
        """ In contrary to `complete` returns the whole word, e.g.
        >>> isinstan

        would return 'isinstance'.
        """
        return str(self.name.names[-1])

    @property
    def description(self):
        """ Provides a description of the completion object
        TODO return value is just __repr__ of some objects, improve! """
        parent = self.name.parent
        if parent is None:
            return ''
        t = self.type
        if t == 'Statement' or t == 'Import':
            desc = self.definition.get_code(False)
        else:
            desc = '.'.join(str(p) for p in self.path)

        # BUGFIX: `in_builtin_module` is a method; referencing it without
        # calling yields a bound method object that is always truthy, so
        # the '@<line>' suffix was never appended.
        line_nr = '' if self.in_builtin_module() else '@%s' % self.line_nr
        return '%s: %s%s' % (t, desc, line_nr)

    def follow_definition(self):
        """ Returns you the original definitions. I strongly recommend not
        using it for your completions, because it might slow down Jedi. If you
        want to read only a few objects (<=20). I think it might be useful,
        especially to get the original docstrings.
        The basic problem of this function is that it follows all results. This
        means with 1000 completions (e.g. numpy), it's just PITA slow.
        """
        if self._followed_definitions is None:
            if self.definition.isinstance(parsing.Statement):
                defs = evaluate.follow_statement(self.definition)
            elif self.definition.isinstance(parsing.Import):
                defs = imports.strip_imports([self.definition])
            else:
                return [self]

            self._followed_definitions = \
                [BaseDefinition(d, d.start_pos) for d in defs]
            # following may have polluted the evaluation caches
            evaluate.clear_caches()

        return self._followed_definitions

    def __repr__(self):
        return '<%s: %s>' % (type(self).__name__, self.name)
|
|
||||||
|
|
||||||
|
|
||||||
class Definition(BaseDefinition):
    """ These are the objects returned by either `Script.goto` or
    `Script.get_definition`. """
    def __init__(self, definition):
        # the definition object carries its own start position
        super(Definition, self).__init__(definition, definition.start_pos)

    @property
    def description(self):
        """ A description of the Definition object, which is heavily used in
        testing. e.g. for `isinstance` it returns 'def isinstance' """
        d = self.definition
        # unwrap instance elements and names down to the real definition
        if isinstance(d, evaluate.InstanceElement):
            d = d.var
        if isinstance(d, evaluate.parsing.Name):
            d = d.parent

        if isinstance(d, evaluate.Array):
            d = 'class ' + d.type
        elif isinstance(d, (parsing.Class, evaluate.Class, evaluate.Instance)):
            d = 'class ' + str(d.name)
        elif isinstance(d, (evaluate.Function, evaluate.parsing.Function)):
            d = 'def ' + str(d.name)
        elif isinstance(d, evaluate.parsing.Module):
            # only show module name
            d = 'module %s' % self.module_name
        elif self.is_keyword:
            d = 'keyword %s' % d.name
        else:
            # fallback: the statement's own source, flattened to one line
            d = d.get_code().replace('\n', '')
        return d

    @property
    def desc_with_module(self):
        """ In addition to the Definition, it also returns the module. Don't
        use it yet, its behaviour may change. If you really need it, talk to
        me. TODO add full path. This function should return a
        module.class.function path. """
        if self.module_path.endswith('.py') \
                and not isinstance(self.definition, parsing.Module):
            position = '@%s' % (self.line_nr)
        else:
            # is a builtin or module
            position = ''
        return "%s:%s%s" % (self.module_name, self.description, position)
|
|
||||||
|
|
||||||
|
|
||||||
class RelatedName(BaseDefinition):
    """One occurrence of a name, as returned by `Script.related_names`."""
    def __init__(self, name_part, scope):
        super(RelatedName, self).__init__(scope, name_part.start_pos)
        self.name_part = name_part
        self.end_pos = name_part.end_pos
        self.text = str(name_part)

    @property
    def description(self):
        """Return the occurrence as ``text@line,column``."""
        row, column = self.start_pos
        return "%s@%s,%s" % (self.text, row, column)

    def __eq__(self, other):
        # two occurrences are the same iff position and module match
        same_position = self.start_pos == other.start_pos
        return same_position and self.module_path == other.module_path

    def __hash__(self):
        # consistent with __eq__ above
        return hash((self.start_pos, self.module_path))
|
|
||||||
|
|
||||||
|
|
||||||
class CallDef(object):
    """ `CallDef` objects is the return value of `Script.get_in_function_call`.
    It knows what functions you are currently in. e.g. `isinstance(` would
    return the `isinstance` function. without `(` it would return nothing."""
    def __init__(self, executable, index, call):
        # executable: the function/class being called
        # index: zero-based index of the argument under the cursor
        # call: the parser call object the cursor is inside of
        self.executable = executable
        self.index = index
        self.call = call

    @property
    def params(self):
        # Return the callable's parameter list. For bound methods and
        # constructors the implicit `self` parameter is stripped.
        if self.executable.isinstance(evaluate.Function):
            if isinstance(self.executable, evaluate.InstanceElement):
                return self.executable.params[1:]
            return self.executable.params
        else:
            # a class: the signature comes from its __init__
            try:
                sub = self.executable.get_subscope_by_name('__init__')
                return sub.params[1:]  # ignore self
            except KeyError:
                # no __init__ defined -> no known parameters
                return []

    @property
    def bracket_start(self):
        """ The indent of the bracket that is responsible for the last function
        call. """
        # walk to the end of the call chain (e.g. a.b().c(|)
        c = self.call
        while c.next is not None:
            c = c.next
        return c.name.end_pos

    @property
    def call_name(self):
        """ The name (e.g. 'isinstance') as a string. """
        return str(self.executable.name)

    @property
    def module(self):
        # the module the called object is defined in
        return self.executable.get_parent_until()

    def __repr__(self):
        return '<%s: %s index %s>' % (type(self).__name__, self.executable,
                                      self.index)
|
|
|
@ -1,472 +0,0 @@
|
||||||
from __future__ import with_statement
|
|
||||||
from _compatibility import exec_function, is_py3k
|
|
||||||
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
if is_py3k:
|
|
||||||
import io
|
|
||||||
import types
|
|
||||||
import inspect
|
|
||||||
|
|
||||||
import debug
|
|
||||||
import parsing
|
|
||||||
import imports
|
|
||||||
import evaluate
|
|
||||||
|
|
||||||
|
|
||||||
def get_sys_path():
    """Return the interpreter search path (without the script directory),
    with the active virtualenv's site-packages prepended when one is set.
    """
    def _prepend_virtual_env(paths):
        """ Add virtualenv's site-packages to the `sys.path`."""
        venv = os.getenv('VIRTUAL_ENV')
        if not venv:
            return
        venv = os.path.abspath(venv)
        version_dir = 'python%d.%d' % sys.version_info[:2]
        site_packages = os.path.join(venv, 'lib', version_dir,
                                     'site-packages')
        paths.insert(0, site_packages)

    # drop sys.path[0] (the directory of the running script)
    search_paths = sys.path[1:]
    _prepend_virtual_env(search_paths)
    return search_paths
|
|
||||||
|
|
||||||
|
|
||||||
class CachedModule(object):
    """
    The base type for all modules, which is not to be confused with
    `parsing.Module`. Caching happens here.
    """
    # class-level cache: {path_or_name: (mtime_or_None, parser)}
    cache = {}

    def __init__(self, path=None, name=None):
        # path: absolute file path of the module (or None)
        # name: module name used as the cache key when there is no path
        self.path = path and os.path.abspath(path)
        self.name = name
        self._parser = None

    @property
    def parser(self):
        """ get the parser lazy """
        if not self._parser:
            try:
                timestamp, parser = self.cache[self.path or self.name]
                # a cached entry is valid if there is no file to check or
                # the file has not been modified since it was cached
                if not self.path or os.path.getmtime(self.path) <= timestamp:
                    self._parser = parser
                else:
                    # In case there is already a module cached and this module
                    # has to be reparsed, we also need to invalidate the import
                    # caches.
                    imports.invalidate_star_import_cache(parser.module)
                    # fall through to the reload path below
                    raise KeyError()
            except KeyError:
                self._load_module()
        return self._parser

    def _get_source(self):
        # abstract: subclasses must produce the module's source text
        raise NotImplementedError()

    def _load_module(self):
        # (re)parse the module source and refresh the cache entry
        source = self._get_source()
        self._parser = parsing.PyFuzzyParser(source, self.path or self.name)
        p_time = None if not self.path else os.path.getmtime(self.path)

        if self.path or self.name:
            self.cache[self.path or self.name] = p_time, self._parser
|
|
||||||
|
|
||||||
|
|
||||||
class Parser(CachedModule):
    """
    This module is a parser for all builtin modules, which are programmed in
    C/C++. It should also work on third party modules.
    It can be instantiated with either a path or a name of the module. The path
    is important for third party modules.

    :param name: The name of the module.
    :param path: The path of the module.
    :param sys_path: The sys.path, which is can be customizable.
    """

    # docstring return-type descriptions mapped to example expressions that
    # the fuzzy parser can evaluate
    map_types = {
        'floating point number': '0.0',
        'string': '""',
        'str': '""',
        'character': '"a"',
        'integer': '0',
        'int': '0',
        'dictionary': '{}',
        'list': '[]',
        'file object': 'file("")',
        # TODO things like dbg: ('not working', 'tuple of integers')
    }

    if is_py3k:
        # py3 has no `file` builtin
        map_types['file object'] = 'import io; return io.TextIOWrapper()'

    module_cache = {}

    def __init__(self, path=None, name=None, sys_path=None):
        if sys_path is None:
            sys_path = get_sys_path()
        if not name:
            # derive the module name from the file name
            name = os.path.basename(path)
            name = name.rpartition('.')[0]  # cut file type (normally .so)
        super(Parser, self).__init__(path=path, name=name)

        self.sys_path = list(sys_path)
        self._module = None

    @property
    def module(self):
        def load_module(name, path):
            # actually import the (compiled) module, temporarily swapping
            # sys.path so the lookup happens in our configured paths
            if path:
                self.sys_path.insert(0, path)

            temp, sys.path = sys.path, self.sys_path
            content = {}
            try:
                exec_function('import %s as module' % name, content)
                self._module = content['module']
            except AttributeError:
                # use sys.modules, because you cannot access some modules
                # directly. -> #59
                self._module = sys.modules[name]
            sys.path = temp

            if path:
                self.sys_path.pop(0)

        # module might already be defined
        if not self._module:
            path = self.path
            name = self.name
            if self.path:

                dot_path = []
                p = self.path
                # search for the builtin with the correct path
                while p and p not in sys.path:
                    p, sep, mod = p.rpartition(os.path.sep)
                    dot_path.append(mod.partition('.')[0])
                if p:
                    # the path was inside a sys.path entry -> dotted import
                    name = ".".join(reversed(dot_path))
                    path = p
                else:
                    path = os.path.dirname(self.path)

            load_module(name, path)
        return self._module

    def _get_source(self):
        """ Override this abstract method """
        return _generate_code(self.module, self._load_mixins())

    def _load_mixins(self):
        """
        Load functions that are mixed in to the standard library.
        E.g. builtins are written in C (binaries), but my autocompletion only
        understands Python code. By mixing in Python code, the autocompletion
        should work much better for builtins.
        """
        regex = r'^(def|class)\s+([\w\d]+)'

        def process_code(code, depth=0):
            # split `code` into top-level def/class chunks; classes map to a
            # nested dict of their methods (one level only)
            funcs = {}
            matches = list(re.finditer(regex, code, re.MULTILINE))
            positions = [m.start() for m in matches]
            for i, pos in enumerate(positions):
                try:
                    code_block = code[pos:positions[i + 1]]
                except IndexError:
                    code_block = code[pos:len(code)]
                structure_name = matches[i].group(1)
                name = matches[i].group(2)
                if structure_name == 'def':
                    funcs[name] = code_block
                elif structure_name == 'class':
                    if depth > 0:
                        # nested classes are not supported in mixins
                        raise NotImplementedError()

                    # remove class line
                    c = re.sub(r'^[^\n]+', '', code_block)
                    # remove whitespace
                    c = re.compile(r'^[ ]{4}', re.MULTILINE).sub('', c)

                    funcs[name] = process_code(c)
                else:
                    raise NotImplementedError()
            return funcs

        try:
            name = self.name
            # NOTE(review): on py2 the builtins module is named
            # '__builtin__' but the mixin file is 'builtins.pym' — confirm
            # this mapping is intended for the py2 case only.
            if name == '__builtin__' and not is_py3k:
                name = 'builtins'
            path = os.path.dirname(os.path.abspath(__file__))
            with open(os.path.sep.join([path, 'mixin', name]) + '.pym') as f:
                s = f.read()
        except IOError:
            # no mixin file for this module
            return {}
        else:
            mixin_dct = process_code(s)
            if is_py3k and self.name == Builtin.name:
                # in the case of Py3k xrange is now range
                mixin_dct['range'] = mixin_dct['xrange']
            return mixin_dct
|
|
||||||
|
|
||||||
|
|
||||||
def _generate_code(scope, mixin_funcs={}, depth=0):
    """
    Generate a string, which uses python syntax as an input to the
    PyFuzzyParser.

    :param scope: a live module/class object to turn into fake source code.
    :param mixin_funcs: dict of name -> mixin source (or nested dict for
        classes) that overrides generated code. NOTE(review): mutable
        default argument — harmless here because it is never mutated, but
        worth cleaning up.
    :param depth: 0 at module level, >0 inside a class body.
    """
    def get_doc(obj, indent=False):
        # render an object's docstring as a raw triple-quoted literal
        doc = inspect.getdoc(obj)
        if doc:
            doc = ('r"""\n%s\n"""\n' % doc)
            if indent:
                doc = parsing.indent_block(doc)
            return doc
        return ''

    def is_in_base_classes(cls, name, comparison):
        """ Base classes may contain the exact same object """
        if name in mixin_funcs:
            return False
        try:
            mro = cls.mro()
        except TypeError:
            # this happens, if cls == type
            return False
        for base in mro[1:]:
            try:
                attr = getattr(base, name)
            except AttributeError:
                continue
            if attr == comparison:
                return True
        return False

    def get_scope_objects(names):
        """
        Looks for the names defined with dir() in an objects and divides
        them into different object types.
        """
        classes = {}
        funcs = {}
        stmts = {}
        members = {}
        for n in names:
            try:
                # this has a builtin_function_or_method
                exe = getattr(scope, n)
            except AttributeError:
                # happens e.g. in properties of
                # PyQt4.QtGui.QStyleOptionComboBox.currentText
                # -> just set it to None
                members[n] = None
            else:
                if inspect.isclass(scope):
                    # skip attributes inherited unchanged from base classes
                    if is_in_base_classes(scope, n, exe):
                        continue
                if inspect.isbuiltin(exe) or inspect.ismethod(exe) \
                        or inspect.ismethoddescriptor(exe):
                    funcs[n] = exe
                elif inspect.isclass(exe):
                    classes[n] = exe
                elif inspect.ismemberdescriptor(exe):
                    members[n] = exe
                else:
                    stmts[n] = exe
        return classes, funcs, stmts, members

    code = ''
    if inspect.ismodule(scope):  # generate comment where the code's from.
        try:
            path = scope.__file__
        except AttributeError:
            path = '?'
        code += '# Generated module %s from %s\n' % (scope.__name__, path)

    code += get_doc(scope)

    # module bookkeeping attributes are excluded; 'mro' is always offered
    names = set(dir(scope)) - set(['__file__', '__name__', '__doc__',
                                   '__path__', '__package__']) \
        | set(['mro'])

    classes, funcs, stmts, members = get_scope_objects(names)

    # classes
    for name, cl in classes.items():
        bases = (c.__name__ for c in cl.__bases__)
        code += 'class %s(%s):\n' % (name, ','.join(bases))
        if depth == 0:
            # only recurse one level: class bodies at module level
            try:
                mixin = mixin_funcs[name]
            except KeyError:
                mixin = {}
            cl_code = _generate_code(cl, mixin, depth + 1)
            code += parsing.indent_block(cl_code)
        code += '\n'

    # functions
    for name, func in funcs.items():
        params, ret = parse_function_doc(func)
        if depth > 0:
            # methods get an explicit self parameter
            params = 'self, ' + params
        doc_str = get_doc(func, indent=True)
        try:
            mixin = mixin_funcs[name]
        except KeyError:
            # normal code generation
            code += 'def %s(%s):\n' % (name, params)
            code += doc_str
            code += parsing.indent_block('%s\n\n' % ret)
        else:
            # generation of code with mixins
            # the parser only supports basic functions with a newline after
            # the double dots
            # find doc_str place
            pos = re.search(r'\):\s*\n', mixin).end()
            if pos is None:
                raise Exception("Builtin function not parsed correctly")
            code += mixin[:pos] + doc_str + mixin[pos:]

    # class members (functions) properties?
    for name, func in members.items():
        # recursion problem in properties TODO remove
        if name in ['fget', 'fset', 'fdel']:
            continue
        ret = 'pass'
        code += '@property\ndef %s(self):\n' % (name)
        code += parsing.indent_block(get_doc(func) + '%s\n\n' % ret)

    # variables
    for name, value in stmts.items():
        if is_py3k:
            file_type = io.TextIOWrapper
        else:
            file_type = types.FileType
        if type(value) == file_type:
            value = 'open()'
        elif name == 'None':
            value = ''
        elif type(value).__name__ in ['int', 'bool', 'float',
                                      'dict', 'list', 'tuple']:
            # simple literals can be reproduced directly
            value = repr(value)
        else:
            # get the type, if the type is not simple.
            mod = type(value).__module__
            value = type(value).__name__ + '()'
            if mod != '__builtin__':
                value = '%s.%s' % (mod, value)
        code += '%s = %s\n' % (name, value)

    if depth == 0:
        # debugging helpers, kept for reference
        #with open('writeout.py', 'w') as f:
        #    f.write(code)
        #import sys
        #sys.stdout.write(code)
        #exit()
        pass
    return code
|
|
||||||
|
|
||||||
|
|
||||||
def parse_function_doc(func):
|
|
||||||
"""
|
|
||||||
Takes a function and returns the params and return value as a tuple.
|
|
||||||
This is nothing more than a docstring parser.
|
|
||||||
"""
|
|
||||||
# TODO: things like utime(path, (atime, mtime)) and a(b [, b]) -> None
|
|
||||||
doc = inspect.getdoc(func)
|
|
||||||
|
|
||||||
# get full string, parse round parentheses: def func(a, (b,c))
|
|
||||||
try:
|
|
||||||
count = 0
|
|
||||||
debug.dbg(func, func.__name__, doc)
|
|
||||||
start = doc.index('(')
|
|
||||||
for i, s in enumerate(doc[start:]):
|
|
||||||
if s == '(':
|
|
||||||
count += 1
|
|
||||||
elif s == ')':
|
|
||||||
count -= 1
|
|
||||||
if count == 0:
|
|
||||||
end = start + i
|
|
||||||
break
|
|
||||||
param_str = doc[start + 1:end]
|
|
||||||
|
|
||||||
# remove square brackets, that show an optional param ( = None)
|
|
||||||
def change_options(m):
|
|
||||||
args = m.group(1).split(',')
|
|
||||||
for i, a in enumerate(args):
|
|
||||||
if a and '=' not in a:
|
|
||||||
args[i] += '=None'
|
|
||||||
return ','.join(args)
|
|
||||||
while True:
|
|
||||||
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
|
|
||||||
change_options, param_str)
|
|
||||||
if changes == 0:
|
|
||||||
break
|
|
||||||
except (ValueError, AttributeError):
|
|
||||||
debug.dbg('no brackets found - no param')
|
|
||||||
end = 0
|
|
||||||
param_str = ''
|
|
||||||
|
|
||||||
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
|
|
||||||
|
|
||||||
if doc is not None:
|
|
||||||
r = re.search('-[>-]* ', doc[end:end + 7])
|
|
||||||
if doc is None or r is None:
|
|
||||||
ret = 'pass'
|
|
||||||
else:
|
|
||||||
index = end + r.end()
|
|
||||||
# get result type, which can contain newlines
|
|
||||||
pattern = re.compile(r'(,\n|[^\n-])+')
|
|
||||||
ret_str = pattern.match(doc, index).group(0).strip()
|
|
||||||
# New object -> object()
|
|
||||||
ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
|
|
||||||
|
|
||||||
ret = Parser.map_types.get(ret_str, ret_str)
|
|
||||||
if ret == ret_str and ret not in ['None', 'object', 'tuple', 'set']:
|
|
||||||
debug.dbg('not working', ret_str)
|
|
||||||
if ret != 'pass':
|
|
||||||
ret = ('return ' if 'return' not in ret else '') + ret
|
|
||||||
return param_str, ret
|
|
||||||
|
|
||||||
|
|
||||||
class Builtin(object):
|
|
||||||
""" The builtin scope / module """
|
|
||||||
# Python 3 compatibility
|
|
||||||
if is_py3k:
|
|
||||||
name = 'builtins'
|
|
||||||
else:
|
|
||||||
name = '__builtin__'
|
|
||||||
|
|
||||||
_builtin = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def builtin(self):
|
|
||||||
if self._builtin is None:
|
|
||||||
self._builtin = Parser(name=self.name)
|
|
||||||
return self._builtin
|
|
||||||
|
|
||||||
@property
|
|
||||||
def scope(self):
|
|
||||||
return self.builtin.parser.module
|
|
||||||
|
|
||||||
@property
|
|
||||||
def magic_function_scope(self):
|
|
||||||
try:
|
|
||||||
return self._magic_function_scope
|
|
||||||
except AttributeError:
|
|
||||||
# depth = 1 because this is not a module
|
|
||||||
class Container(object):
|
|
||||||
FunctionType = types.FunctionType
|
|
||||||
source = _generate_code(Container, depth=0)
|
|
||||||
parser = parsing.PyFuzzyParser(source, None)
|
|
||||||
module = parser.module
|
|
||||||
module.parent = self.scope
|
|
||||||
typ = evaluate.follow_path(iter(['FunctionType']), module, module)
|
|
||||||
|
|
||||||
s = self._magic_function_scope = typ.pop()
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
Builtin = Builtin()
|
|
|
@ -1,64 +0,0 @@
|
||||||
import inspect
|
|
||||||
import time
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Use colorama for nicer console output.
|
|
||||||
from colorama import Fore, init
|
|
||||||
init()
|
|
||||||
except ImportError:
|
|
||||||
class Fore(object):
|
|
||||||
RED = ''
|
|
||||||
GREEN = ''
|
|
||||||
YELLOW = ''
|
|
||||||
RESET = ''
|
|
||||||
|
|
||||||
NOTICE = object()
|
|
||||||
WARNING = object()
|
|
||||||
SPEED = object()
|
|
||||||
|
|
||||||
enable_speed = False
|
|
||||||
enable_warning = False
|
|
||||||
enable_notice = False
|
|
||||||
|
|
||||||
# callback, interface: level, str
|
|
||||||
debug_function = None
|
|
||||||
ignored_modules = ['parsing', 'builtin', 'jedi.builtin', 'jedi.parsing']
|
|
||||||
|
|
||||||
|
|
||||||
def reset_time():
|
|
||||||
global start_time
|
|
||||||
start_time = time.time()
|
|
||||||
|
|
||||||
|
|
||||||
def dbg(*args):
|
|
||||||
""" Looks at the stack, to see if a debug message should be printed. """
|
|
||||||
if debug_function and enable_notice:
|
|
||||||
frm = inspect.stack()[1]
|
|
||||||
mod = inspect.getmodule(frm[0])
|
|
||||||
if not (mod.__name__ in ignored_modules):
|
|
||||||
debug_function(NOTICE, 'dbg: ' + ', '.join(str(a) for a in args))
|
|
||||||
|
|
||||||
|
|
||||||
def warning(*args):
|
|
||||||
if debug_function and enable_warning:
|
|
||||||
debug_function(WARNING, 'warning: ' + ', '.join(str(a) for a in args))
|
|
||||||
|
|
||||||
|
|
||||||
def speed(name):
|
|
||||||
if debug_function and enable_speed:
|
|
||||||
now = time.time()
|
|
||||||
debug_function(SPEED, 'speed: ' + '%s %s' % (name, now - start_time))
|
|
||||||
|
|
||||||
|
|
||||||
def print_to_stdout(level, str_out):
|
|
||||||
""" The default debug function """
|
|
||||||
if level == NOTICE:
|
|
||||||
col = Fore.GREEN
|
|
||||||
elif level == WARNING:
|
|
||||||
col = Fore.RED
|
|
||||||
else:
|
|
||||||
col = Fore.YELLOW
|
|
||||||
print(col + str_out + Fore.RESET)
|
|
||||||
|
|
||||||
|
|
||||||
#debug_function = print_to_stdout
|
|
|
@ -1,34 +0,0 @@
|
||||||
""" Processing of docstrings, which means parsing for types. """
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
import evaluate
|
|
||||||
|
|
||||||
|
|
||||||
#@evaluate.memoize_default() # TODO add
|
|
||||||
def follow_param(param):
|
|
||||||
func = param.parent_function
|
|
||||||
#print func, param, param.parent_function
|
|
||||||
param_str = search_param_in_docstr(func.docstr, str(param.get_name()))
|
|
||||||
|
|
||||||
if param_str is not None:
|
|
||||||
scope = func.get_parent_until()
|
|
||||||
return evaluate.get_scopes_for_name(scope, param_str,
|
|
||||||
search_global=True)
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
def search_param_in_docstr(docstr, param_str):
|
|
||||||
lines = docstr.split('\n')
|
|
||||||
|
|
||||||
# look at #40 to see definitions of those params
|
|
||||||
sphinx_comp = ':type %s:' % param_str
|
|
||||||
googley_comp = re.compile('\s*%s\s+\(([^()]+)\)' % re.escape(param_str))
|
|
||||||
for l in lines:
|
|
||||||
if l.startswith(sphinx_comp):
|
|
||||||
return l.replace(sphinx_comp, '', 1).strip()
|
|
||||||
|
|
||||||
r = re.match(googley_comp, l)
|
|
||||||
if r is not None:
|
|
||||||
return r.group(1)
|
|
||||||
return None
|
|
|
@ -1,507 +0,0 @@
|
||||||
"""
|
|
||||||
For dynamic completion.
|
|
||||||
|
|
||||||
Sorry to everyone who is reading this code. Especially the array parts are
|
|
||||||
really cryptic and not understandable. It's just a hack, that turned out to be
|
|
||||||
working quite good.
|
|
||||||
"""
|
|
||||||
from __future__ import with_statement
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
import parsing
|
|
||||||
import modules
|
|
||||||
import evaluate
|
|
||||||
import helpers
|
|
||||||
import settings
|
|
||||||
import debug
|
|
||||||
import builtin
|
|
||||||
import imports
|
|
||||||
import api_classes
|
|
||||||
|
|
||||||
# This is something like the sys.path, but only for searching params. It means
|
|
||||||
# that this is the order in which Jedi searches params.
|
|
||||||
search_param_modules = ['.']
|
|
||||||
search_param_cache = {}
|
|
||||||
|
|
||||||
|
|
||||||
def get_directory_modules_for_name(mods, name):
|
|
||||||
"""
|
|
||||||
Search a name in the directories of modules.
|
|
||||||
"""
|
|
||||||
def check_python_file(path):
|
|
||||||
try:
|
|
||||||
return builtin.CachedModule.cache[path][1].module
|
|
||||||
except KeyError:
|
|
||||||
try:
|
|
||||||
return check_fs(path)
|
|
||||||
except IOError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def check_fs(path):
|
|
||||||
with open(path) as f:
|
|
||||||
source = f.read()
|
|
||||||
if name in source:
|
|
||||||
return modules.Module(path, source).parser.module
|
|
||||||
|
|
||||||
# skip non python modules
|
|
||||||
mods = set(m for m in mods if m.path.endswith('.py'))
|
|
||||||
mod_paths = set()
|
|
||||||
for m in mods:
|
|
||||||
mod_paths.add(m.path)
|
|
||||||
yield m
|
|
||||||
|
|
||||||
if settings.dynamic_params_for_other_modules:
|
|
||||||
paths = set(settings.additional_dynamic_modules)
|
|
||||||
for p in mod_paths:
|
|
||||||
d = os.path.dirname(p)
|
|
||||||
for entry in os.listdir(d):
|
|
||||||
if entry not in mod_paths:
|
|
||||||
if entry.endswith('.py'):
|
|
||||||
paths.add(d + os.path.sep + entry)
|
|
||||||
|
|
||||||
for p in paths:
|
|
||||||
c = check_python_file(p)
|
|
||||||
if c is not None and c not in mods:
|
|
||||||
yield c
|
|
||||||
|
|
||||||
|
|
||||||
def search_param_memoize(func):
|
|
||||||
"""
|
|
||||||
Is only good for search params memoize, respectively the closure,
|
|
||||||
because it just caches the input, not the func, like normal memoize does.
|
|
||||||
"""
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
key = (args, frozenset(kwargs.items()))
|
|
||||||
if key in search_param_cache:
|
|
||||||
return search_param_cache[key]
|
|
||||||
else:
|
|
||||||
rv = func(*args, **kwargs)
|
|
||||||
search_param_cache[key] = rv
|
|
||||||
return rv
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
class ParamListener(object):
|
|
||||||
"""
|
|
||||||
This listener is used to get the params for a function.
|
|
||||||
"""
|
|
||||||
def __init__(self):
|
|
||||||
self.param_possibilities = []
|
|
||||||
|
|
||||||
def execute(self, params):
|
|
||||||
self.param_possibilities.append(params)
|
|
||||||
|
|
||||||
|
|
||||||
@evaluate.memoize_default([])
|
|
||||||
def search_params(param):
|
|
||||||
"""
|
|
||||||
This is a dynamic search for params. If you try to complete a type:
|
|
||||||
>>> def func(foo):
|
|
||||||
>>> # here is the completion
|
|
||||||
>>> foo
|
|
||||||
>>> func(1)
|
|
||||||
>>> func("")
|
|
||||||
|
|
||||||
It is not known what the type is, because it cannot be guessed with
|
|
||||||
recursive madness. Therefore one has to analyse the statements that are
|
|
||||||
calling the function, as well as analyzing the incoming params.
|
|
||||||
"""
|
|
||||||
if not settings.dynamic_params:
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_params_for_module(module):
|
|
||||||
"""
|
|
||||||
Returns the values of a param, or an empty array.
|
|
||||||
"""
|
|
||||||
@search_param_memoize
|
|
||||||
def get_posibilities(module, func_name):
|
|
||||||
try:
|
|
||||||
possible_stmts = module.used_names[func_name]
|
|
||||||
except KeyError:
|
|
||||||
return []
|
|
||||||
|
|
||||||
for stmt in possible_stmts:
|
|
||||||
if not isinstance(stmt, parsing.Import):
|
|
||||||
calls = _scan_array(stmt.get_assignment_calls(), func_name)
|
|
||||||
for c in calls:
|
|
||||||
# no execution means that params cannot be set
|
|
||||||
call_path = c.generate_call_path()
|
|
||||||
pos = c.start_pos
|
|
||||||
scope = stmt.parent
|
|
||||||
evaluate.follow_call_path(call_path, scope, pos)
|
|
||||||
return listener.param_possibilities
|
|
||||||
|
|
||||||
result = []
|
|
||||||
for params in get_posibilities(module, func_name):
|
|
||||||
for p in params:
|
|
||||||
if str(p) == param_name:
|
|
||||||
result += evaluate.follow_statement(p.parent)
|
|
||||||
return result
|
|
||||||
|
|
||||||
func = param.get_parent_until(parsing.Function)
|
|
||||||
current_module = param.get_parent_until()
|
|
||||||
func_name = str(func.name)
|
|
||||||
if func_name == '__init__' and isinstance(func.parent, parsing.Class):
|
|
||||||
func_name = str(func.parent.name)
|
|
||||||
|
|
||||||
# get the param name
|
|
||||||
if param.assignment_details:
|
|
||||||
arr = param.assignment_details[0][1]
|
|
||||||
else:
|
|
||||||
arr = param.get_assignment_calls()
|
|
||||||
offset = 1 if arr[0][0] in ['*', '**'] else 0
|
|
||||||
param_name = str(arr[0][offset].name)
|
|
||||||
|
|
||||||
# add the listener
|
|
||||||
listener = ParamListener()
|
|
||||||
func.listeners.add(listener)
|
|
||||||
|
|
||||||
result = []
|
|
||||||
# This is like backtracking: Get the first possible result.
|
|
||||||
for mod in get_directory_modules_for_name([current_module], func_name):
|
|
||||||
result = get_params_for_module(mod)
|
|
||||||
if result:
|
|
||||||
break
|
|
||||||
|
|
||||||
# cleanup: remove the listener; important: should not stick.
|
|
||||||
func.listeners.remove(listener)
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def check_array_additions(array):
|
|
||||||
""" Just a mapper function for the internal _check_array_additions """
|
|
||||||
if array._array.type not in ['list', 'set']:
|
|
||||||
# TODO also check for dict updates
|
|
||||||
return []
|
|
||||||
|
|
||||||
is_list = array._array.type == 'list'
|
|
||||||
current_module = array._array.parent_stmt.get_parent_until()
|
|
||||||
res = _check_array_additions(array, current_module, is_list)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _scan_array(arr, search_name):
|
|
||||||
""" Returns the function Call that match search_name in an Array. """
|
|
||||||
result = []
|
|
||||||
for sub in arr:
|
|
||||||
for s in sub:
|
|
||||||
if isinstance(s, parsing.Array):
|
|
||||||
result += _scan_array(s, search_name)
|
|
||||||
elif isinstance(s, parsing.Call):
|
|
||||||
s_new = s
|
|
||||||
while s_new is not None:
|
|
||||||
n = s_new.name
|
|
||||||
if isinstance(n, parsing.Name) and search_name in n.names:
|
|
||||||
result.append(s)
|
|
||||||
|
|
||||||
if s_new.execution is not None:
|
|
||||||
result += _scan_array(s_new.execution, search_name)
|
|
||||||
s_new = s_new.next
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
counter = 0
|
|
||||||
def dec(func):
|
|
||||||
""" TODO delete this """
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
global counter
|
|
||||||
element = args[0]
|
|
||||||
if isinstance(element, evaluate.Array):
|
|
||||||
stmt = element._array.parent_stmt
|
|
||||||
else:
|
|
||||||
# must be instance
|
|
||||||
stmt = element.var_args.parent_stmt
|
|
||||||
print(' ' * counter + 'recursion,', stmt)
|
|
||||||
counter += 1
|
|
||||||
res = func(*args, **kwargs)
|
|
||||||
counter -= 1
|
|
||||||
#print ' '*counter + 'end,'
|
|
||||||
return res
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
#@dec
|
|
||||||
@evaluate.memoize_default([])
|
|
||||||
def _check_array_additions(compare_array, module, is_list):
|
|
||||||
"""
|
|
||||||
Checks if a `parsing.Array` has "add" statements:
|
|
||||||
>>> a = [""]
|
|
||||||
>>> a.append(1)
|
|
||||||
"""
|
|
||||||
if not settings.dynamic_array_additions or module.is_builtin():
|
|
||||||
return []
|
|
||||||
|
|
||||||
def check_calls(calls, add_name):
|
|
||||||
"""
|
|
||||||
Calls are processed here. The part before the call is searched and
|
|
||||||
compared with the original Array.
|
|
||||||
"""
|
|
||||||
result = []
|
|
||||||
for c in calls:
|
|
||||||
call_path = list(c.generate_call_path())
|
|
||||||
separate_index = call_path.index(add_name)
|
|
||||||
if add_name == call_path[-1] or separate_index == 0:
|
|
||||||
# this means that there is no execution -> [].append
|
|
||||||
# or the keyword is at the start -> append()
|
|
||||||
continue
|
|
||||||
backtrack_path = iter(call_path[:separate_index])
|
|
||||||
|
|
||||||
position = c.start_pos
|
|
||||||
scope = c.parent_stmt.parent
|
|
||||||
|
|
||||||
found = evaluate.follow_call_path(backtrack_path, scope, position)
|
|
||||||
if not compare_array in found:
|
|
||||||
continue
|
|
||||||
|
|
||||||
params = call_path[separate_index + 1]
|
|
||||||
if not params.values:
|
|
||||||
continue # no params: just ignore it
|
|
||||||
if add_name in ['append', 'add']:
|
|
||||||
result += evaluate.follow_call_list(params)
|
|
||||||
elif add_name in ['insert']:
|
|
||||||
try:
|
|
||||||
second_param = params[1]
|
|
||||||
except IndexError:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
result += evaluate.follow_call_list([second_param])
|
|
||||||
elif add_name in ['extend', 'update']:
|
|
||||||
iterators = evaluate.follow_call_list(params)
|
|
||||||
result += evaluate.get_iterator_types(iterators)
|
|
||||||
return result
|
|
||||||
|
|
||||||
def get_execution_parent(element, *stop_classes):
|
|
||||||
""" Used to get an Instance/Execution parent """
|
|
||||||
if isinstance(element, evaluate.Array):
|
|
||||||
stmt = element._array.parent_stmt
|
|
||||||
else:
|
|
||||||
# must be instance
|
|
||||||
stmt = element.var_args.parent_stmt
|
|
||||||
if isinstance(stmt, evaluate.InstanceElement):
|
|
||||||
stop_classes = list(stop_classes) + [evaluate.Function]
|
|
||||||
return stmt.get_parent_until(stop_classes)
|
|
||||||
|
|
||||||
temp_param_add = settings.dynamic_params_for_other_modules
|
|
||||||
settings.dynamic_params_for_other_modules = False
|
|
||||||
|
|
||||||
search_names = ['append', 'extend', 'insert'] if is_list else \
|
|
||||||
['add', 'update']
|
|
||||||
comp_arr_parent = get_execution_parent(compare_array, evaluate.Execution)
|
|
||||||
possible_stmts = []
|
|
||||||
res = []
|
|
||||||
for n in search_names:
|
|
||||||
try:
|
|
||||||
possible_stmts += module.used_names[n]
|
|
||||||
except KeyError:
|
|
||||||
continue
|
|
||||||
for stmt in possible_stmts:
|
|
||||||
# Check if the original scope is an execution. If it is, one
|
|
||||||
# can search for the same statement, that is in the module
|
|
||||||
# dict. Executions are somewhat special in jedi, since they
|
|
||||||
# literally copy the contents of a function.
|
|
||||||
if isinstance(comp_arr_parent, evaluate.Execution):
|
|
||||||
stmt = comp_arr_parent. \
|
|
||||||
get_statement_for_position(stmt.start_pos)
|
|
||||||
if stmt is None:
|
|
||||||
continue
|
|
||||||
# InstanceElements are special, because they don't get copied,
|
|
||||||
# but have this wrapper around them.
|
|
||||||
if isinstance(comp_arr_parent, evaluate.InstanceElement):
|
|
||||||
stmt = evaluate.InstanceElement(comp_arr_parent.instance, stmt)
|
|
||||||
|
|
||||||
if evaluate.follow_statement.push_stmt(stmt):
|
|
||||||
# check recursion
|
|
||||||
continue
|
|
||||||
res += check_calls(_scan_array(stmt.get_assignment_calls(), n), n)
|
|
||||||
evaluate.follow_statement.pop_stmt()
|
|
||||||
# reset settings
|
|
||||||
settings.dynamic_params_for_other_modules = temp_param_add
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def check_array_instances(instance):
|
|
||||||
""" Used for set() and list() instances. """
|
|
||||||
if not settings.dynamic_arrays_instances:
|
|
||||||
return instance.var_args
|
|
||||||
ai = ArrayInstance(instance)
|
|
||||||
return helpers.generate_param_array([ai], instance.var_args.parent_stmt)
|
|
||||||
|
|
||||||
|
|
||||||
class ArrayInstance(parsing.Base):
|
|
||||||
"""
|
|
||||||
Used for the usage of set() and list().
|
|
||||||
This is definitely a hack, but a good one :-)
|
|
||||||
It makes it possible to use set/list conversions.
|
|
||||||
"""
|
|
||||||
def __init__(self, instance):
|
|
||||||
self.instance = instance
|
|
||||||
self.var_args = instance.var_args
|
|
||||||
|
|
||||||
def iter_content(self):
|
|
||||||
"""
|
|
||||||
The index is here just ignored, because of all the appends, etc.
|
|
||||||
lists/sets are too complicated too handle that.
|
|
||||||
"""
|
|
||||||
items = []
|
|
||||||
for array in evaluate.follow_call_list(self.var_args):
|
|
||||||
if isinstance(array, evaluate.Instance) and len(array.var_args):
|
|
||||||
temp = array.var_args[0][0]
|
|
||||||
if isinstance(temp, ArrayInstance):
|
|
||||||
# prevent recursions
|
|
||||||
# TODO compare Modules
|
|
||||||
if self.var_args.start_pos != temp.var_args.start_pos:
|
|
||||||
items += temp.iter_content()
|
|
||||||
else:
|
|
||||||
debug.warning('ArrayInstance recursion', self.var_args)
|
|
||||||
continue
|
|
||||||
items += evaluate.get_iterator_types([array])
|
|
||||||
|
|
||||||
if self.var_args.parent_stmt is None:
|
|
||||||
return [] # generated var_args should not be checked for arrays
|
|
||||||
|
|
||||||
module = self.var_args.parent_stmt.get_parent_until()
|
|
||||||
is_list = str(self.instance.name) == 'list'
|
|
||||||
items += _check_array_additions(self.instance, module, is_list)
|
|
||||||
return items
|
|
||||||
|
|
||||||
|
|
||||||
def related_names(definitions, search_name, mods):
|
|
||||||
def check_call(call):
|
|
||||||
result = []
|
|
||||||
follow = [] # There might be multiple search_name's in one call_path
|
|
||||||
call_path = list(call.generate_call_path())
|
|
||||||
for i, name in enumerate(call_path):
|
|
||||||
# name is `parsing.NamePart`.
|
|
||||||
if name == search_name:
|
|
||||||
follow.append(call_path[:i + 1])
|
|
||||||
|
|
||||||
for f in follow:
|
|
||||||
follow_res, search = evaluate.goto(call.parent_stmt, f)
|
|
||||||
follow_res = related_name_add_import_modules(follow_res, search)
|
|
||||||
|
|
||||||
#print follow_res, [d.parent for d in follow_res]
|
|
||||||
# compare to see if they match
|
|
||||||
if any(r in definitions for r in follow_res):
|
|
||||||
scope = call.parent_stmt
|
|
||||||
result.append(api_classes.RelatedName(search, scope))
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
if not definitions:
|
|
||||||
return set()
|
|
||||||
|
|
||||||
def is_definition(arr):
|
|
||||||
try:
|
|
||||||
for a in arr:
|
|
||||||
assert len(a) == 1
|
|
||||||
a = a[0]
|
|
||||||
if a.isinstance(parsing.Array):
|
|
||||||
assert is_definition(a)
|
|
||||||
elif a.isinstance(parsing.Call):
|
|
||||||
assert a.execution is None
|
|
||||||
return True
|
|
||||||
except AssertionError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
mods |= set([d.get_parent_until() for d in definitions])
|
|
||||||
names = []
|
|
||||||
for m in get_directory_modules_for_name(mods, search_name):
|
|
||||||
try:
|
|
||||||
stmts = m.used_names[search_name]
|
|
||||||
except KeyError:
|
|
||||||
continue
|
|
||||||
for stmt in stmts:
|
|
||||||
if isinstance(stmt, parsing.Import):
|
|
||||||
count = 0
|
|
||||||
imps = []
|
|
||||||
for i in stmt.get_all_import_names():
|
|
||||||
for name_part in i.names:
|
|
||||||
count += 1
|
|
||||||
if name_part == search_name:
|
|
||||||
imps.append((count, name_part))
|
|
||||||
|
|
||||||
for used_count, name_part in imps:
|
|
||||||
i = imports.ImportPath(stmt, kill_count=count - used_count,
|
|
||||||
direct_resolve=True)
|
|
||||||
f = i.follow(is_goto=True)
|
|
||||||
if set(f) & set(definitions):
|
|
||||||
names.append(api_classes.RelatedName(name_part, stmt))
|
|
||||||
else:
|
|
||||||
calls = _scan_array(stmt.get_assignment_calls(), search_name)
|
|
||||||
for d in stmt.assignment_details:
|
|
||||||
if not is_definition(d[1]):
|
|
||||||
calls += _scan_array(d[1], search_name)
|
|
||||||
for call in calls:
|
|
||||||
names += check_call(call)
|
|
||||||
return names
|
|
||||||
|
|
||||||
|
|
||||||
def related_name_add_import_modules(definitions, search_name):
|
|
||||||
""" Adds the modules of the imports """
|
|
||||||
new = set()
|
|
||||||
for d in definitions:
|
|
||||||
if isinstance(d.parent, parsing.Import):
|
|
||||||
s = imports.ImportPath(d.parent, direct_resolve=True)
|
|
||||||
try:
|
|
||||||
new.add(s.follow(is_goto=True)[0])
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
return set(definitions) | new
|
|
||||||
|
|
||||||
|
|
||||||
def check_flow_information(flow, search_name, pos):
|
|
||||||
""" Try to find out the type of a variable just with the information that
|
|
||||||
is given by the flows: e.g. It is also responsible for assert checks.
|
|
||||||
>>> if isinstance(k, str):
|
|
||||||
>>> k. # <- completion here
|
|
||||||
|
|
||||||
ensures that `k` is a string.
|
|
||||||
"""
|
|
||||||
result = []
|
|
||||||
if isinstance(flow, parsing.Scope) and not result:
|
|
||||||
for ass in reversed(flow.asserts):
|
|
||||||
if pos is None or ass.start_pos > pos:
|
|
||||||
continue
|
|
||||||
result = check_statement_information(ass, search_name)
|
|
||||||
if result:
|
|
||||||
break
|
|
||||||
|
|
||||||
if isinstance(flow, parsing.Flow) and not result:
|
|
||||||
if flow.command in ['if', 'while'] and len(flow.inits) == 1:
|
|
||||||
result = check_statement_information(flow.inits[0], search_name)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def check_statement_information(stmt, search_name):
|
|
||||||
try:
|
|
||||||
ass = stmt.get_assignment_calls()
|
|
||||||
try:
|
|
||||||
call = ass.get_only_subelement()
|
|
||||||
except AttributeError:
|
|
||||||
assert False
|
|
||||||
assert type(call) == parsing.Call and str(call.name) == 'isinstance'
|
|
||||||
assert bool(call.execution)
|
|
||||||
|
|
||||||
# isinstance check
|
|
||||||
isinst = call.execution.values
|
|
||||||
assert len(isinst) == 2 # has two params
|
|
||||||
assert len(isinst[0]) == 1
|
|
||||||
assert len(isinst[1]) == 1
|
|
||||||
assert isinstance(isinst[0][0], parsing.Call)
|
|
||||||
# names fit?
|
|
||||||
assert str(isinst[0][0].name) == search_name
|
|
||||||
classes_call = isinst[1][0] # class_or_type_or_tuple
|
|
||||||
assert isinstance(classes_call, parsing.Call)
|
|
||||||
result = []
|
|
||||||
for c in evaluate.follow_call(classes_call):
|
|
||||||
if isinstance(c, evaluate.Array):
|
|
||||||
result += c.get_index_types()
|
|
||||||
else:
|
|
||||||
result.append(c)
|
|
||||||
for i, c in enumerate(result):
|
|
||||||
result[i] = evaluate.Instance(c)
|
|
||||||
return result
|
|
||||||
except AssertionError:
|
|
||||||
return []
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,266 +0,0 @@
|
||||||
import copy
|
|
||||||
import contextlib
|
|
||||||
|
|
||||||
import parsing
|
|
||||||
import evaluate
|
|
||||||
import debug
|
|
||||||
import builtin
|
|
||||||
import settings
|
|
||||||
|
|
||||||
|
|
||||||
class RecursionDecorator(object):
|
|
||||||
"""
|
|
||||||
A decorator to detect recursions in statements. In a recursion a statement
|
|
||||||
at the same place, in the same module may not be executed two times.
|
|
||||||
"""
|
|
||||||
def __init__(self, func):
|
|
||||||
self.func = func
|
|
||||||
self.reset()
|
|
||||||
|
|
||||||
def __call__(self, stmt, *args, **kwargs):
|
|
||||||
#print stmt, len(self.node_statements())
|
|
||||||
if self.push_stmt(stmt):
|
|
||||||
return []
|
|
||||||
else:
|
|
||||||
result = self.func(stmt, *args, **kwargs)
|
|
||||||
self.pop_stmt()
|
|
||||||
return result
|
|
||||||
|
|
||||||
def push_stmt(self, stmt):
|
|
||||||
self.current = RecursionNode(stmt, self.current)
|
|
||||||
if self._check_recursion():
|
|
||||||
debug.warning('catched recursion', stmt)
|
|
||||||
self.pop_stmt()
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def pop_stmt(self):
|
|
||||||
if self.current is not None:
|
|
||||||
# I don't know how current can be None, but sometimes it happens
|
|
||||||
# with Python3.
|
|
||||||
self.current = self.current.parent
|
|
||||||
|
|
||||||
def _check_recursion(self):
|
|
||||||
test = self.current
|
|
||||||
while True:
|
|
||||||
test = test.parent
|
|
||||||
if self.current == test:
|
|
||||||
return True
|
|
||||||
if not test:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
self.top = None
|
|
||||||
self.current = None
|
|
||||||
|
|
||||||
def node_statements(self):
|
|
||||||
result = []
|
|
||||||
n = self.current
|
|
||||||
while n:
|
|
||||||
result.insert(0, n.stmt)
|
|
||||||
n = n.parent
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class RecursionNode(object):
|
|
||||||
""" A node of the RecursionDecorator. """
|
|
||||||
def __init__(self, stmt, parent):
|
|
||||||
self.script = stmt.get_parent_until()
|
|
||||||
self.position = stmt.start_pos
|
|
||||||
self.parent = parent
|
|
||||||
self.stmt = stmt
|
|
||||||
|
|
||||||
# Don't check param instances, they are not causing recursions
|
|
||||||
# The same's true for the builtins, because the builtins are really
|
|
||||||
# simple.
|
|
||||||
self.is_ignored = isinstance(stmt, parsing.Param) \
|
|
||||||
or (self.script == builtin.Builtin.scope)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if not other:
|
|
||||||
return None
|
|
||||||
return self.script == other.script \
|
|
||||||
and self.position == other.position \
|
|
||||||
and not self.is_ignored and not other.is_ignored
|
|
||||||
|
|
||||||
|
|
||||||
class ExecutionRecursionDecorator(object):
|
|
||||||
"""
|
|
||||||
Catches recursions of executions.
|
|
||||||
It is designed like a Singelton. Only one instance should exist.
|
|
||||||
"""
|
|
||||||
def __init__(self, func):
|
|
||||||
self.func = func
|
|
||||||
self.reset()
|
|
||||||
|
|
||||||
def __call__(self, execution, evaluate_generator=False):
|
|
||||||
debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
|
|
||||||
self.execution_count, len(self.execution_funcs))
|
|
||||||
if self.check_recursion(execution, evaluate_generator):
|
|
||||||
result = []
|
|
||||||
else:
|
|
||||||
result = self.func(execution, evaluate_generator)
|
|
||||||
self.cleanup()
|
|
||||||
return result
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def cleanup(cls):
|
|
||||||
cls.parent_execution_funcs.pop()
|
|
||||||
cls.recursion_level -= 1
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def check_recursion(cls, execution, evaluate_generator):
|
|
||||||
in_par_execution_funcs = execution.base in cls.parent_execution_funcs
|
|
||||||
in_execution_funcs = execution.base in cls.execution_funcs
|
|
||||||
cls.recursion_level += 1
|
|
||||||
cls.execution_count += 1
|
|
||||||
cls.execution_funcs.add(execution.base)
|
|
||||||
cls.parent_execution_funcs.append(execution.base)
|
|
||||||
|
|
||||||
if cls.execution_count > settings.max_executions:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if isinstance(execution.base, (evaluate.Generator, evaluate.Array)):
|
|
||||||
return False
|
|
||||||
module = execution.get_parent_until()
|
|
||||||
if evaluate_generator or module == builtin.Builtin.scope:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if in_par_execution_funcs:
|
|
||||||
if cls.recursion_level > settings.max_function_recursion_level:
|
|
||||||
return True
|
|
||||||
if in_execution_funcs and \
|
|
||||||
len(cls.execution_funcs) > settings.max_until_execution_unique:
|
|
||||||
return True
|
|
||||||
if cls.execution_count > settings.max_executions_without_builtins:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def reset(cls):
|
|
||||||
cls.recursion_level = 0
|
|
||||||
cls.parent_execution_funcs = []
|
|
||||||
cls.execution_funcs = set()
|
|
||||||
cls.execution_count = 0
|
|
||||||
|
|
||||||
|
|
||||||
def fast_parent_copy(obj):
|
|
||||||
"""
|
|
||||||
Much, much faster than copy.deepcopy, but just for certain elements.
|
|
||||||
"""
|
|
||||||
new_elements = {}
|
|
||||||
|
|
||||||
def recursion(obj):
|
|
||||||
new_obj = copy.copy(obj)
|
|
||||||
new_elements[obj] = new_obj
|
|
||||||
|
|
||||||
items = new_obj.__dict__.items()
|
|
||||||
for key, value in items:
|
|
||||||
# replace parent (first try _parent and then parent)
|
|
||||||
if key in ['parent', '_parent', '_parent_stmt'] \
|
|
||||||
and value is not None:
|
|
||||||
if key == 'parent' and '_parent' in items:
|
|
||||||
# parent can be a property
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
setattr(new_obj, key, new_elements[value])
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
elif key in ['parent_stmt', 'parent_function']:
|
|
||||||
continue
|
|
||||||
elif isinstance(value, list):
|
|
||||||
setattr(new_obj, key, list_rec(value))
|
|
||||||
elif isinstance(value, (parsing.Simple, parsing.Call)):
|
|
||||||
setattr(new_obj, key, recursion(value))
|
|
||||||
return new_obj
|
|
||||||
|
|
||||||
def list_rec(list_obj):
|
|
||||||
copied_list = list_obj[:] # lists, tuples, strings, unicode
|
|
||||||
for i, el in enumerate(copied_list):
|
|
||||||
if isinstance(el, (parsing.Simple, parsing.Call)):
|
|
||||||
copied_list[i] = recursion(el)
|
|
||||||
elif isinstance(el, list):
|
|
||||||
copied_list[i] = list_rec(el)
|
|
||||||
return copied_list
|
|
||||||
return recursion(obj)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_param_array(args_tuple, parent_stmt=None):
|
|
||||||
""" This generates an array, that can be used as a param. """
|
|
||||||
values = []
|
|
||||||
for arg in args_tuple:
|
|
||||||
if arg is None:
|
|
||||||
values.append([])
|
|
||||||
else:
|
|
||||||
values.append([arg])
|
|
||||||
pos = None
|
|
||||||
arr = parsing.Array(pos, parsing.Array.TUPLE, parent_stmt, values=values)
|
|
||||||
evaluate.faked_scopes.append(arr)
|
|
||||||
return arr
|
|
||||||
|
|
||||||
|
|
||||||
def scan_array_for_pos(arr, pos):
|
|
||||||
"""
|
|
||||||
Returns the function Call that match search_name in an Array.
|
|
||||||
Makes changes to arr!
|
|
||||||
"""
|
|
||||||
def check_arr_index():
|
|
||||||
positions = arr.arr_el_pos
|
|
||||||
for index, comma_pos in enumerate(positions):
|
|
||||||
if pos < comma_pos:
|
|
||||||
return index
|
|
||||||
return len(positions)
|
|
||||||
|
|
||||||
call = None
|
|
||||||
stop = False
|
|
||||||
for sub in arr.values:
|
|
||||||
call = None
|
|
||||||
for s in sub:
|
|
||||||
if isinstance(s, parsing.Array):
|
|
||||||
new = scan_array_for_pos(s, pos)
|
|
||||||
if new[0] is not None:
|
|
||||||
call, index, stop = new
|
|
||||||
if stop:
|
|
||||||
return call, index, stop
|
|
||||||
elif isinstance(s, parsing.Call):
|
|
||||||
start_s = s
|
|
||||||
# check parts of calls
|
|
||||||
while s is not None:
|
|
||||||
if s.start_pos >= pos:
|
|
||||||
return call, check_arr_index(), stop
|
|
||||||
elif s.execution is not None:
|
|
||||||
end = s.execution.end_pos
|
|
||||||
if s.execution.start_pos < pos and \
|
|
||||||
(end is None or pos < end):
|
|
||||||
c, index, stop = scan_array_for_pos(
|
|
||||||
s.execution, pos)
|
|
||||||
if stop:
|
|
||||||
return c, index, stop
|
|
||||||
|
|
||||||
# call should return without execution and
|
|
||||||
# next
|
|
||||||
reset = c or s
|
|
||||||
if reset.execution.type not in \
|
|
||||||
[parsing.Array.TUPLE,
|
|
||||||
parsing.Array.NOARRAY]:
|
|
||||||
return start_s, index, False
|
|
||||||
|
|
||||||
reset.execution = None
|
|
||||||
reset.next = None
|
|
||||||
return c or start_s, index, True
|
|
||||||
s = s.next
|
|
||||||
|
|
||||||
# The third return is just necessary for recursion inside, because
|
|
||||||
# it needs to know when to stop iterating.
|
|
||||||
return call, check_arr_index(), stop
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def scale_speed_settings(factor):
|
|
||||||
a = settings.max_executions
|
|
||||||
b = settings.max_until_execution_unique
|
|
||||||
settings.max_executions *= factor
|
|
||||||
settings.max_until_execution_unique *= factor
|
|
||||||
yield
|
|
||||||
settings.max_executions = a
|
|
||||||
settings.max_until_execution_unique = b
|
|
|
@ -1,332 +0,0 @@
|
||||||
from __future__ import with_statement
|
|
||||||
|
|
||||||
import os
|
|
||||||
import pkgutil
|
|
||||||
import imp
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
import builtin
|
|
||||||
import modules
|
|
||||||
import debug
|
|
||||||
import parsing
|
|
||||||
import evaluate
|
|
||||||
import itertools
|
|
||||||
import settings
|
|
||||||
|
|
||||||
# for debugging purposes only
|
|
||||||
imports_processed = 0
|
|
||||||
|
|
||||||
star_import_cache = {}
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleNotFound(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ImportPath(parsing.Base):
|
|
||||||
"""
|
|
||||||
An ImportPath is the path of a `parsing.Import` object.
|
|
||||||
"""
|
|
||||||
class _GlobalNamespace(object):
|
|
||||||
def get_defined_names(self):
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_imports(self):
|
|
||||||
return []
|
|
||||||
|
|
||||||
@property
|
|
||||||
def start_pos(self):
|
|
||||||
return (0, 0)
|
|
||||||
|
|
||||||
def get_parent_until(self):
|
|
||||||
return None
|
|
||||||
|
|
||||||
GlobalNamespace = _GlobalNamespace()
|
|
||||||
|
|
||||||
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
|
|
||||||
direct_resolve=False):
|
|
||||||
self.import_stmt = import_stmt
|
|
||||||
self.is_like_search = is_like_search
|
|
||||||
self.direct_resolve = direct_resolve
|
|
||||||
self.is_partial_import = bool(kill_count)
|
|
||||||
path = import_stmt.get_parent_until().path
|
|
||||||
self.file_path = os.path.dirname(path) if path is not None else None
|
|
||||||
|
|
||||||
# rest is import_path resolution
|
|
||||||
self.import_path = []
|
|
||||||
if import_stmt.from_ns:
|
|
||||||
self.import_path += import_stmt.from_ns.names
|
|
||||||
if import_stmt.namespace:
|
|
||||||
if self.is_nested_import() and not direct_resolve:
|
|
||||||
self.import_path.append(import_stmt.namespace.names[0])
|
|
||||||
else:
|
|
||||||
self.import_path += import_stmt.namespace.names
|
|
||||||
|
|
||||||
for i in range(kill_count + int(is_like_search)):
|
|
||||||
self.import_path.pop()
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '<%s: %s>' % (type(self).__name__, self.import_stmt)
|
|
||||||
|
|
||||||
def is_nested_import(self):
|
|
||||||
"""
|
|
||||||
This checks for the special case of nested imports, without aliases and
|
|
||||||
from statement:
|
|
||||||
>>> import foo.bar
|
|
||||||
"""
|
|
||||||
return not self.import_stmt.alias and not self.import_stmt.from_ns \
|
|
||||||
and len(self.import_stmt.namespace.names) > 1 \
|
|
||||||
and not self.direct_resolve
|
|
||||||
|
|
||||||
def get_nested_import(self, parent):
|
|
||||||
"""
|
|
||||||
See documentation of `self.is_nested_import`.
|
|
||||||
Generates an Import statement, that can be used to fake nested imports.
|
|
||||||
"""
|
|
||||||
i = self.import_stmt
|
|
||||||
# This is not an existing Import statement. Therefore, set position to
|
|
||||||
# 0 (0 is not a valid line number).
|
|
||||||
zero = (0, 0)
|
|
||||||
n = parsing.Name(i.namespace.names[1:], zero, zero, self.import_stmt)
|
|
||||||
new = parsing.Import(zero, zero, n)
|
|
||||||
new.parent = parent
|
|
||||||
evaluate.faked_scopes.append(new)
|
|
||||||
debug.dbg('Generated a nested import: %s' % new)
|
|
||||||
return new
|
|
||||||
|
|
||||||
def get_defined_names(self, on_import_stmt=False):
|
|
||||||
names = []
|
|
||||||
for scope in self.follow():
|
|
||||||
if scope is ImportPath.GlobalNamespace:
|
|
||||||
if self.import_stmt.relative_count == 0:
|
|
||||||
names += self.get_module_names()
|
|
||||||
|
|
||||||
if self.file_path is not None:
|
|
||||||
path = os.path.abspath(self.file_path)
|
|
||||||
for i in range(self.import_stmt.relative_count - 1):
|
|
||||||
path = os.path.dirname(path)
|
|
||||||
names += self.get_module_names([path])
|
|
||||||
else:
|
|
||||||
if on_import_stmt and isinstance(scope, parsing.Module) \
|
|
||||||
and scope.path.endswith('__init__.py'):
|
|
||||||
pkg_path = os.path.dirname(scope.path)
|
|
||||||
names += self.get_module_names([pkg_path])
|
|
||||||
for s, scope_names in evaluate.get_names_for_scope(scope,
|
|
||||||
include_builtin=False):
|
|
||||||
for n in scope_names:
|
|
||||||
if self.import_stmt.from_ns is None \
|
|
||||||
or self.is_partial_import:
|
|
||||||
# from_ns must be defined to access module
|
|
||||||
# values plus a partial import means that there
|
|
||||||
# is something after the import, which
|
|
||||||
# automatically implies that there must not be
|
|
||||||
# any non-module scope.
|
|
||||||
continue
|
|
||||||
names.append(n)
|
|
||||||
return names
|
|
||||||
|
|
||||||
def get_module_names(self, search_path=None):
|
|
||||||
"""
|
|
||||||
Get the names of all modules in the search_path. This means file names
|
|
||||||
and not names defined in the files.
|
|
||||||
"""
|
|
||||||
if not search_path:
|
|
||||||
search_path = self.sys_path_with_modifications()
|
|
||||||
names = []
|
|
||||||
for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
|
|
||||||
inf_pos = (float('inf'), float('inf'))
|
|
||||||
names.append(parsing.Name([(name, inf_pos)], inf_pos, inf_pos,
|
|
||||||
self.import_stmt))
|
|
||||||
return names
|
|
||||||
|
|
||||||
def sys_path_with_modifications(self):
|
|
||||||
module = self.import_stmt.get_parent_until()
|
|
||||||
return modules.sys_path_with_modifications(module)
|
|
||||||
|
|
||||||
def follow(self, is_goto=False):
|
|
||||||
"""
|
|
||||||
Returns the imported modules.
|
|
||||||
"""
|
|
||||||
if evaluate.follow_statement.push_stmt(self.import_stmt):
|
|
||||||
# check recursion
|
|
||||||
return []
|
|
||||||
|
|
||||||
if self.import_path:
|
|
||||||
try:
|
|
||||||
scope, rest = self._follow_file_system()
|
|
||||||
except ModuleNotFound:
|
|
||||||
debug.warning('Module not found: ' + str(self.import_stmt))
|
|
||||||
evaluate.follow_statement.pop_stmt()
|
|
||||||
return []
|
|
||||||
|
|
||||||
scopes = [scope]
|
|
||||||
scopes += itertools.chain.from_iterable(
|
|
||||||
remove_star_imports(s) for s in scopes)
|
|
||||||
|
|
||||||
# follow the rest of the import (not FS -> classes, functions)
|
|
||||||
if len(rest) > 1 or rest and self.is_like_search:
|
|
||||||
scopes = []
|
|
||||||
elif rest:
|
|
||||||
if is_goto:
|
|
||||||
scopes = itertools.chain.from_iterable(
|
|
||||||
evaluate.get_scopes_for_name(s, rest[0], is_goto=True)
|
|
||||||
for s in scopes)
|
|
||||||
else:
|
|
||||||
scopes = itertools.chain.from_iterable(
|
|
||||||
evaluate.follow_path(iter(rest), s, s)
|
|
||||||
for s in scopes)
|
|
||||||
scopes = list(scopes)
|
|
||||||
|
|
||||||
if self.is_nested_import():
|
|
||||||
scopes.append(self.get_nested_import(scope))
|
|
||||||
else:
|
|
||||||
scopes = [ImportPath.GlobalNamespace]
|
|
||||||
debug.dbg('after import', scopes)
|
|
||||||
|
|
||||||
evaluate.follow_statement.pop_stmt()
|
|
||||||
return scopes
|
|
||||||
|
|
||||||
def _follow_file_system(self):
|
|
||||||
"""
|
|
||||||
Find a module with a path (of the module, like usb.backend.libusb10).
|
|
||||||
"""
|
|
||||||
def follow_str(ns, string):
|
|
||||||
debug.dbg('follow_module', ns, string)
|
|
||||||
path = None
|
|
||||||
if ns:
|
|
||||||
path = ns[1]
|
|
||||||
elif self.import_stmt.relative_count:
|
|
||||||
module = self.import_stmt.get_parent_until()
|
|
||||||
path = os.path.abspath(module.path)
|
|
||||||
for i in range(self.import_stmt.relative_count):
|
|
||||||
path = os.path.dirname(path)
|
|
||||||
|
|
||||||
global imports_processed
|
|
||||||
imports_processed += 1
|
|
||||||
if path is not None:
|
|
||||||
return imp.find_module(string, [path])
|
|
||||||
else:
|
|
||||||
debug.dbg('search_module', string, self.file_path)
|
|
||||||
# Override the sys.path. It works only good that way.
|
|
||||||
# Injecting the path directly into `find_module` did not work.
|
|
||||||
sys.path, temp = sys_path_mod, sys.path
|
|
||||||
try:
|
|
||||||
i = imp.find_module(string)
|
|
||||||
except ImportError:
|
|
||||||
sys.path = temp
|
|
||||||
raise
|
|
||||||
sys.path = temp
|
|
||||||
return i
|
|
||||||
|
|
||||||
if self.file_path:
|
|
||||||
sys_path_mod = list(self.sys_path_with_modifications())
|
|
||||||
sys_path_mod.insert(0, self.file_path)
|
|
||||||
else:
|
|
||||||
sys_path_mod = list(builtin.get_sys_path())
|
|
||||||
|
|
||||||
current_namespace = None
|
|
||||||
# now execute those paths
|
|
||||||
rest = []
|
|
||||||
for i, s in enumerate(self.import_path):
|
|
||||||
try:
|
|
||||||
current_namespace = follow_str(current_namespace, s)
|
|
||||||
except ImportError:
|
|
||||||
if current_namespace:
|
|
||||||
rest = self.import_path[i:]
|
|
||||||
else:
|
|
||||||
raise ModuleNotFound(
|
|
||||||
'The module you searched has not been found')
|
|
||||||
|
|
||||||
sys_path_mod.pop(0) # TODO why is this here?
|
|
||||||
path = current_namespace[1]
|
|
||||||
is_package_directory = current_namespace[2][2] == imp.PKG_DIRECTORY
|
|
||||||
|
|
||||||
f = None
|
|
||||||
if is_package_directory or current_namespace[0]:
|
|
||||||
# is a directory module
|
|
||||||
if is_package_directory:
|
|
||||||
path += '/__init__.py'
|
|
||||||
with open(path) as f:
|
|
||||||
source = f.read()
|
|
||||||
else:
|
|
||||||
source = current_namespace[0].read()
|
|
||||||
current_namespace[0].close()
|
|
||||||
if path.endswith('.py'):
|
|
||||||
f = modules.Module(path, source)
|
|
||||||
else:
|
|
||||||
f = builtin.Parser(path=path)
|
|
||||||
else:
|
|
||||||
f = builtin.Parser(name=path)
|
|
||||||
|
|
||||||
return f.parser.module, rest
|
|
||||||
|
|
||||||
|
|
||||||
def strip_imports(scopes):
|
|
||||||
"""
|
|
||||||
Here we strip the imports - they don't get resolved necessarily.
|
|
||||||
Really used anymore? Merge with remove_star_imports?
|
|
||||||
"""
|
|
||||||
result = []
|
|
||||||
for s in scopes:
|
|
||||||
if isinstance(s, parsing.Import):
|
|
||||||
result += ImportPath(s).follow()
|
|
||||||
else:
|
|
||||||
result.append(s)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def cache_star_import(func):
|
|
||||||
def wrapper(scope, *args, **kwargs):
|
|
||||||
try:
|
|
||||||
mods = star_import_cache[scope]
|
|
||||||
if mods[0] + settings.star_import_cache_validity > time.time():
|
|
||||||
return mods[1]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
# cache is too old and therefore invalid or not available
|
|
||||||
invalidate_star_import_cache(scope)
|
|
||||||
mods = func(scope, *args, **kwargs)
|
|
||||||
star_import_cache[scope] = time.time(), mods
|
|
||||||
|
|
||||||
return mods
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
def invalidate_star_import_cache(module, only_main=False):
|
|
||||||
""" Important if some new modules are being reparsed """
|
|
||||||
try:
|
|
||||||
t, mods = star_import_cache[module]
|
|
||||||
|
|
||||||
del star_import_cache[module]
|
|
||||||
|
|
||||||
for m in mods:
|
|
||||||
invalidate_star_import_cache(m, only_main=True)
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not only_main:
|
|
||||||
# We need a list here because otherwise the list is being changed
|
|
||||||
# during the iteration in py3k: iteritems -> items.
|
|
||||||
for key, (t, mods) in list(star_import_cache.items()):
|
|
||||||
if module in mods:
|
|
||||||
invalidate_star_import_cache(key)
|
|
||||||
|
|
||||||
|
|
||||||
@cache_star_import
|
|
||||||
def remove_star_imports(scope, ignored_modules=[]):
|
|
||||||
"""
|
|
||||||
Check a module for star imports:
|
|
||||||
>>> from module import *
|
|
||||||
|
|
||||||
and follow these modules.
|
|
||||||
"""
|
|
||||||
modules = strip_imports(i for i in scope.get_imports() if i.star)
|
|
||||||
new = []
|
|
||||||
for m in modules:
|
|
||||||
if m not in ignored_modules:
|
|
||||||
new += remove_star_imports(m, modules)
|
|
||||||
modules += new
|
|
||||||
|
|
||||||
# Filter duplicate modules.
|
|
||||||
return set(modules)
|
|
|
@ -1,86 +0,0 @@
|
||||||
import keyword
|
|
||||||
|
|
||||||
from _compatibility import is_py3k
|
|
||||||
import builtin
|
|
||||||
|
|
||||||
import pydoc
|
|
||||||
try:
|
|
||||||
from pydoc_data import topics as pydoc_topics
|
|
||||||
except ImportError:
|
|
||||||
# Python 2.6
|
|
||||||
try:
|
|
||||||
import pydoc_topics
|
|
||||||
except ImportError:
|
|
||||||
# Python 2.5
|
|
||||||
pydoc_topics = None
|
|
||||||
|
|
||||||
if is_py3k:
|
|
||||||
keys = keyword.kwlist
|
|
||||||
else:
|
|
||||||
keys = keyword.kwlist + ['None', 'False', 'True']
|
|
||||||
|
|
||||||
|
|
||||||
def get_keywords(string='', pos=(0, 0), all=False):
|
|
||||||
if all:
|
|
||||||
return set([Keyword(k, pos) for k in keys])
|
|
||||||
if string in keys:
|
|
||||||
return set([Keyword(string, pos)])
|
|
||||||
return set()
|
|
||||||
|
|
||||||
|
|
||||||
def get_operator(string, pos):
|
|
||||||
return Keyword(string, pos)
|
|
||||||
|
|
||||||
|
|
||||||
class Keyword(object):
|
|
||||||
def __init__(self, name, pos):
|
|
||||||
self.name = name
|
|
||||||
self.start_pos = pos
|
|
||||||
self.parent = builtin.Builtin.scope
|
|
||||||
|
|
||||||
def get_parent_until(self):
|
|
||||||
return self.parent
|
|
||||||
|
|
||||||
@property
|
|
||||||
def names(self):
|
|
||||||
""" For a `parsing.Name` like comparision """
|
|
||||||
return [self.name]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def docstr(self):
|
|
||||||
return imitate_pydoc(self.name)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '<%s: %s>' % (type(self).__name__, self.name)
|
|
||||||
|
|
||||||
|
|
||||||
def imitate_pydoc(string):
|
|
||||||
"""
|
|
||||||
It's not possible to get the pydoc's without starting the annoying pager
|
|
||||||
stuff.
|
|
||||||
"""
|
|
||||||
# str needed because of possible unicode stuff in py2k (pydoc doesn't work
|
|
||||||
# with unicode strings)
|
|
||||||
string = str(string)
|
|
||||||
h = pydoc.help
|
|
||||||
try:
|
|
||||||
# try to access symbols
|
|
||||||
string = h.symbols[string]
|
|
||||||
string, _, related = string.partition(' ')
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
get_target = lambda s: h.topics.get(s, h.keywords.get(s))
|
|
||||||
while isinstance(string, str):
|
|
||||||
string = get_target(string)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# is a tuple now
|
|
||||||
label, related = string
|
|
||||||
except TypeError:
|
|
||||||
return ''
|
|
||||||
|
|
||||||
try:
|
|
||||||
return pydoc_topics.topics[label] if pydoc_topics else ''
|
|
||||||
except KeyError:
|
|
||||||
return ''
|
|
|
@ -1,9 +0,0 @@
|
||||||
class partial():
|
|
||||||
def __init__(self, func, *args, **keywords):
|
|
||||||
self.__func = func
|
|
||||||
self.__args = args
|
|
||||||
self.__keywords = keywords
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
|
||||||
# I know this doesn't work in Python, but Jedi can this ;-)
|
|
||||||
return self.__func(*self.__args, *args, **self.keywords, **kwargs)
|
|
|
@ -1,99 +0,0 @@
|
||||||
def compile():
|
|
||||||
class SRE_Match():
|
|
||||||
endpos = 1
|
|
||||||
lastgroup = 0
|
|
||||||
lastindex = 1
|
|
||||||
pos = 0
|
|
||||||
string = 'a'
|
|
||||||
regs = ((0, 1),)
|
|
||||||
|
|
||||||
def __init__(self, pattern):
|
|
||||||
self.re = pattern
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def end(self):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def span(self):
|
|
||||||
return 0, 1
|
|
||||||
|
|
||||||
def expand(self):
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def group(self):
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def groupdict(self):
|
|
||||||
return {'a', 'a'}
|
|
||||||
|
|
||||||
def groups(self):
|
|
||||||
return ('a',)
|
|
||||||
|
|
||||||
class SRE_Pattern():
|
|
||||||
flags = 0
|
|
||||||
groupindex = {}
|
|
||||||
groups = 0
|
|
||||||
pattern = 'a'
|
|
||||||
|
|
||||||
def findall(self):
|
|
||||||
"""
|
|
||||||
findall(string[, pos[, endpos]]) --> list.
|
|
||||||
Return a list of all non-overlapping matches of pattern in string.
|
|
||||||
"""
|
|
||||||
return ['a']
|
|
||||||
|
|
||||||
def finditer(self):
|
|
||||||
"""
|
|
||||||
finditer(string[, pos[, endpos]]) --> iterator.
|
|
||||||
Return an iterator over all non-overlapping matches for the
|
|
||||||
RE pattern in string. For each match, the iterator returns a
|
|
||||||
match object.
|
|
||||||
"""
|
|
||||||
yield SRE_Match(self)
|
|
||||||
|
|
||||||
def match(self):
|
|
||||||
"""
|
|
||||||
match(string[, pos[, endpos]]) --> match object or None.
|
|
||||||
Matches zero or more characters at the beginning of the string
|
|
||||||
pattern
|
|
||||||
"""
|
|
||||||
return SRE_Match(self)
|
|
||||||
|
|
||||||
def scanner(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def search(self):
|
|
||||||
"""
|
|
||||||
search(string[, pos[, endpos]]) --> match object or None.
|
|
||||||
Scan through string looking for a match, and return a corresponding
|
|
||||||
MatchObject instance. Return None if no position in the string matches.
|
|
||||||
"""
|
|
||||||
return SRE_Match(self)
|
|
||||||
|
|
||||||
def split(self):
|
|
||||||
"""
|
|
||||||
split(string[, maxsplit = 0]) --> list.
|
|
||||||
Split string by the occurrences of pattern.
|
|
||||||
"""
|
|
||||||
return ['a']
|
|
||||||
|
|
||||||
def sub(self):
|
|
||||||
"""
|
|
||||||
sub(repl, string[, count = 0]) --> newstring
|
|
||||||
Return the string obtained by replacing the leftmost non-overlapping
|
|
||||||
occurrences of pattern in string by the replacement repl.
|
|
||||||
"""
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def subn(self):
|
|
||||||
"""
|
|
||||||
subn(repl, string[, count = 0]) --> (newstring, number of subs)
|
|
||||||
Return the tuple (new_string, number_of_subs_made) found by replacing
|
|
||||||
the leftmost non-overlapping occurrences of pattern with the
|
|
||||||
replacement repl.
|
|
||||||
"""
|
|
||||||
return ('', 1)
|
|
||||||
|
|
||||||
return SRE_Pattern()
|
|
|
@ -1,8 +0,0 @@
|
||||||
def proxy(object, callback=None):
|
|
||||||
return object
|
|
||||||
|
|
||||||
class ref():
|
|
||||||
def __init__(self, object, callback=None):
|
|
||||||
self.__object = object
|
|
||||||
def __call__(self):
|
|
||||||
return self.__object
|
|
|
@ -1,218 +0,0 @@
|
||||||
"""
|
|
||||||
Pure Python implementation of some builtins.
|
|
||||||
This code is not going to be executed anywhere.
|
|
||||||
These implementations are not always correct, but should work as good as
|
|
||||||
possible for the auto completion.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def next(iterator, default=None):
|
|
||||||
if hasattr("next"):
|
|
||||||
return iterator.next()
|
|
||||||
else:
|
|
||||||
return iterator.__next__()
|
|
||||||
return default
|
|
||||||
|
|
||||||
|
|
||||||
def iter(collection, sentinel=None):
|
|
||||||
if sentinel:
|
|
||||||
yield collection()
|
|
||||||
else:
|
|
||||||
for c in collection:
|
|
||||||
yield c
|
|
||||||
|
|
||||||
|
|
||||||
def range(start, stop=None, step=1):
|
|
||||||
return [0]
|
|
||||||
|
|
||||||
|
|
||||||
class xrange():
|
|
||||||
# Attention: this function doesn't exist in Py3k (there it is range).
|
|
||||||
def __iter__(self):
|
|
||||||
yield 1
|
|
||||||
|
|
||||||
def count(self):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def index(self):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
#--------------------------------------------------------
|
|
||||||
# descriptors
|
|
||||||
#--------------------------------------------------------
|
|
||||||
class property():
|
|
||||||
def __init__(self, fget, fset=None, fdel=None, doc=None):
|
|
||||||
self.fget = fget
|
|
||||||
self.fset = fset
|
|
||||||
self.fdel = fdel
|
|
||||||
self.__doc__ = doc
|
|
||||||
|
|
||||||
def __get__(self, obj, cls):
|
|
||||||
return self.fget(obj)
|
|
||||||
|
|
||||||
def __set__(self, obj, value):
|
|
||||||
self.fset(obj, value)
|
|
||||||
|
|
||||||
def __delete__(self, obj):
|
|
||||||
self.fdel(obj)
|
|
||||||
|
|
||||||
def setter(self, func):
|
|
||||||
self.fset = func
|
|
||||||
return self
|
|
||||||
|
|
||||||
def getter(self, func):
|
|
||||||
self.fget = func
|
|
||||||
return self
|
|
||||||
|
|
||||||
def deleter(self, func):
|
|
||||||
self.fdel = func
|
|
||||||
return self
|
|
||||||
|
|
||||||
|
|
||||||
class staticmethod():
|
|
||||||
def __init__(self, func):
|
|
||||||
self.__func = func
|
|
||||||
|
|
||||||
def __get__(self, obj, cls):
|
|
||||||
return self.__func
|
|
||||||
|
|
||||||
|
|
||||||
class classmethod():
|
|
||||||
def __init__(self, func):
|
|
||||||
self.__func = func
|
|
||||||
|
|
||||||
def __get__(self, obj, cls):
|
|
||||||
def _method(*args, **kwargs):
|
|
||||||
return self.__func(cls, *args, **kwargs)
|
|
||||||
return _method
|
|
||||||
|
|
||||||
|
|
||||||
#--------------------------------------------------------
|
|
||||||
# array stuff
|
|
||||||
#--------------------------------------------------------
|
|
||||||
class list():
|
|
||||||
def __init__(self, iterable=[]):
|
|
||||||
self.__iterable = []
|
|
||||||
for i in iterable:
|
|
||||||
self.__iterable += [i]
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in self.__iterable:
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def __getitem__(self, y):
|
|
||||||
return self.__iterable[y]
|
|
||||||
|
|
||||||
def pop(self):
|
|
||||||
return self.__iterable[-1]
|
|
||||||
|
|
||||||
|
|
||||||
class tuple():
|
|
||||||
def __init__(self, iterable=[]):
|
|
||||||
self.__iterable = []
|
|
||||||
for i in iterable:
|
|
||||||
self.__iterable += [i]
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in self.__iterable:
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def __getitem__(self, y):
|
|
||||||
return self.__iterable[y]
|
|
||||||
|
|
||||||
def index(self):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def count(self):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
class set():
|
|
||||||
def __init__(self, iterable=[]):
|
|
||||||
self.__iterable = iterable
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in self.__iterable:
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def pop(self):
|
|
||||||
return self.__iterable.pop()
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def difference(self, other):
|
|
||||||
return self - other
|
|
||||||
|
|
||||||
def intersection(self, other):
|
|
||||||
return self & other
|
|
||||||
|
|
||||||
def symmetric_difference(self, other):
|
|
||||||
return self ^ other
|
|
||||||
|
|
||||||
def union(self, other):
|
|
||||||
return self | other
|
|
||||||
|
|
||||||
|
|
||||||
class frozenset():
|
|
||||||
def __init__(self, iterable=[]):
|
|
||||||
self.__iterable = iterable
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in self.__iterable:
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
|
|
||||||
class dict():
|
|
||||||
def __init__(self, **elements):
|
|
||||||
self.__elements = elements
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
# has a strange docstr
|
|
||||||
pass
|
|
||||||
|
|
||||||
def get(self, k, d=None):
|
|
||||||
# TODO implement
|
|
||||||
try:
|
|
||||||
#return self.__elements[k]
|
|
||||||
pass
|
|
||||||
except KeyError:
|
|
||||||
return d
|
|
||||||
|
|
||||||
|
|
||||||
class reversed():
|
|
||||||
def __init__(self, sequence):
|
|
||||||
self.__sequence = sequence
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in self.__sequence:
|
|
||||||
yield i
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
return next(self.__iter__())
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
return self.__next__()
|
|
||||||
|
|
||||||
|
|
||||||
#--------------------------------------------------------
|
|
||||||
# basic types
|
|
||||||
#--------------------------------------------------------
|
|
||||||
class int():
|
|
||||||
def __init__(self, x, base=None):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class str():
|
|
||||||
def __init__(self, obj):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class object():
|
|
||||||
def mro():
|
|
||||||
""" mro() -> list
|
|
||||||
return a type's method resolution order """
|
|
||||||
return [object]
|
|
|
@ -1,4 +0,0 @@
|
||||||
class datetime():
|
|
||||||
@staticmethod
|
|
||||||
def now():
|
|
||||||
return datetime()
|
|
|
@ -1,5 +0,0 @@
|
||||||
def getcwd():
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def getcwdu():
|
|
||||||
return ''
|
|
|
@ -1,312 +0,0 @@
|
||||||
from __future__ import with_statement
|
|
||||||
|
|
||||||
from _compatibility import exec_function
|
|
||||||
|
|
||||||
import re
|
|
||||||
import tokenize
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
|
|
||||||
import parsing
|
|
||||||
import builtin
|
|
||||||
import debug
|
|
||||||
import evaluate
|
|
||||||
import settings
|
|
||||||
import imports
|
|
||||||
|
|
||||||
|
|
||||||
class Module(builtin.CachedModule):
|
|
||||||
"""
|
|
||||||
Manages all files, that are parsed and caches them.
|
|
||||||
|
|
||||||
:param path: The module path of the file.
|
|
||||||
:param source: The source code of the file.
|
|
||||||
"""
|
|
||||||
def __init__(self, path, source):
|
|
||||||
super(Module, self).__init__(path=path)
|
|
||||||
self.source = source
|
|
||||||
self._line_cache = None
|
|
||||||
|
|
||||||
def _get_source(self):
|
|
||||||
""" Just one time """
|
|
||||||
s = self.source
|
|
||||||
del self.source # memory efficiency
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleWithCursor(Module):
|
|
||||||
"""
|
|
||||||
Manages all files, that are parsed and caches them.
|
|
||||||
Important are the params source and path, one of them has to
|
|
||||||
be there.
|
|
||||||
|
|
||||||
:param source: The source code of the file.
|
|
||||||
:param path: The module path of the file or None.
|
|
||||||
:param position: The position, the user is currently in. Only important \
|
|
||||||
for the main file.
|
|
||||||
"""
|
|
||||||
def __init__(self, path, source, position):
|
|
||||||
super(ModuleWithCursor, self).__init__(path, source)
|
|
||||||
self.position = position
|
|
||||||
|
|
||||||
# this two are only used, because there is no nonlocal in Python 2
|
|
||||||
self._line_temp = None
|
|
||||||
self._relevant_temp = None
|
|
||||||
|
|
||||||
self.source = source
|
|
||||||
self._part_parser = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def parser(self):
|
|
||||||
""" get the parser lazy """
|
|
||||||
if not self._parser:
|
|
||||||
try:
|
|
||||||
ts, parser = builtin.CachedModule.cache[self.path]
|
|
||||||
imports.invalidate_star_import_cache(parser.module)
|
|
||||||
|
|
||||||
del builtin.CachedModule.cache[self.path]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
# Call the parser already here, because it will be used anyways.
|
|
||||||
# Also, the position is here important (which will not be used by
|
|
||||||
# default), therefore fill the cache here.
|
|
||||||
self._parser = parsing.PyFuzzyParser(self.source, self.path,
|
|
||||||
self.position)
|
|
||||||
if self.path is not None:
|
|
||||||
builtin.CachedModule.cache[self.path] = time.time(), \
|
|
||||||
self._parser
|
|
||||||
return self._parser
|
|
||||||
|
|
||||||
def get_path_until_cursor(self):
|
|
||||||
""" Get the path under the cursor. """
|
|
||||||
result = self._get_path_until_cursor()
|
|
||||||
self._start_cursor_pos = self._line_temp + 1, self._column_temp
|
|
||||||
return result
|
|
||||||
|
|
||||||
def _get_path_until_cursor(self, start_pos=None):
|
|
||||||
def fetch_line():
|
|
||||||
line = self.get_line(self._line_temp)
|
|
||||||
if self._is_first:
|
|
||||||
self._is_first = False
|
|
||||||
self._line_length = self._column_temp
|
|
||||||
line = line[:self._column_temp]
|
|
||||||
else:
|
|
||||||
self._line_length = len(line)
|
|
||||||
line = line + '\n'
|
|
||||||
# add lines with a backslash at the end
|
|
||||||
while 1:
|
|
||||||
self._line_temp -= 1
|
|
||||||
last_line = self.get_line(self._line_temp)
|
|
||||||
if last_line and last_line[-1] == '\\':
|
|
||||||
line = last_line[:-1] + ' ' + line
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
return line[::-1]
|
|
||||||
|
|
||||||
self._is_first = True
|
|
||||||
if start_pos is None:
|
|
||||||
self._line_temp = self.position[0]
|
|
||||||
self._column_temp = self.position[1]
|
|
||||||
else:
|
|
||||||
self._line_temp, self._column_temp = start_pos
|
|
||||||
|
|
||||||
open_brackets = ['(', '[', '{']
|
|
||||||
close_brackets = [')', ']', '}']
|
|
||||||
|
|
||||||
gen = tokenize.generate_tokens(fetch_line)
|
|
||||||
string = ''
|
|
||||||
level = 0
|
|
||||||
force_point = False
|
|
||||||
try:
|
|
||||||
for token_type, tok, start, end, line in gen:
|
|
||||||
#print 'tok', token_type, tok, force_point
|
|
||||||
if level > 0:
|
|
||||||
if tok in close_brackets:
|
|
||||||
level += 1
|
|
||||||
if tok in open_brackets:
|
|
||||||
level -= 1
|
|
||||||
elif tok == '.':
|
|
||||||
force_point = False
|
|
||||||
elif force_point:
|
|
||||||
# it is reversed, therefore a number is getting recognized
|
|
||||||
# as a floating point number
|
|
||||||
if token_type == tokenize.NUMBER and tok[0] == '.':
|
|
||||||
force_point = False
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
elif tok in close_brackets:
|
|
||||||
level += 1
|
|
||||||
elif token_type in [tokenize.NAME, tokenize.STRING]:
|
|
||||||
force_point = True
|
|
||||||
elif token_type == tokenize.NUMBER:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
|
|
||||||
self._column_temp = self._line_length - end[1]
|
|
||||||
string += tok
|
|
||||||
except tokenize.TokenError:
|
|
||||||
debug.warning("Tokenize couldn't finish", sys.exc_info)
|
|
||||||
|
|
||||||
return string[::-1]
|
|
||||||
|
|
||||||
def get_path_under_cursor(self):
|
|
||||||
"""
|
|
||||||
Return the path under the cursor. If there is a rest of the path left,
|
|
||||||
it will be added to the stuff before it.
|
|
||||||
"""
|
|
||||||
line = self.get_line(self.position[0])
|
|
||||||
after = re.search("[\w\d]*", line[self.position[1]:]).group(0)
|
|
||||||
return self.get_path_until_cursor() + after
|
|
||||||
|
|
||||||
def get_operator_under_cursor(self):
|
|
||||||
line = self.get_line(self.position[0])
|
|
||||||
after = re.match("[^\w\s]+", line[self.position[1]:])
|
|
||||||
before = re.match("[^\w\s]+", line[:self.position[1]][::-1])
|
|
||||||
return (before.group(0) if before is not None else '') \
|
|
||||||
+ (after.group(0) if after is not None else '')
|
|
||||||
|
|
||||||
def get_context(self):
|
|
||||||
pos = self._start_cursor_pos
|
|
||||||
while pos > (1, 0):
|
|
||||||
# remove non important white space
|
|
||||||
line = self.get_line(pos[0])
|
|
||||||
while pos[1] > 0 and line[pos[1] - 1].isspace():
|
|
||||||
pos = pos[0], pos[1] - 1
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield self._get_path_until_cursor(start_pos=pos)
|
|
||||||
except StopIteration:
|
|
||||||
yield ''
|
|
||||||
pos = self._line_temp, self._column_temp
|
|
||||||
|
|
||||||
while True:
|
|
||||||
yield ''
|
|
||||||
|
|
||||||
def get_line(self, line_nr):
|
|
||||||
if not self._line_cache:
|
|
||||||
self._line_cache = self.source.split('\n')
|
|
||||||
|
|
||||||
if line_nr == 0:
|
|
||||||
# This is a fix for the zeroth line. We need a newline there, for
|
|
||||||
# the backwards parser.
|
|
||||||
return ''
|
|
||||||
if line_nr < 0:
|
|
||||||
raise StopIteration()
|
|
||||||
try:
|
|
||||||
return self._line_cache[line_nr - 1]
|
|
||||||
except IndexError:
|
|
||||||
raise StopIteration()
|
|
||||||
|
|
||||||
def get_part_parser(self):
|
|
||||||
""" Returns a parser that contains only part of the source code. This
|
|
||||||
exists only because of performance reasons.
|
|
||||||
"""
|
|
||||||
if self._part_parser:
|
|
||||||
return self._part_parser
|
|
||||||
|
|
||||||
# TODO check for docstrings
|
|
||||||
length = settings.part_line_length
|
|
||||||
offset = max(self.position[0] - length, 0)
|
|
||||||
s = '\n'.join(self.source.split('\n')[offset:offset + length])
|
|
||||||
self._part_parser = parsing.PyFuzzyParser(s, self.path, self.position,
|
|
||||||
line_offset=offset)
|
|
||||||
return self._part_parser
|
|
||||||
|
|
||||||
|
|
||||||
@evaluate.memoize_default([])
|
|
||||||
def sys_path_with_modifications(module):
|
|
||||||
def execute_code(code):
|
|
||||||
c = "import os; from os.path import *; result=%s"
|
|
||||||
variables = {'__file__': module.path}
|
|
||||||
try:
|
|
||||||
exec_function(c % code, variables)
|
|
||||||
except Exception:
|
|
||||||
debug.warning('sys path detected, but failed to evaluate')
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
res = variables['result']
|
|
||||||
if isinstance(res, str):
|
|
||||||
return os.path.abspath(res)
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
except KeyError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def check_module(module):
|
|
||||||
try:
|
|
||||||
possible_stmts = module.used_names['path']
|
|
||||||
except KeyError:
|
|
||||||
return builtin.get_sys_path()
|
|
||||||
|
|
||||||
sys_path = list(builtin.get_sys_path()) # copy
|
|
||||||
for p in possible_stmts:
|
|
||||||
try:
|
|
||||||
call = p.get_assignment_calls().get_only_subelement()
|
|
||||||
except AttributeError:
|
|
||||||
continue
|
|
||||||
n = call.name
|
|
||||||
if not isinstance(n, parsing.Name) or len(n.names) != 3:
|
|
||||||
continue
|
|
||||||
if n.names[:2] != ('sys', 'path'):
|
|
||||||
continue
|
|
||||||
array_cmd = n.names[2]
|
|
||||||
if call.execution is None:
|
|
||||||
continue
|
|
||||||
exe = call.execution
|
|
||||||
if not (array_cmd == 'insert' and len(exe) == 2
|
|
||||||
or array_cmd == 'append' and len(exe) == 1):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if array_cmd == 'insert':
|
|
||||||
exe_type, exe.type = exe.type, parsing.Array.NOARRAY
|
|
||||||
exe_pop = exe.values.pop(0)
|
|
||||||
res = execute_code(exe.get_code())
|
|
||||||
if res is not None:
|
|
||||||
sys_path.insert(0, res)
|
|
||||||
debug.dbg('sys path inserted: %s' % res)
|
|
||||||
exe.type = exe_type
|
|
||||||
exe.values.insert(0, exe_pop)
|
|
||||||
elif array_cmd == 'append':
|
|
||||||
res = execute_code(exe.get_code())
|
|
||||||
if res is not None:
|
|
||||||
sys_path.append(res)
|
|
||||||
debug.dbg('sys path added: %s' % res)
|
|
||||||
return sys_path
|
|
||||||
|
|
||||||
if module.path is None:
|
|
||||||
return [] # support for modules without a path is intentionally bad.
|
|
||||||
|
|
||||||
curdir = os.path.abspath(os.curdir)
|
|
||||||
try:
|
|
||||||
os.chdir(os.path.dirname(module.path))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
result = check_module(module)
|
|
||||||
result += detect_django_path(module.path)
|
|
||||||
|
|
||||||
# cleanup, back to old directory
|
|
||||||
os.chdir(curdir)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def detect_django_path(module_path):
|
|
||||||
""" Detects the path of the very well known Django library (if used) """
|
|
||||||
result = []
|
|
||||||
while True:
|
|
||||||
new = os.path.dirname(module_path)
|
|
||||||
# If the module_path doesn't change anymore, we're finished -> /
|
|
||||||
if new == module_path:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
module_path = new
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(module_path + os.path.sep + 'manage.py'):
|
|
||||||
debug.dbg('Found django path: %s' % module_path)
|
|
||||||
result.append(module_path)
|
|
||||||
except IOError:
|
|
||||||
pass
|
|
||||||
return result
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,87 +0,0 @@
|
||||||
# ----------------
|
|
||||||
# completion output settings
|
|
||||||
# ----------------
|
|
||||||
|
|
||||||
# The completion is by default case insensitive.
|
|
||||||
case_insensitive_completion = True
|
|
||||||
|
|
||||||
# Adds a dot after a module, because a module that is not accessed this way is
|
|
||||||
# definitely not the normal case. However, in VIM this doesn't work, that's why
|
|
||||||
# it isn't used at the moment.
|
|
||||||
add_dot_after_module = False
|
|
||||||
|
|
||||||
# Adds an opening bracket after a function, because that's normal behaviour.
|
|
||||||
# Removed it again, because in VIM that is not very practical.
|
|
||||||
add_bracket_after_function = False
|
|
||||||
|
|
||||||
|
|
||||||
# ----------------
|
|
||||||
# dynamic stuff
|
|
||||||
# ----------------
|
|
||||||
|
|
||||||
# check for `append`, etc. on array instances like list()
|
|
||||||
dynamic_arrays_instances = True
|
|
||||||
# check for `append`, etc. on arrays: [], {}, ()
|
|
||||||
dynamic_array_additions = True
|
|
||||||
|
|
||||||
# A dynamic param completion, finds the callees of the function, which define
|
|
||||||
# the params of a function.
|
|
||||||
dynamic_params = True
|
|
||||||
# Do the same for other modules.
|
|
||||||
dynamic_params_for_other_modules = True
|
|
||||||
|
|
||||||
# Additional modules in which Jedi checks if statements are to be found. This
|
|
||||||
# is practical for IDE's, that want to administrate their modules themselves.
|
|
||||||
additional_dynamic_modules = []
|
|
||||||
|
|
||||||
# ----------------
|
|
||||||
# recursions
|
|
||||||
# ----------------
|
|
||||||
|
|
||||||
# Recursion settings are important if you don't want extremly recursive python
|
|
||||||
# code to go absolutely crazy. First of there is a global limit
|
|
||||||
# `max_executions`. This limit is important, to set a maximum amount of time,
|
|
||||||
# the completion may use.
|
|
||||||
#
|
|
||||||
# The `max_until_execution_unique` limit is probably the most important one,
|
|
||||||
# because if that limit is passed, functions can only be one time executed. So
|
|
||||||
# new functions will be executed, complex recursions with the same functions
|
|
||||||
# again and again, are ignored.
|
|
||||||
#
|
|
||||||
# `max_function_recursion_level` is more about whether the recursions are
|
|
||||||
# stopped in deepth or in width. The ratio beetween this and
|
|
||||||
# `max_until_execution_unique` is important here. It stops a recursion (after
|
|
||||||
# the number of function calls in the recursion), if it was already used
|
|
||||||
# earlier.
|
|
||||||
#
|
|
||||||
# The values are based on my experimental tries, used on the jedi library. But
|
|
||||||
# I don't think there's any other Python library, that uses recursion in a
|
|
||||||
# similar (extreme) way. This makes the completion definitely worse in some
|
|
||||||
# cases. But a completion should also be fast.
|
|
||||||
|
|
||||||
max_function_recursion_level = 5
|
|
||||||
max_until_execution_unique = 50
|
|
||||||
max_executions_without_builtins = 200
|
|
||||||
max_executions = 250
|
|
||||||
|
|
||||||
# Because get_in_function_call is normally used on every single key hit, it has
|
|
||||||
# to be faster than a normal completion. This is the factor that is used to
|
|
||||||
# scale `max_executions` and `max_until_execution_unique`:
|
|
||||||
scale_get_in_function_call = 0.1
|
|
||||||
|
|
||||||
# ----------------
|
|
||||||
# various
|
|
||||||
# ----------------
|
|
||||||
|
|
||||||
# Size of the current code part, which is used to speed up parsing.
|
|
||||||
part_line_length = 20
|
|
||||||
|
|
||||||
# ----------------
|
|
||||||
# star import caching
|
|
||||||
# ----------------
|
|
||||||
|
|
||||||
# In huge packages like numpy, checking all star imports on every completion
|
|
||||||
# might be slow, therefore we do a star import caching, that lasts a certain
|
|
||||||
# time span (in seconds).
|
|
||||||
|
|
||||||
star_import_cache_validity = 60.0
|
|
|
@ -1,409 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from powerline.colorscheme import Colorscheme
|
|
||||||
from powerline.lib.config import ConfigLoader
|
|
||||||
|
|
||||||
from threading import Lock, Event
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_SYSTEM_CONFIG_DIR = None
|
|
||||||
|
|
||||||
|
|
||||||
def find_config_file(search_paths, config_file):
|
|
||||||
config_file += '.json'
|
|
||||||
for path in search_paths:
|
|
||||||
config_file_path = os.path.join(path, config_file)
|
|
||||||
if os.path.isfile(config_file_path):
|
|
||||||
return config_file_path
|
|
||||||
raise IOError('Config file not found in search path: {0}'.format(config_file))
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlineLogger(object):
|
|
||||||
def __init__(self, use_daemon_threads, logger, ext):
|
|
||||||
self.logger = logger
|
|
||||||
self.ext = ext
|
|
||||||
self.use_daemon_threads = use_daemon_threads
|
|
||||||
self.prefix = ''
|
|
||||||
self.last_msgs = {}
|
|
||||||
|
|
||||||
def _log(self, attr, msg, *args, **kwargs):
|
|
||||||
prefix = kwargs.get('prefix') or self.prefix
|
|
||||||
prefix = self.ext + ((':' + prefix) if prefix else '')
|
|
||||||
if args or kwargs:
|
|
||||||
msg = msg.format(*args, **kwargs)
|
|
||||||
msg = prefix + ':' + msg
|
|
||||||
key = attr + ':' + prefix
|
|
||||||
if msg != self.last_msgs.get(key):
|
|
||||||
getattr(self.logger, attr)(msg)
|
|
||||||
self.last_msgs[key] = msg
|
|
||||||
|
|
||||||
def critical(self, msg, *args, **kwargs):
|
|
||||||
self._log('critical', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
def exception(self, msg, *args, **kwargs):
|
|
||||||
self._log('exception', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
def info(self, msg, *args, **kwargs):
|
|
||||||
self._log('info', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
def error(self, msg, *args, **kwargs):
|
|
||||||
self._log('error', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
def warn(self, msg, *args, **kwargs):
|
|
||||||
self._log('warning', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
def debug(self, msg, *args, **kwargs):
|
|
||||||
self._log('debug', msg, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class Powerline(object):
|
|
||||||
'''Main powerline class, entrance point for all powerline uses. Sets
|
|
||||||
powerline up and loads the configuration.
|
|
||||||
|
|
||||||
:param str ext:
|
|
||||||
extension used. Determines where configuration files will
|
|
||||||
searched and what renderer module will be used. Affected: used ``ext``
|
|
||||||
dictionary from :file:`powerline/config.json`, location of themes and
|
|
||||||
colorschemes, render module (``powerline.renders.{ext}``).
|
|
||||||
:param str renderer_module:
|
|
||||||
Overrides renderer module (defaults to ``ext``). Should be the name of
|
|
||||||
the package imported like this: ``powerline.renders.{render_module}``.
|
|
||||||
If this parameter contains a dot, ``powerline.renderers.`` is not
|
|
||||||
prepended. There is also a special case for renderers defined in
|
|
||||||
toplevel modules: ``foo.`` (note: dot at the end) tries to get renderer
|
|
||||||
from module ``foo`` (because ``foo`` (without dot) tries to get renderer
|
|
||||||
from module ``powerline.renderers.foo``).
|
|
||||||
:param bool run_once:
|
|
||||||
Determines whether .renderer.render() method will be run only once
|
|
||||||
during python session.
|
|
||||||
:param Logger logger:
|
|
||||||
If present, no new logger will be created and this logger will be used.
|
|
||||||
:param bool use_daemon_threads:
|
|
||||||
Use daemon threads for.
|
|
||||||
:param Event shutdown_event:
|
|
||||||
Use this Event as shutdown_event.
|
|
||||||
:param ConfigLoader config_loader:
|
|
||||||
Class that manages (re)loading of configuration.
|
|
||||||
'''
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
ext,
|
|
||||||
renderer_module=None,
|
|
||||||
run_once=False,
|
|
||||||
logger=None,
|
|
||||||
use_daemon_threads=True,
|
|
||||||
shutdown_event=None,
|
|
||||||
config_loader=None):
|
|
||||||
self.ext = ext
|
|
||||||
self.renderer_module = renderer_module or ext
|
|
||||||
self.run_once = run_once
|
|
||||||
self.logger = logger
|
|
||||||
self.use_daemon_threads = use_daemon_threads
|
|
||||||
|
|
||||||
if '.' not in self.renderer_module:
|
|
||||||
self.renderer_module = 'powerline.renderers.' + self.renderer_module
|
|
||||||
elif self.renderer_module[-1] == '.':
|
|
||||||
self.renderer_module = self.renderer_module[:-1]
|
|
||||||
|
|
||||||
config_paths = self.get_config_paths()
|
|
||||||
self.find_config_file = lambda cfg_path: find_config_file(config_paths, cfg_path)
|
|
||||||
|
|
||||||
self.cr_kwargs_lock = Lock()
|
|
||||||
self.create_renderer_kwargs = {
|
|
||||||
'load_main': True,
|
|
||||||
'load_colors': True,
|
|
||||||
'load_colorscheme': True,
|
|
||||||
'load_theme': True,
|
|
||||||
}
|
|
||||||
self.shutdown_event = shutdown_event or Event()
|
|
||||||
self.config_loader = config_loader or ConfigLoader(shutdown_event=self.shutdown_event)
|
|
||||||
self.run_loader_update = False
|
|
||||||
|
|
||||||
self.renderer_options = {}
|
|
||||||
|
|
||||||
self.prev_common_config = None
|
|
||||||
self.prev_ext_config = None
|
|
||||||
self.pl = None
|
|
||||||
|
|
||||||
def create_renderer(self, load_main=False, load_colors=False, load_colorscheme=False, load_theme=False):
|
|
||||||
'''(Re)create renderer object. Can be used after Powerline object was
|
|
||||||
successfully initialized. If any of the below parameters except
|
|
||||||
``load_main`` is True renderer object will be recreated.
|
|
||||||
|
|
||||||
:param bool load_main:
|
|
||||||
Determines whether main configuration file (:file:`config.json`)
|
|
||||||
should be loaded. If appropriate configuration changes implies
|
|
||||||
``load_colorscheme`` and ``load_theme`` and recreation of renderer
|
|
||||||
object. Won’t trigger recreation if only unrelated configuration
|
|
||||||
changed.
|
|
||||||
:param bool load_colors:
|
|
||||||
Determines whether colors configuration from :file:`colors.json`
|
|
||||||
should be (re)loaded.
|
|
||||||
:param bool load_colorscheme:
|
|
||||||
Determines whether colorscheme configuration should be (re)loaded.
|
|
||||||
:param bool load_theme:
|
|
||||||
Determines whether theme configuration should be reloaded.
|
|
||||||
'''
|
|
||||||
common_config_differs = False
|
|
||||||
ext_config_differs = False
|
|
||||||
if load_main:
|
|
||||||
self._purge_configs('main')
|
|
||||||
config = self.load_main_config()
|
|
||||||
self.common_config = config['common']
|
|
||||||
if self.common_config != self.prev_common_config:
|
|
||||||
common_config_differs = True
|
|
||||||
self.prev_common_config = self.common_config
|
|
||||||
self.common_config['paths'] = [os.path.expanduser(path) for path in self.common_config.get('paths', [])]
|
|
||||||
self.import_paths = self.common_config['paths']
|
|
||||||
|
|
||||||
if not self.logger:
|
|
||||||
log_format = self.common_config.get('log_format', '%(asctime)s:%(levelname)s:%(message)s')
|
|
||||||
formatter = logging.Formatter(log_format)
|
|
||||||
|
|
||||||
level = getattr(logging, self.common_config.get('log_level', 'WARNING'))
|
|
||||||
handler = self.get_log_handler()
|
|
||||||
handler.setLevel(level)
|
|
||||||
handler.setFormatter(formatter)
|
|
||||||
|
|
||||||
self.logger = logging.getLogger('powerline')
|
|
||||||
self.logger.setLevel(level)
|
|
||||||
self.logger.addHandler(handler)
|
|
||||||
|
|
||||||
if not self.pl:
|
|
||||||
self.pl = PowerlineLogger(self.use_daemon_threads, self.logger, self.ext)
|
|
||||||
if not self.config_loader.pl:
|
|
||||||
self.config_loader.pl = self.pl
|
|
||||||
|
|
||||||
self.renderer_options.update(
|
|
||||||
pl=self.pl,
|
|
||||||
term_truecolor=self.common_config.get('term_truecolor', False),
|
|
||||||
ambiwidth=self.common_config.get('ambiwidth', 1),
|
|
||||||
tmux_escape=self.common_config.get('additional_escapes') == 'tmux',
|
|
||||||
screen_escape=self.common_config.get('additional_escapes') == 'screen',
|
|
||||||
theme_kwargs={
|
|
||||||
'ext': self.ext,
|
|
||||||
'common_config': self.common_config,
|
|
||||||
'run_once': self.run_once,
|
|
||||||
'shutdown_event': self.shutdown_event,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
if not self.run_once and self.common_config.get('reload_config', True):
|
|
||||||
interval = self.common_config.get('interval', None)
|
|
||||||
self.config_loader.set_interval(interval)
|
|
||||||
self.run_loader_update = (interval is None)
|
|
||||||
if interval is not None and not self.config_loader.is_alive():
|
|
||||||
self.config_loader.start()
|
|
||||||
|
|
||||||
self.ext_config = config['ext'][self.ext]
|
|
||||||
if self.ext_config != self.prev_ext_config:
|
|
||||||
ext_config_differs = True
|
|
||||||
if not self.prev_ext_config or self.ext_config.get('local_themes') != self.prev_ext_config.get('local_themes'):
|
|
||||||
self.renderer_options['local_themes'] = self.get_local_themes(self.ext_config.get('local_themes'))
|
|
||||||
load_colorscheme = (load_colorscheme
|
|
||||||
or not self.prev_ext_config
|
|
||||||
or self.prev_ext_config['colorscheme'] != self.ext_config['colorscheme'])
|
|
||||||
load_theme = (load_theme
|
|
||||||
or not self.prev_ext_config
|
|
||||||
or self.prev_ext_config['theme'] != self.ext_config['theme'])
|
|
||||||
self.prev_ext_config = self.ext_config
|
|
||||||
|
|
||||||
create_renderer = load_colors or load_colorscheme or load_theme or common_config_differs or ext_config_differs
|
|
||||||
|
|
||||||
if load_colors:
|
|
||||||
self._purge_configs('colors')
|
|
||||||
self.colors_config = self.load_colors_config()
|
|
||||||
|
|
||||||
if load_colorscheme or load_colors:
|
|
||||||
self._purge_configs('colorscheme')
|
|
||||||
if load_colorscheme:
|
|
||||||
self.colorscheme_config = self.load_colorscheme_config(self.ext_config['colorscheme'])
|
|
||||||
self.renderer_options['colorscheme'] = Colorscheme(self.colorscheme_config, self.colors_config)
|
|
||||||
|
|
||||||
if load_theme:
|
|
||||||
self._purge_configs('theme')
|
|
||||||
self.renderer_options['theme_config'] = self.load_theme_config(self.ext_config.get('theme', 'default'))
|
|
||||||
|
|
||||||
if create_renderer:
|
|
||||||
try:
|
|
||||||
Renderer = __import__(self.renderer_module, fromlist=['renderer']).renderer
|
|
||||||
except Exception as e:
|
|
||||||
self.pl.exception('Failed to import renderer module: {0}', str(e))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# Renderer updates configuration file via segments’ .startup thus it
|
|
||||||
# should be locked to prevent state when configuration was updated,
|
|
||||||
# but .render still uses old renderer.
|
|
||||||
try:
|
|
||||||
renderer = Renderer(**self.renderer_options)
|
|
||||||
except Exception as e:
|
|
||||||
self.pl.exception('Failed to construct renderer object: {0}', str(e))
|
|
||||||
if not hasattr(self, 'renderer'):
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
self.renderer = renderer
|
|
||||||
|
|
||||||
def get_log_handler(self):
|
|
||||||
'''Get log handler.
|
|
||||||
|
|
||||||
:param dict common_config:
|
|
||||||
Common configuration.
|
|
||||||
|
|
||||||
:return: logging.Handler subclass.
|
|
||||||
'''
|
|
||||||
log_file = self.common_config.get('log_file', None)
|
|
||||||
if log_file:
|
|
||||||
log_file = os.path.expanduser(log_file)
|
|
||||||
log_dir = os.path.dirname(log_file)
|
|
||||||
if not os.path.isdir(log_dir):
|
|
||||||
os.mkdir(log_dir)
|
|
||||||
return logging.FileHandler(log_file)
|
|
||||||
else:
|
|
||||||
return logging.StreamHandler()
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_config_paths():
|
|
||||||
'''Get configuration paths.
|
|
||||||
|
|
||||||
:return: list of paths
|
|
||||||
'''
|
|
||||||
config_home = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
|
|
||||||
config_path = os.path.join(config_home, 'powerline')
|
|
||||||
config_paths = [config_path]
|
|
||||||
config_dirs = os.environ.get('XDG_CONFIG_DIRS', DEFAULT_SYSTEM_CONFIG_DIR)
|
|
||||||
if config_dirs is not None:
|
|
||||||
config_paths.extend([os.path.join(d, 'powerline') for d in config_dirs.split(':')])
|
|
||||||
plugin_path = os.path.join(os.path.realpath(os.path.dirname(__file__)), 'config_files')
|
|
||||||
config_paths.append(plugin_path)
|
|
||||||
return config_paths
|
|
||||||
|
|
||||||
def _load_config(self, cfg_path, type):
|
|
||||||
'''Load configuration and setup watches.'''
|
|
||||||
function = getattr(self, 'on_' + type + '_change')
|
|
||||||
try:
|
|
||||||
path = self.find_config_file(cfg_path)
|
|
||||||
except IOError:
|
|
||||||
self.config_loader.register_missing(self.find_config_file, function, cfg_path)
|
|
||||||
raise
|
|
||||||
self.config_loader.register(function, path)
|
|
||||||
return self.config_loader.load(path)
|
|
||||||
|
|
||||||
def _purge_configs(self, type):
|
|
||||||
function = getattr(self, 'on_' + type + '_change')
|
|
||||||
self.config_loader.unregister_functions(set((function,)))
|
|
||||||
self.config_loader.unregister_missing(set(((self.find_config_file, function),)))
|
|
||||||
|
|
||||||
def load_theme_config(self, name):
|
|
||||||
'''Get theme configuration.
|
|
||||||
|
|
||||||
:param str name:
|
|
||||||
Name of the theme to load.
|
|
||||||
|
|
||||||
:return: dictionary with :ref:`theme configuration <config-themes>`
|
|
||||||
'''
|
|
||||||
return self._load_config(os.path.join('themes', self.ext, name), 'theme')
|
|
||||||
|
|
||||||
def load_main_config(self):
|
|
||||||
'''Get top-level configuration.
|
|
||||||
|
|
||||||
:return: dictionary with :ref:`top-level configuration <config-main>`.
|
|
||||||
'''
|
|
||||||
return self._load_config('config', 'main')
|
|
||||||
|
|
||||||
def load_colorscheme_config(self, name):
|
|
||||||
'''Get colorscheme.
|
|
||||||
|
|
||||||
:param str name:
|
|
||||||
Name of the colorscheme to load.
|
|
||||||
|
|
||||||
:return: dictionary with :ref:`colorscheme configuration <config-colorschemes>`.
|
|
||||||
'''
|
|
||||||
return self._load_config(os.path.join('colorschemes', self.ext, name), 'colorscheme')
|
|
||||||
|
|
||||||
def load_colors_config(self):
|
|
||||||
'''Get colorscheme.
|
|
||||||
|
|
||||||
:return: dictionary with :ref:`colors configuration <config-colors>`.
|
|
||||||
'''
|
|
||||||
return self._load_config('colors', 'colors')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_local_themes(local_themes):
|
|
||||||
'''Get local themes. No-op here, to be overridden in subclasses if
|
|
||||||
required.
|
|
||||||
|
|
||||||
:param dict local_themes:
|
|
||||||
Usually accepts ``{matcher_name : theme_name}``. May also receive
|
|
||||||
None in case there is no local_themes configuration.
|
|
||||||
|
|
||||||
:return:
|
|
||||||
anything accepted by ``self.renderer.get_theme`` and processable by
|
|
||||||
``self.renderer.add_local_theme``. Renderer module is determined by
|
|
||||||
``__init__`` arguments, refer to its documentation.
|
|
||||||
'''
|
|
||||||
return None
|
|
||||||
|
|
||||||
def update_renderer(self):
|
|
||||||
'''Updates/creates a renderer if needed.'''
|
|
||||||
if self.run_loader_update:
|
|
||||||
self.config_loader.update()
|
|
||||||
create_renderer_kwargs = None
|
|
||||||
with self.cr_kwargs_lock:
|
|
||||||
if self.create_renderer_kwargs:
|
|
||||||
create_renderer_kwargs = self.create_renderer_kwargs.copy()
|
|
||||||
if create_renderer_kwargs:
|
|
||||||
try:
|
|
||||||
self.create_renderer(**create_renderer_kwargs)
|
|
||||||
except Exception as e:
|
|
||||||
self.pl.exception('Failed to create renderer: {0}', str(e))
|
|
||||||
finally:
|
|
||||||
self.create_renderer_kwargs.clear()
|
|
||||||
|
|
||||||
def render(self, *args, **kwargs):
|
|
||||||
'''Update/create renderer if needed and pass all arguments further to
|
|
||||||
``self.renderer.render()``.
|
|
||||||
'''
|
|
||||||
self.update_renderer()
|
|
||||||
return self.renderer.render(*args, **kwargs)
|
|
||||||
|
|
||||||
def shutdown(self):
|
|
||||||
'''Shut down all background threads. Must be run only prior to exiting
|
|
||||||
current application.
|
|
||||||
'''
|
|
||||||
self.shutdown_event.set()
|
|
||||||
self.renderer.shutdown()
|
|
||||||
functions = (
|
|
||||||
self.on_main_change,
|
|
||||||
self.on_colors_change,
|
|
||||||
self.on_colorscheme_change,
|
|
||||||
self.on_theme_change,
|
|
||||||
)
|
|
||||||
self.config_loader.unregister_functions(set(functions))
|
|
||||||
self.config_loader.unregister_missing(set(((find_config_file, function) for function in functions)))
|
|
||||||
|
|
||||||
def on_main_change(self, path):
|
|
||||||
with self.cr_kwargs_lock:
|
|
||||||
self.create_renderer_kwargs['load_main'] = True
|
|
||||||
|
|
||||||
def on_colors_change(self, path):
|
|
||||||
with self.cr_kwargs_lock:
|
|
||||||
self.create_renderer_kwargs['load_colors'] = True
|
|
||||||
|
|
||||||
def on_colorscheme_change(self, path):
|
|
||||||
with self.cr_kwargs_lock:
|
|
||||||
self.create_renderer_kwargs['load_colorscheme'] = True
|
|
||||||
|
|
||||||
def on_theme_change(self, path):
|
|
||||||
with self.cr_kwargs_lock:
|
|
||||||
self.create_renderer_kwargs['load_theme'] = True
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, *args):
|
|
||||||
self.shutdown()
|
|
|
@ -1,38 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from powerline import Powerline
|
|
||||||
import sys
|
|
||||||
from time import sleep
|
|
||||||
from powerline.lib.monotonic import monotonic
|
|
||||||
from subprocess import Popen, PIPE
|
|
||||||
|
|
||||||
powerline = Powerline('wm', renderer_module='pango_markup')
|
|
||||||
powerline.update_renderer()
|
|
||||||
|
|
||||||
try:
|
|
||||||
interval = float(sys.argv[1])
|
|
||||||
except IndexError:
|
|
||||||
interval = 2
|
|
||||||
|
|
||||||
|
|
||||||
def read_to_log(pl, client):
|
|
||||||
for line in client.stdout:
|
|
||||||
if line:
|
|
||||||
pl.info(line, prefix='awesome-client')
|
|
||||||
for line in client.stderr:
|
|
||||||
if line:
|
|
||||||
pl.error(line, prefix='awesome-client')
|
|
||||||
if client.wait():
|
|
||||||
pl.error('Client exited with {0}', client.returncode, prefix='awesome')
|
|
||||||
|
|
||||||
|
|
||||||
while True:
|
|
||||||
start_time = monotonic()
|
|
||||||
s = powerline.render(side='right')
|
|
||||||
request = "powerline_widget:set_markup('" + s.replace('\\', '\\\\').replace("'", "\\'") + "')\n"
|
|
||||||
client = Popen(['awesome-client'], shell=False, stdout=PIPE, stderr=PIPE, stdin=PIPE)
|
|
||||||
client.stdin.write(request.encode('utf-8'))
|
|
||||||
client.stdin.close()
|
|
||||||
read_to_log(powerline.pl, client)
|
|
||||||
sleep(max(interval - (monotonic() - start_time), 0.1))
|
|
|
@ -1,11 +0,0 @@
|
||||||
local wibox = require('wibox')
|
|
||||||
local awful = require('awful')
|
|
||||||
|
|
||||||
powerline_widget = wibox.widget.textbox()
|
|
||||||
powerline_widget:set_align('right')
|
|
||||||
|
|
||||||
function powerline(mode, widget) end
|
|
||||||
|
|
||||||
bindings_path = string.gsub(debug.getinfo(1).source:match('@(.*)$'), '/[^/]+$', '')
|
|
||||||
powerline_cmd = bindings_path .. '/powerline-awesome.py'
|
|
||||||
awful.util.spawn_with_shell('ps -C powerline-awesome.py || ' .. powerline_cmd)
|
|
|
@ -1,28 +0,0 @@
|
||||||
_powerline_tmux_setenv() {
|
|
||||||
if [[ -n "$TMUX" ]]; then
|
|
||||||
tmux setenv TMUX_"$1"_$(tmux display -p "#D" | tr -d %) "$2"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_tmux_set_pwd() {
|
|
||||||
_powerline_tmux_setenv PWD "$PWD"
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_tmux_set_columns() {
|
|
||||||
_powerline_tmux_setenv COLUMNS "$COLUMNS"
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_prompt() {
|
|
||||||
local last_exit_code=$?
|
|
||||||
[[ -z "$POWERLINE_OLD_PROMPT_COMMAND" ]] ||
|
|
||||||
eval $POWERLINE_OLD_PROMPT_COMMAND
|
|
||||||
PS1="$(powerline shell left -r bash_prompt --last_exit_code=$last_exit_code)"
|
|
||||||
_powerline_tmux_set_pwd
|
|
||||||
}
|
|
||||||
|
|
||||||
trap "_powerline_tmux_set_columns" SIGWINCH
|
|
||||||
_powerline_tmux_set_columns
|
|
||||||
|
|
||||||
[[ "$PROMPT_COMMAND" == "_powerline_prompt" ]] ||
|
|
||||||
POWERLINE_OLD_PROMPT_COMMAND="$PROMPT_COMMAND"
|
|
||||||
export PROMPT_COMMAND="_powerline_prompt"
|
|
|
@ -1,61 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from powerline.ipython import IpythonPowerline
|
|
||||||
|
|
||||||
from IPython.core.prompts import PromptManager
|
|
||||||
from IPython.core.hooks import TryNext
|
|
||||||
|
|
||||||
|
|
||||||
class IpythonInfo(object):
|
|
||||||
def __init__(self, shell):
|
|
||||||
self._shell = shell
|
|
||||||
|
|
||||||
@property
|
|
||||||
def prompt_count(self):
|
|
||||||
return self._shell.execution_count
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlinePromptManager(PromptManager):
|
|
||||||
powerline = None
|
|
||||||
|
|
||||||
def __init__(self, powerline, shell):
|
|
||||||
self.powerline = powerline
|
|
||||||
self.powerline_segment_info = IpythonInfo(shell)
|
|
||||||
self.shell = shell
|
|
||||||
|
|
||||||
def render(self, name, color=True, *args, **kwargs):
|
|
||||||
width = None if name == 'in' else self.width
|
|
||||||
res, res_nocolor = self.powerline.render(output_raw=True, width=width, matcher_info=name, segment_info=self.powerline_segment_info)
|
|
||||||
self.txtwidth = len(res_nocolor)
|
|
||||||
self.width = self.txtwidth
|
|
||||||
return res if color else res_nocolor
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigurableIpythonPowerline(IpythonPowerline):
|
|
||||||
def __init__(self, ip):
|
|
||||||
config = ip.config.Powerline
|
|
||||||
self.config_overrides = config.get('config_overrides')
|
|
||||||
self.theme_overrides = config.get('theme_overrides', {})
|
|
||||||
self.path = config.get('path')
|
|
||||||
super(ConfigurableIpythonPowerline, self).__init__()
|
|
||||||
|
|
||||||
|
|
||||||
old_prompt_manager = None
|
|
||||||
|
|
||||||
|
|
||||||
def load_ipython_extension(ip):
|
|
||||||
global old_prompt_manager
|
|
||||||
|
|
||||||
old_prompt_manager = ip.prompt_manager
|
|
||||||
powerline = ConfigurableIpythonPowerline(ip)
|
|
||||||
|
|
||||||
ip.prompt_manager = PowerlinePromptManager(powerline=powerline, shell=ip.prompt_manager.shell)
|
|
||||||
|
|
||||||
def shutdown_hook():
|
|
||||||
powerline.shutdown()
|
|
||||||
raise TryNext()
|
|
||||||
|
|
||||||
ip.hooks.shutdown_hook.add(shutdown_hook)
|
|
||||||
|
|
||||||
|
|
||||||
def unload_ipython_extension(ip):
|
|
||||||
ip.prompt_manager = old_prompt_manager
|
|
|
@ -1,135 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from powerline.ipython import IpythonPowerline
|
|
||||||
from IPython.Prompts import BasePrompt
|
|
||||||
from IPython.ipapi import get as get_ipython
|
|
||||||
from IPython.ipapi import TryNext
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
|
|
||||||
def string(s):
|
|
||||||
if type(s) is not str:
|
|
||||||
return s.encode('utf-8')
|
|
||||||
else:
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
# HACK: ipython tries to only leave us with plain ASCII
|
|
||||||
class RewriteResult(object):
|
|
||||||
def __init__(self, prompt):
|
|
||||||
self.prompt = string(prompt)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.prompt
|
|
||||||
|
|
||||||
def __add__(self, s):
|
|
||||||
if type(s) is not str:
|
|
||||||
try:
|
|
||||||
s = s.encode('utf-8')
|
|
||||||
except AttributeError:
|
|
||||||
raise NotImplementedError
|
|
||||||
return RewriteResult(self.prompt + s)
|
|
||||||
|
|
||||||
|
|
||||||
class IpythonInfo(object):
|
|
||||||
def __init__(self, cache):
|
|
||||||
self._cache = cache
|
|
||||||
|
|
||||||
@property
|
|
||||||
def prompt_count(self):
|
|
||||||
return self._cache.prompt_count
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlinePrompt(BasePrompt):
|
|
||||||
def __init__(self, powerline, powerline_last_in, old_prompt):
|
|
||||||
self.powerline = powerline
|
|
||||||
self.powerline_last_in = powerline_last_in
|
|
||||||
self.powerline_segment_info = IpythonInfo(old_prompt.cache)
|
|
||||||
self.cache = old_prompt.cache
|
|
||||||
if hasattr(old_prompt, 'sep'):
|
|
||||||
self.sep = old_prompt.sep
|
|
||||||
self.pad_left = False
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
self.set_p_str()
|
|
||||||
return string(self.p_str)
|
|
||||||
|
|
||||||
def set_p_str(self, width=None):
|
|
||||||
self.p_str, self.p_str_nocolor = (
|
|
||||||
self.powerline.render(output_raw=True,
|
|
||||||
segment_info=self.powerline_segment_info,
|
|
||||||
matcher_info=self.powerline_prompt_type,
|
|
||||||
width=width)
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def set_colors():
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlinePrompt1(PowerlinePrompt):
|
|
||||||
powerline_prompt_type = 'in'
|
|
||||||
rspace = re.compile(r'(\s*)$')
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
self.cache.prompt_count += 1
|
|
||||||
self.set_p_str()
|
|
||||||
self.cache.last_prompt = self.p_str_nocolor.split('\n')[-1]
|
|
||||||
return string(self.p_str)
|
|
||||||
|
|
||||||
def set_p_str(self):
|
|
||||||
super(PowerlinePrompt1, self).set_p_str()
|
|
||||||
self.nrspaces = len(self.rspace.search(self.p_str_nocolor).group())
|
|
||||||
self.prompt_text_len = len(self.p_str_nocolor) - self.nrspaces
|
|
||||||
self.powerline_last_in['nrspaces'] = self.nrspaces
|
|
||||||
self.powerline_last_in['prompt_text_len'] = self.prompt_text_len
|
|
||||||
|
|
||||||
def auto_rewrite(self):
|
|
||||||
return RewriteResult(self.powerline.render(matcher_info='rewrite', width=self.prompt_text_len, segment_info=self.powerline_segment_info)
|
|
||||||
+ (' ' * self.nrspaces))
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlinePromptOut(PowerlinePrompt):
|
|
||||||
powerline_prompt_type = 'out'
|
|
||||||
|
|
||||||
def set_p_str(self):
|
|
||||||
super(PowerlinePromptOut, self).set_p_str(width=self.powerline_last_in['prompt_text_len'])
|
|
||||||
spaces = ' ' * self.powerline_last_in['nrspaces']
|
|
||||||
self.p_str += spaces
|
|
||||||
self.p_str_nocolor += spaces
|
|
||||||
|
|
||||||
|
|
||||||
class PowerlinePrompt2(PowerlinePromptOut):
|
|
||||||
powerline_prompt_type = 'in2'
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigurableIpythonPowerline(IpythonPowerline):
|
|
||||||
def __init__(self, config_overrides=None, theme_overrides={}, path=None):
|
|
||||||
self.config_overrides = config_overrides
|
|
||||||
self.theme_overrides = theme_overrides
|
|
||||||
self.path = path
|
|
||||||
super(ConfigurableIpythonPowerline, self).__init__()
|
|
||||||
|
|
||||||
|
|
||||||
def setup(**kwargs):
|
|
||||||
ip = get_ipython()
|
|
||||||
|
|
||||||
powerline = ConfigurableIpythonPowerline(**kwargs)
|
|
||||||
|
|
||||||
def late_startup_hook():
|
|
||||||
last_in = {'nrspaces': 0, 'prompt_text_len': None}
|
|
||||||
for attr, prompt_class in (
|
|
||||||
('prompt1', PowerlinePrompt1),
|
|
||||||
('prompt2', PowerlinePrompt2),
|
|
||||||
('prompt_out', PowerlinePromptOut)
|
|
||||||
):
|
|
||||||
old_prompt = getattr(ip.IP.outputcache, attr)
|
|
||||||
setattr(ip.IP.outputcache, attr, prompt_class(powerline, last_in, old_prompt))
|
|
||||||
raise TryNext()
|
|
||||||
|
|
||||||
def shutdown_hook():
|
|
||||||
powerline.shutdown()
|
|
||||||
raise TryNext()
|
|
||||||
|
|
||||||
ip.IP.hooks.late_startup_hook.add(late_startup_hook)
|
|
||||||
ip.IP.hooks.shutdown_hook.add(shutdown_hook)
|
|
|
@ -1,36 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from libqtile import bar
|
|
||||||
from libqtile.widget import base
|
|
||||||
|
|
||||||
from powerline import Powerline as PowerlineCore
|
|
||||||
|
|
||||||
|
|
||||||
class Powerline(base._TextBox):
|
|
||||||
def __init__(self, timeout=2, text=" ", width=bar.CALCULATED, **config):
|
|
||||||
base._TextBox.__init__(self, text, width, **config)
|
|
||||||
self.timeout_add(timeout, self.update)
|
|
||||||
self.powerline = PowerlineCore(ext='wm', renderer_module='pango_markup')
|
|
||||||
|
|
||||||
def update(self):
|
|
||||||
if not self.configured:
|
|
||||||
return True
|
|
||||||
self.text = self.powerline.render(side='right')
|
|
||||||
self.bar.draw()
|
|
||||||
return True
|
|
||||||
|
|
||||||
def cmd_update(self, text):
|
|
||||||
self.update(text)
|
|
||||||
|
|
||||||
def cmd_get(self):
|
|
||||||
return self.text
|
|
||||||
|
|
||||||
def _configure(self, qtile, bar):
|
|
||||||
base._TextBox._configure(self, qtile, bar)
|
|
||||||
self.layout = self.drawer.textlayout(
|
|
||||||
self.text,
|
|
||||||
self.foreground,
|
|
||||||
self.font,
|
|
||||||
self.fontsize,
|
|
||||||
self.fontshadow,
|
|
||||||
markup=True)
|
|
|
@ -1,11 +0,0 @@
|
||||||
set -g status on
|
|
||||||
set -g status-utf8 on
|
|
||||||
set -g status-interval 2
|
|
||||||
set -g status-fg colour231
|
|
||||||
set -g status-bg colour234
|
|
||||||
set -g status-left-length 20
|
|
||||||
set -g status-left '#[fg=colour16,bg=colour254,bold] #S #[fg=colour254,bg=colour234,nobold]#(powerline tmux left)'
|
|
||||||
set -g status-right '#(powerline tmux right)'
|
|
||||||
set -g status-right-length 150
|
|
||||||
set -g window-status-format "#[fg=colour244,bg=colour234]#I #[fg=colour240] #[fg=colour249]#W "
|
|
||||||
set -g window-status-current-format "#[fg=colour234,bg=colour31]#[fg=colour117,bg=colour31] #I #[fg=colour231,bold]#W #[fg=colour31,bg=colour234,nobold]"
|
|
|
@ -1,64 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import vim
|
|
||||||
except ImportError:
|
|
||||||
vim = {}
|
|
||||||
|
|
||||||
try:
|
|
||||||
_vim_globals = vim.bindeval('g:')
|
|
||||||
|
|
||||||
def vim_set_global_var(var, val):
|
|
||||||
'''Set a global var in vim using bindeval().'''
|
|
||||||
_vim_globals[var] = val
|
|
||||||
|
|
||||||
def vim_get_func(f, rettype=None):
|
|
||||||
'''Return a vim function binding.'''
|
|
||||||
try:
|
|
||||||
func = vim.bindeval('function("' + f + '")')
|
|
||||||
if sys.version_info >= (3,) and rettype is str:
|
|
||||||
return (lambda *args, **kwargs: func(*args, **kwargs).decode('utf-8', errors='replace'))
|
|
||||||
return func
|
|
||||||
except vim.error:
|
|
||||||
return None
|
|
||||||
except AttributeError:
|
|
||||||
import json
|
|
||||||
|
|
||||||
def vim_set_global_var(var, val): # NOQA
|
|
||||||
'''Set a global var in vim using vim.command().
|
|
||||||
|
|
||||||
This is a fallback function for older vim versions.
|
|
||||||
'''
|
|
||||||
vim.command('let g:{0}={1}'.format(var, json.dumps(val)))
|
|
||||||
|
|
||||||
class VimFunc(object):
|
|
||||||
'''Evaluate a vim function using vim.eval().
|
|
||||||
|
|
||||||
This is a fallback class for older vim versions.
|
|
||||||
'''
|
|
||||||
__slots__ = ('f', 'rettype')
|
|
||||||
|
|
||||||
def __init__(self, f, rettype=None):
|
|
||||||
self.f = f
|
|
||||||
self.rettype = rettype
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
r = vim.eval(self.f + '(' + json.dumps(args)[1:-1] + ')')
|
|
||||||
if self.rettype:
|
|
||||||
return self.rettype(r)
|
|
||||||
return r
|
|
||||||
|
|
||||||
vim_get_func = VimFunc
|
|
||||||
|
|
||||||
if sys.version_info < (3,) or not hasattr(vim, 'bindeval'):
|
|
||||||
getbufvar = vim_get_func('getbufvar')
|
|
||||||
else:
|
|
||||||
_getbufvar = vim_get_func('getbufvar')
|
|
||||||
|
|
||||||
def getbufvar(*args):
|
|
||||||
r = _getbufvar(*args)
|
|
||||||
if type(r) is bytes:
|
|
||||||
return r.decode('utf-8')
|
|
||||||
return r
|
|
|
@ -1,95 +0,0 @@
|
||||||
if exists('g:powerline_loaded')
|
|
||||||
finish
|
|
||||||
endif
|
|
||||||
let g:powerline_loaded = 1
|
|
||||||
|
|
||||||
function! s:CriticalError(message)
|
|
||||||
echohl ErrorMsg
|
|
||||||
echomsg a:message
|
|
||||||
echohl None
|
|
||||||
endfunction
|
|
||||||
|
|
||||||
if ! has('python') && ! has('python3')
|
|
||||||
call s:CriticalError('You need vim compiled with Python 2.6+ or 3.2+ support
|
|
||||||
\ for Powerline to work. Please consult the documentation for more details.')
|
|
||||||
finish
|
|
||||||
endif
|
|
||||||
|
|
||||||
let s:powerline_pycmd = substitute(get(g:, 'powerline_pycmd', has('python') ? 'py' : 'py3'),
|
|
||||||
\'\v^(py)%[thon](3?)$', '\1\2', '')
|
|
||||||
let s:powerline_pyeval = get(g:, 'powerline_pyeval', s:powerline_pycmd.'eval')
|
|
||||||
|
|
||||||
let s:import_cmd = 'from powerline.vim import VimPowerline'
|
|
||||||
try
|
|
||||||
exec s:powerline_pycmd "try:\n"
|
|
||||||
\ ." ".s:import_cmd."\n"
|
|
||||||
\ ."except ImportError:\n"
|
|
||||||
\ ." import sys, vim\n"
|
|
||||||
\ ." sys.path.append(vim.eval('expand(\"<sfile>:h:h:h:h:h\")'))\n"
|
|
||||||
\ ." ".s:import_cmd
|
|
||||||
let s:launched = 1
|
|
||||||
finally
|
|
||||||
if !exists('s:launched')
|
|
||||||
call s:CriticalError('An error occurred while importing the Powerline package.
|
|
||||||
\ This could be caused by an invalid sys.path setting, or by an incompatible
|
|
||||||
\ Python version (Powerline requires Python 2.6+ or 3.2+ to work). Please consult
|
|
||||||
\ the troubleshooting section in the documentation for possible solutions.')
|
|
||||||
finish
|
|
||||||
else
|
|
||||||
unlet s:launched
|
|
||||||
endif
|
|
||||||
endtry
|
|
||||||
|
|
||||||
if !get(g:, 'powerline_debugging_pyeval') && exists('*'. s:powerline_pyeval)
|
|
||||||
let s:pyeval = function(s:powerline_pyeval)
|
|
||||||
else
|
|
||||||
exec s:powerline_pycmd 'import json, vim'
|
|
||||||
exec "function! s:pyeval(e)\n".
|
|
||||||
\ s:powerline_pycmd." vim.command('return ' + json.dumps(eval(vim.eval('a:e'))))\n".
|
|
||||||
\"endfunction"
|
|
||||||
endif
|
|
||||||
|
|
||||||
let s:last_window_id = 0
|
|
||||||
function! s:GetWinID(winnr)
|
|
||||||
let r = getwinvar(a:winnr, 'window_id')
|
|
||||||
if empty(r)
|
|
||||||
let r = s:last_window_id
|
|
||||||
let s:last_window_id += 1
|
|
||||||
call setwinvar(a:winnr, 'window_id', r)
|
|
||||||
endif
|
|
||||||
" Without this condition it triggers unneeded statusline redraw
|
|
||||||
if getwinvar(a:winnr, '&statusline') isnot# '%!Powerline('.r.')'
|
|
||||||
call setwinvar(a:winnr, '&statusline', '%!Powerline('.r.')')
|
|
||||||
endif
|
|
||||||
return r
|
|
||||||
endfunction
|
|
||||||
|
|
||||||
function! Powerline(window_id)
|
|
||||||
let winidx = index(map(range(1, winnr('$')), 's:GetWinID(v:val)'), a:window_id)
|
|
||||||
let current = w:window_id is# a:window_id
|
|
||||||
return s:pyeval('powerline.render('. a:window_id .', '. winidx .', '. current .')')
|
|
||||||
endfunction
|
|
||||||
|
|
||||||
function! PowerlineNew()
|
|
||||||
call map(range(1, winnr('$')), 's:GetWinID(v:val)')
|
|
||||||
endfunction
|
|
||||||
|
|
||||||
function! PowerlineRegisterCachePurgerEvent(event)
|
|
||||||
exec s:powerline_pycmd 'from powerline.segments.vim import launchevent as powerline_launchevent'
|
|
||||||
augroup Powerline
|
|
||||||
exec 'autocmd' a:event '*' s:powerline_pycmd.' powerline_launchevent("'.a:event.'")'
|
|
||||||
augroup END
|
|
||||||
endfunction
|
|
||||||
|
|
||||||
augroup Powerline
|
|
||||||
autocmd! ColorScheme * :exec s:powerline_pycmd 'powerline.reset_highlight()'
|
|
||||||
autocmd! VimEnter * :redrawstatus!
|
|
||||||
autocmd! VimLeavePre * :exec s:powerline_pycmd 'powerline.shutdown()'
|
|
||||||
augroup END
|
|
||||||
|
|
||||||
exec s:powerline_pycmd 'powerline = VimPowerline()'
|
|
||||||
exec s:powerline_pycmd 'del VimPowerline'
|
|
||||||
" Is immediately changed when PowerlineNew() function is run. Good for global
|
|
||||||
" value.
|
|
||||||
set statusline=%!PowerlineNew()
|
|
||||||
call PowerlineNew()
|
|
|
@ -1,128 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
import zsh
|
|
||||||
import atexit
|
|
||||||
from powerline.shell import ShellPowerline
|
|
||||||
from powerline.lib import parsedotval
|
|
||||||
|
|
||||||
|
|
||||||
used_powerlines = []
|
|
||||||
|
|
||||||
|
|
||||||
def shutdown():
|
|
||||||
for powerline in used_powerlines:
|
|
||||||
powerline.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
def get_var_config(var):
|
|
||||||
try:
|
|
||||||
return [parsedotval(i) for i in zsh.getvalue(var).items()]
|
|
||||||
except:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class Args(object):
|
|
||||||
ext = ['shell']
|
|
||||||
renderer_module = 'zsh_prompt'
|
|
||||||
|
|
||||||
@property
|
|
||||||
def last_exit_code(self):
|
|
||||||
return zsh.last_exit_code()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def last_pipe_status(self):
|
|
||||||
return zsh.pipestatus()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config(self):
|
|
||||||
try:
|
|
||||||
return get_var_config('POWERLINE_CONFIG')
|
|
||||||
except IndexError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def theme_option(self):
|
|
||||||
try:
|
|
||||||
return get_var_config('POWERLINE_THEME_CONFIG')
|
|
||||||
except IndexError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config_path(self):
|
|
||||||
try:
|
|
||||||
return zsh.getvalue('POWERLINE_CONFIG_PATH')
|
|
||||||
except IndexError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def string(s):
|
|
||||||
if type(s) is bytes:
|
|
||||||
return s.decode('utf-8', errors='replace')
|
|
||||||
else:
|
|
||||||
return str(s)
|
|
||||||
|
|
||||||
|
|
||||||
class Environment(object):
|
|
||||||
@staticmethod
|
|
||||||
def __getitem__(key):
|
|
||||||
try:
|
|
||||||
return string(zsh.getvalue(key))
|
|
||||||
except IndexError as e:
|
|
||||||
raise KeyError(*e.args)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get(key, default=None):
|
|
||||||
try:
|
|
||||||
return string(zsh.getvalue(key))
|
|
||||||
except IndexError:
|
|
||||||
return default
|
|
||||||
|
|
||||||
|
|
||||||
environ = Environment()
|
|
||||||
|
|
||||||
|
|
||||||
class Prompt(object):
|
|
||||||
__slots__ = ('powerline', 'side', 'savedpsvar', 'savedps', 'args')
|
|
||||||
|
|
||||||
def __init__(self, powerline, side, savedpsvar=None, savedps=None):
|
|
||||||
self.powerline = powerline
|
|
||||||
self.side = side
|
|
||||||
self.savedpsvar = savedpsvar
|
|
||||||
self.savedps = savedps
|
|
||||||
self.args = powerline.args
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
r = self.powerline.render(
|
|
||||||
width=zsh.columns(),
|
|
||||||
side=self.side,
|
|
||||||
segment_info={'args': self.args, 'environ': environ}
|
|
||||||
)
|
|
||||||
if type(r) is not str:
|
|
||||||
if type(r) is bytes:
|
|
||||||
return r.decode('utf-8')
|
|
||||||
else:
|
|
||||||
return r.encode('utf-8')
|
|
||||||
return r
|
|
||||||
|
|
||||||
def __del__(self):
|
|
||||||
if self.savedps:
|
|
||||||
zsh.setvalue(self.savedpsvar, self.savedps)
|
|
||||||
used_powerlines.remove(self.powerline)
|
|
||||||
if self.powerline not in used_powerlines:
|
|
||||||
self.powerline.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
def set_prompt(powerline, psvar, side):
|
|
||||||
savedps = zsh.getvalue(psvar)
|
|
||||||
zpyvar = 'ZPYTHON_POWERLINE_' + psvar
|
|
||||||
prompt = Prompt(powerline, side, psvar, savedps)
|
|
||||||
zsh.set_special_string(zpyvar, prompt)
|
|
||||||
zsh.setvalue(psvar, '${' + zpyvar + '}')
|
|
||||||
|
|
||||||
|
|
||||||
def setup():
|
|
||||||
powerline = ShellPowerline(Args())
|
|
||||||
used_powerlines.append(powerline)
|
|
||||||
used_powerlines.append(powerline)
|
|
||||||
set_prompt(powerline, 'PS1', 'left')
|
|
||||||
set_prompt(powerline, 'RPS1', 'right')
|
|
||||||
atexit.register(shutdown)
|
|
|
@ -1,39 +0,0 @@
|
||||||
_powerline_tmux_setenv() {
|
|
||||||
emulate -L zsh
|
|
||||||
if [[ -n "$TMUX" ]]; then
|
|
||||||
tmux setenv TMUX_"$1"_$(tmux display -p "#D" | tr -d %) "$2"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_tmux_set_pwd() {
|
|
||||||
_powerline_tmux_setenv PWD "$PWD"
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_tmux_set_columns() {
|
|
||||||
_powerline_tmux_setenv COLUMNS "$COLUMNS"
|
|
||||||
}
|
|
||||||
|
|
||||||
_powerline_install_precmd() {
|
|
||||||
emulate zsh
|
|
||||||
for f in "${precmd_functions[@]}"; do
|
|
||||||
if [[ "$f" = "_powerline_precmd" ]]; then
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
chpwd_functions+=( _powerline_tmux_set_pwd )
|
|
||||||
setopt promptpercent
|
|
||||||
setopt promptsubst
|
|
||||||
if zmodload zsh/zpython &>/dev/null ; then
|
|
||||||
zpython 'from powerline.bindings.zsh import setup as powerline_setup'
|
|
||||||
zpython 'powerline_setup()'
|
|
||||||
zpython 'del powerline_setup'
|
|
||||||
else
|
|
||||||
PS1='$(powerline shell left -r zsh_prompt --last_exit_code=$? --last_pipe_status="$pipestatus")'
|
|
||||||
RPS1='$(powerline shell right -r zsh_prompt --last_exit_code=$? --last_pipe_status="$pipestatus")'
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
trap "_powerline_tmux_set_columns" SIGWINCH
|
|
||||||
_powerline_tmux_set_columns
|
|
||||||
|
|
||||||
_powerline_install_precmd
|
|
|
@ -1,143 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from copy import copy
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_MODE_KEY = None
|
|
||||||
ATTR_BOLD = 1
|
|
||||||
ATTR_ITALIC = 2
|
|
||||||
ATTR_UNDERLINE = 4
|
|
||||||
|
|
||||||
|
|
||||||
def get_attr_flag(attributes):
|
|
||||||
'''Convert an attribute array to a renderer flag.'''
|
|
||||||
attr_flag = 0
|
|
||||||
if 'bold' in attributes:
|
|
||||||
attr_flag |= ATTR_BOLD
|
|
||||||
if 'italic' in attributes:
|
|
||||||
attr_flag |= ATTR_ITALIC
|
|
||||||
if 'underline' in attributes:
|
|
||||||
attr_flag |= ATTR_UNDERLINE
|
|
||||||
return attr_flag
|
|
||||||
|
|
||||||
|
|
||||||
def pick_gradient_value(grad_list, gradient_level):
|
|
||||||
'''Given a list of colors and gradient percent, return a color that should be used.
|
|
||||||
|
|
||||||
Note: gradient level is not checked for being inside [0, 100] interval.
|
|
||||||
'''
|
|
||||||
return grad_list[int(round(gradient_level * (len(grad_list) - 1) / 100))]
|
|
||||||
|
|
||||||
|
|
||||||
def hl_iter(value):
|
|
||||||
if type(value) is list:
|
|
||||||
for v in value:
|
|
||||||
yield v
|
|
||||||
else:
|
|
||||||
yield value
|
|
||||||
|
|
||||||
|
|
||||||
class Colorscheme(object):
|
|
||||||
def __init__(self, colorscheme_config, colors_config):
|
|
||||||
'''Initialize a colorscheme.'''
|
|
||||||
self.colors = {}
|
|
||||||
self.gradients = {}
|
|
||||||
|
|
||||||
self.groups = colorscheme_config['groups']
|
|
||||||
self.translations = colorscheme_config.get('mode_translations', {})
|
|
||||||
|
|
||||||
# Create a dict of color tuples with both a cterm and hex value
|
|
||||||
for color_name, color in colors_config['colors'].items():
|
|
||||||
try:
|
|
||||||
self.colors[color_name] = (color[0], int(color[1], 16))
|
|
||||||
except TypeError:
|
|
||||||
self.colors[color_name] = (color, cterm_to_hex[color])
|
|
||||||
|
|
||||||
# Create a dict of gradient names with two lists: for cterm and hex
|
|
||||||
# values. Two lists in place of one list of pairs were chosen because
|
|
||||||
# true colors allow more precise gradients.
|
|
||||||
for gradient_name, gradient in colors_config['gradients'].items():
|
|
||||||
if len(gradient) == 2:
|
|
||||||
self.gradients[gradient_name] = (
|
|
||||||
(gradient[0], [int(color, 16) for color in gradient[1]]))
|
|
||||||
else:
|
|
||||||
self.gradients[gradient_name] = (
|
|
||||||
(gradient[0], [cterm_to_hex[color] for color in gradient[0]]))
|
|
||||||
|
|
||||||
def get_gradient(self, gradient, gradient_level):
|
|
||||||
if gradient in self.gradients:
|
|
||||||
return tuple((pick_gradient_value(grad_list, gradient_level) for grad_list in self.gradients[gradient]))
|
|
||||||
else:
|
|
||||||
return self.colors[gradient]
|
|
||||||
|
|
||||||
def get_highlighting(self, groups, mode, gradient_level=None):
|
|
||||||
trans = self.translations.get(mode, {})
|
|
||||||
for group in hl_iter(groups):
|
|
||||||
if 'groups' in trans and group in trans['groups']:
|
|
||||||
try:
|
|
||||||
group_props = trans['groups'][group]
|
|
||||||
except KeyError:
|
|
||||||
continue
|
|
||||||
break
|
|
||||||
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
group_props = copy(self.groups[group])
|
|
||||||
except KeyError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
ctrans = trans['colors']
|
|
||||||
for key in ('fg', 'bg'):
|
|
||||||
try:
|
|
||||||
group_props[key] = ctrans[group_props[key]]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
raise KeyError('Highlighting groups not found in colorscheme: ' + ', '.join(hl_iter(groups)))
|
|
||||||
|
|
||||||
if gradient_level is None:
|
|
||||||
pick_color = self.colors.__getitem__
|
|
||||||
else:
|
|
||||||
pick_color = lambda gradient: self.get_gradient(gradient, gradient_level)
|
|
||||||
|
|
||||||
return {
|
|
||||||
'fg': pick_color(group_props['fg']),
|
|
||||||
'bg': pick_color(group_props['bg']),
|
|
||||||
'attr': get_attr_flag(group_props.get('attr', [])),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# 0 1 2 3 4 5 6 7 8 9
|
|
||||||
cterm_to_hex = (
|
|
||||||
0x000000, 0xc00000, 0x008000, 0x804000, 0x0000c0, 0xc000c0, 0x008080, 0xc0c0c0, 0x808080, 0xff6060, # 0
|
|
||||||
0x00ff00, 0xffff00, 0x8080ff, 0xff40ff, 0x00ffff, 0xffffff, 0x000000, 0x00005f, 0x000087, 0x0000af, # 1
|
|
||||||
0x0000d7, 0x0000ff, 0x005f00, 0x005f5f, 0x005f87, 0x005faf, 0x005fd7, 0x005fff, 0x008700, 0x00875f, # 2
|
|
||||||
0x008787, 0x0087af, 0x0087d7, 0x0087ff, 0x00af00, 0x00af5f, 0x00af87, 0x00afaf, 0x00afd7, 0x00afff, # 3
|
|
||||||
0x00d700, 0x00d75f, 0x00d787, 0x00d7af, 0x00d7d7, 0x00d7ff, 0x00ff00, 0x00ff5f, 0x00ff87, 0x00ffaf, # 4
|
|
||||||
0x00ffd7, 0x00ffff, 0x5f0000, 0x5f005f, 0x5f0087, 0x5f00af, 0x5f00d7, 0x5f00ff, 0x5f5f00, 0x5f5f5f, # 5
|
|
||||||
0x5f5f87, 0x5f5faf, 0x5f5fd7, 0x5f5fff, 0x5f8700, 0x5f875f, 0x5f8787, 0x5f87af, 0x5f87d7, 0x5f87ff, # 6
|
|
||||||
0x5faf00, 0x5faf5f, 0x5faf87, 0x5fafaf, 0x5fafd7, 0x5fafff, 0x5fd700, 0x5fd75f, 0x5fd787, 0x5fd7af, # 7
|
|
||||||
0x5fd7d7, 0x5fd7ff, 0x5fff00, 0x5fff5f, 0x5fff87, 0x5fffaf, 0x5fffd7, 0x5fffff, 0x870000, 0x87005f, # 8
|
|
||||||
0x870087, 0x8700af, 0x8700d7, 0x8700ff, 0x875f00, 0x875f5f, 0x875f87, 0x875faf, 0x875fd7, 0x875fff, # 9
|
|
||||||
0x878700, 0x87875f, 0x878787, 0x8787af, 0x8787d7, 0x8787ff, 0x87af00, 0x87af5f, 0x87af87, 0x87afaf, # 10
|
|
||||||
0x87afd7, 0x87afff, 0x87d700, 0x87d75f, 0x87d787, 0x87d7af, 0x87d7d7, 0x87d7ff, 0x87ff00, 0x87ff5f, # 11
|
|
||||||
0x87ff87, 0x87ffaf, 0x87ffd7, 0x87ffff, 0xaf0000, 0xaf005f, 0xaf0087, 0xaf00af, 0xaf00d7, 0xaf00ff, # 12
|
|
||||||
0xaf5f00, 0xaf5f5f, 0xaf5f87, 0xaf5faf, 0xaf5fd7, 0xaf5fff, 0xaf8700, 0xaf875f, 0xaf8787, 0xaf87af, # 13
|
|
||||||
0xaf87d7, 0xaf87ff, 0xafaf00, 0xafaf5f, 0xafaf87, 0xafafaf, 0xafafd7, 0xafafff, 0xafd700, 0xafd75f, # 14
|
|
||||||
0xafd787, 0xafd7af, 0xafd7d7, 0xafd7ff, 0xafff00, 0xafff5f, 0xafff87, 0xafffaf, 0xafffd7, 0xafffff, # 15
|
|
||||||
0xd70000, 0xd7005f, 0xd70087, 0xd700af, 0xd700d7, 0xd700ff, 0xd75f00, 0xd75f5f, 0xd75f87, 0xd75faf, # 16
|
|
||||||
0xd75fd7, 0xd75fff, 0xd78700, 0xd7875f, 0xd78787, 0xd787af, 0xd787d7, 0xd787ff, 0xd7af00, 0xd7af5f, # 17
|
|
||||||
0xd7af87, 0xd7afaf, 0xd7afd7, 0xd7afff, 0xd7d700, 0xd7d75f, 0xd7d787, 0xd7d7af, 0xd7d7d7, 0xd7d7ff, # 18
|
|
||||||
0xd7ff00, 0xd7ff5f, 0xd7ff87, 0xd7ffaf, 0xd7ffd7, 0xd7ffff, 0xff0000, 0xff005f, 0xff0087, 0xff00af, # 19
|
|
||||||
0xff00d7, 0xff00ff, 0xff5f00, 0xff5f5f, 0xff5f87, 0xff5faf, 0xff5fd7, 0xff5fff, 0xff8700, 0xff875f, # 20
|
|
||||||
0xff8787, 0xff87af, 0xff87d7, 0xff87ff, 0xffaf00, 0xffaf5f, 0xffaf87, 0xffafaf, 0xffafd7, 0xffafff, # 21
|
|
||||||
0xffd700, 0xffd75f, 0xffd787, 0xffd7af, 0xffd7d7, 0xffd7ff, 0xffff00, 0xffff5f, 0xffff87, 0xffffaf, # 22
|
|
||||||
0xffffd7, 0xffffff, 0x080808, 0x121212, 0x1c1c1c, 0x262626, 0x303030, 0x3a3a3a, 0x444444, 0x4e4e4e, # 23
|
|
||||||
0x585858, 0x626262, 0x6c6c6c, 0x767676, 0x808080, 0x8a8a8a, 0x949494, 0x9e9e9e, 0xa8a8a8, 0xb2b2b2, # 24
|
|
||||||
0xbcbcbc, 0xc6c6c6, 0xd0d0d0, 0xdadada, 0xe4e4e4, 0xeeeeee # 25
|
|
||||||
)
|
|
|
@ -1,106 +0,0 @@
|
||||||
{
|
|
||||||
"colors": {
|
|
||||||
"black": 16,
|
|
||||||
"white": 231,
|
|
||||||
|
|
||||||
"darkestgreen": 22,
|
|
||||||
"darkgreen": 28,
|
|
||||||
"mediumgreen": 70,
|
|
||||||
"brightgreen": 148,
|
|
||||||
|
|
||||||
"darkestcyan": 23,
|
|
||||||
"darkcyan": 74,
|
|
||||||
"mediumcyan": 117,
|
|
||||||
"brightcyan": 159,
|
|
||||||
|
|
||||||
"darkestblue": 24,
|
|
||||||
"darkblue": 31,
|
|
||||||
|
|
||||||
"darkestred": 52,
|
|
||||||
"darkred": 88,
|
|
||||||
"mediumred": 124,
|
|
||||||
"brightred": 160,
|
|
||||||
"brightestred": 196,
|
|
||||||
|
|
||||||
"darkestpurple": 55,
|
|
||||||
"mediumpurple": 98,
|
|
||||||
"brightpurple": 189,
|
|
||||||
|
|
||||||
"darkorange": 94,
|
|
||||||
"mediumorange": 166,
|
|
||||||
"brightorange": 208,
|
|
||||||
"brightestorange": 214,
|
|
||||||
|
|
||||||
"brightyellow": 220,
|
|
||||||
|
|
||||||
"gray0": 233,
|
|
||||||
"gray1": 235,
|
|
||||||
"gray2": 236,
|
|
||||||
"gray3": 239,
|
|
||||||
"gray4": 240,
|
|
||||||
"gray5": 241,
|
|
||||||
"gray6": 244,
|
|
||||||
"gray7": 245,
|
|
||||||
"gray8": 247,
|
|
||||||
"gray9": 250,
|
|
||||||
"gray10": 252,
|
|
||||||
|
|
||||||
"gray61": [14, "93a1a1"],
|
|
||||||
"gray13": [8, "002b36"],
|
|
||||||
|
|
||||||
"royalblue5": [0, "073642"],
|
|
||||||
"darkgreencopper": [10, "586e75"],
|
|
||||||
"lightskyblue4": [11, "657b83"],
|
|
||||||
"azure4": [12, "839496"],
|
|
||||||
"lightyellow": [7, "eee8d5"],
|
|
||||||
"oldlace": [15, "fdf6e3"],
|
|
||||||
|
|
||||||
"green": [2, "719e07"],
|
|
||||||
"cyan": [6, "2aa198"],
|
|
||||||
"blue": [4, "268bd2"],
|
|
||||||
"red": [1, "dc322f"],
|
|
||||||
"magenta": [5, "d33682"],
|
|
||||||
"violet": [13, "6c71c4"],
|
|
||||||
"orange": [9, "cb4b16"],
|
|
||||||
"yellow": [3, "b58900"],
|
|
||||||
|
|
||||||
"lightyellowgreen": 106,
|
|
||||||
"gold3": 178,
|
|
||||||
"orangered": 202,
|
|
||||||
|
|
||||||
"steelblue": 67,
|
|
||||||
"darkorange3": 166,
|
|
||||||
"skyblue1": 117,
|
|
||||||
"khaki1": 228
|
|
||||||
},
|
|
||||||
"gradients": {
|
|
||||||
"dark_GREEN_Orange_red": [
|
|
||||||
[22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 94, 94, 94, 94, 94, 94, 94, 52],
|
|
||||||
["005f00", "015f00", "025f00", "035f00", "045f00", "055f00", "065f00", "075f00", "085f00", "095f00", "0b5f00", "0c5f00", "0d5f00", "0e5f00", "0f5f00", "105f00", "115f00", "125f00", "135f00", "145f00", "165f00", "175f00", "185f00", "195f00", "1a5f00", "1b5f00", "1c5f00", "1d5f00", "1e5f00", "1f5f00", "215f00", "225f00", "235f00", "245f00", "255f00", "265f00", "275f00", "285f00", "295f00", "2a5f00", "2c5f00", "2d5f00", "2e5f00", "2f5f00", "305f00", "315f00", "325f00", "335f00", "345f00", "355f00", "375f00", "385f00", "395f00", "3a5f00", "3b5f00", "3c5f00", "3d5f00", "3e5f00", "3f5f00", "415f00", "425f00", "435f00", "445f00", "455f00", "465f00", "475f00", "485f00", "495f00", "4a5f00", "4c5f00", "4d5f00", "4e5f00", "4f5f00", "505f00", "515f00", "525f00", "535f00", "545f00", "555f00", "575f00", "585f00", "595f00", "5a5f00", "5b5f00", "5c5f00", "5d5f00", "5e5f00", "615f00", "655f00", "685f00", "6c5f00", "6f5f00", "735f00", "765f00", "7a5f00", "7d5f00", "815f00", "845f00", "815200", "702900"]
|
|
||||||
],
|
|
||||||
"GREEN_Orange_red": [
|
|
||||||
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1],
|
|
||||||
["005f00", "015f00", "025f00", "035f00", "045f00", "055f00", "065f00", "075f00", "085f00", "095f00", "0b5f00", "0c5f00", "0d5f00", "0e5f00", "0f5f00", "105f00", "115f00", "125f00", "135f00", "145f00", "165f00", "175f00", "185f00", "195f00", "1a5f00", "1b5f00", "1c5f00", "1d5f00", "1e5f00", "1f5f00", "215f00", "225f00", "235f00", "245f00", "255f00", "265f00", "275f00", "285f00", "295f00", "2a5f00", "2c5f00", "2d5f00", "2e5f00", "2f5f00", "305f00", "315f00", "325f00", "335f00", "345f00", "355f00", "375f00", "385f00", "395f00", "3a5f00", "3b5f00", "3c5f00", "3d5f00", "3e5f00", "3f5f00", "415f00", "425f00", "435f00", "445f00", "455f00", "465f00", "475f00", "485f00", "495f00", "4a5f00", "4c5f00", "4d5f00", "4e5f00", "4f5f00", "505f00", "515f00", "525f00", "535f00", "545f00", "555f00", "575f00", "585f00", "595f00", "5a5f00", "5b5f00", "5c5f00", "5d5f00", "5e5f00", "615f00", "655f00", "685f00", "6c5f00", "6f5f00", "735f00", "765f00", "7a5f00", "7d5f00", "815f00", "845f00", "815200", "702900"]
|
|
||||||
],
|
|
||||||
"green_yellow_red": [
|
|
||||||
[190, 184, 178, 172, 166, 160],
|
|
||||||
["8ae71c", "8ce71c", "8fe71c", "92e71c", "95e71d", "98e71d", "9ae71d", "9de71d", "a0e71e", "a3e71e", "a6e71e", "a8e71e", "abe71f", "aee71f", "b1e71f", "b4e71f", "b6e720", "b9e720", "bce720", "bfe720", "c2e821", "c3e721", "c5e621", "c7e521", "c9e522", "cbe422", "cde322", "cfe222", "d1e223", "d3e123", "d5e023", "d7df23", "d9df24", "dbde24", "dddd24", "dfdc24", "e1dc25", "e3db25", "e5da25", "e7d925", "e9d926", "e9d626", "e9d426", "e9d126", "e9cf27", "e9cc27", "e9ca27", "e9c727", "e9c528", "e9c228", "e9c028", "e9bd28", "e9bb29", "e9b829", "e9b629", "e9b329", "e9b12a", "e9ae2a", "e9ac2a", "e9a92a", "eaa72b", "eaa42b", "eaa22b", "ea9f2b", "ea9d2c", "ea9b2c", "ea982c", "ea962c", "ea942d", "ea912d", "ea8f2d", "ea8d2d", "ea8a2e", "ea882e", "ea862e", "ea832e", "ea812f", "ea7f2f", "ea7c2f", "ea7a2f", "eb7830", "eb7530", "eb7330", "eb7130", "eb6f31", "eb6c31", "eb6a31", "eb6831", "eb6632", "eb6332", "eb6132", "eb5f32", "eb5d33", "eb5a33", "eb5833", "eb5633", "eb5434", "eb5134", "eb4f34", "eb4d34", "ec4b35"]
|
|
||||||
],
|
|
||||||
"green_yellow_orange_red": [
|
|
||||||
[2, 3, 9, 1],
|
|
||||||
["719e07", "739d06", "759c06", "779c06", "799b06", "7b9a05", "7d9a05", "7f9905", "819805", "839805", "859704", "879704", "899604", "8b9504", "8d9504", "8f9403", "919303", "949303", "969203", "989102", "9a9102", "9c9002", "9e9002", "a08f02", "a28e01", "a48e01", "a68d01", "a88c01", "aa8c01", "ac8b00", "ae8a00", "b08a00", "b28900", "b58900", "b58700", "b68501", "b78302", "b78102", "b87f03", "b97d04", "b97b04", "ba7905", "bb7806", "bb7606", "bc7407", "bd7208", "bd7008", "be6e09", "bf6c0a", "bf6a0a", "c0690b", "c1670c", "c1650c", "c2630d", "c3610e", "c35f0e", "c45d0f", "c55b10", "c55a10", "c65811", "c75612", "c75412", "c85213", "c95014", "c94e14", "ca4c15", "cb4b16", "cb4a16", "cc4917", "cc4818", "cd4719", "cd4719", "ce461a", "ce451b", "cf441c", "cf441c", "d0431d", "d0421e", "d1411f", "d1411f", "d24020", "d23f21", "d33e22", "d33e22", "d43d23", "d43c24", "d53b25", "d53b25", "d63a26", "d63927", "d73828", "d73828", "d83729", "d8362a", "d9352b", "d9352b", "da342c", "da332d", "db322e", "dc322f"]
|
|
||||||
],
|
|
||||||
"yellow_red": [
|
|
||||||
[220, 178, 172, 166, 160],
|
|
||||||
["ffd700", "fdd500", "fbd300", "fad200", "f8d000", "f7cf00", "f5cd00", "f3cb00", "f2ca00", "f0c800", "efc700", "edc500", "ebc300", "eac200", "e8c000", "e7bf00", "e5bd00", "e3bb00", "e2ba00", "e0b800", "dfb700", "ddb500", "dbb300", "dab200", "d8b000", "d7af00", "d7ad00", "d7ab00", "d7aa00", "d7a800", "d7a700", "d7a500", "d7a300", "d7a200", "d7a000", "d79f00", "d79d00", "d79b00", "d79a00", "d79800", "d79700", "d79500", "d79300", "d79200", "d79000", "d78f00", "d78d00", "d78b00", "d78a00", "d78800", "d78700", "d78500", "d78300", "d78200", "d78000", "d77f00", "d77d00", "d77b00", "d77a00", "d77800", "d77700", "d77500", "d77300", "d77200", "d77000", "d76f00", "d76d00", "d76b00", "d76a00", "d76800", "d76700", "d76500", "d76300", "d76200", "d76000", "d75f00", "d75b00", "d75700", "d75300", "d74f00", "d74c00", "d74800", "d74400", "d74000", "d73c00", "d73900", "d73500", "d73100", "d72d00", "d72900", "d72600", "d72200", "d71e00", "d71a00", "d71600", "d71300", "d70f00", "d70b00", "d70700"]
|
|
||||||
],
|
|
||||||
"yellow_orange_red": [
|
|
||||||
[3, 9, 1],
|
|
||||||
["b58900", "b58700", "b58600", "b68501", "b68401", "b78202", "b78102", "b88003", "b87f03", "b87d03", "b97c04", "b97b04", "ba7a05", "ba7805", "bb7706", "bb7606", "bc7507", "bc7307", "bc7207", "bd7108", "bd7008", "be6e09", "be6d09", "bf6c0a", "bf6b0a", "c06a0b", "c0680b", "c0670b", "c1660c", "c1650c", "c2630d", "c2620d", "c3610e", "c3600e", "c35e0e", "c45d0f", "c45c0f", "c55b10", "c55910", "c65811", "c65711", "c75612", "c75412", "c75312", "c85213", "c85113", "c94f14", "c94e14", "ca4d15", "ca4c15", "cb4b16", "cb4a16", "cb4a17", "cc4917", "cc4918", "cc4818", "cd4819", "cd4719", "cd471a", "ce461a", "ce461b", "ce451b", "cf451c", "cf441c", "cf441d", "d0431d", "d0431e", "d0421e", "d1421f", "d1411f", "d14120", "d24020", "d24021", "d23f21", "d33f22", "d33e22", "d33e23", "d43d23", "d43d24", "d43c24", "d53c25", "d53b25", "d53b26", "d63a26", "d63a27", "d63927", "d73928", "d73828", "d73829", "d83729", "d8372a", "d8362a", "d9362b", "d9352b", "d9352c", "da342c", "da342d", "da332d", "db332e"]
|
|
||||||
],
|
|
||||||
"blue_red": [
|
|
||||||
[39, 74, 68, 67, 103, 97, 96, 132, 131, 167, 203, 197],
|
|
||||||
["19b4fe", "1bb2fc", "1db1fa", "1faff8", "22aef6", "24adf4", "26abf2", "29aaf0", "2ba9ee", "2da7ec", "30a6ea", "32a5e8", "34a3e6", "36a2e4", "39a0e2", "3b9fe1", "3d9edf", "409cdd", "429bdb", "449ad9", "4798d7", "4997d5", "4b96d3", "4d94d1", "5093cf", "5292cd", "5490cb", "578fc9", "598dc7", "5b8cc6", "5e8bc4", "6089c2", "6288c0", "6487be", "6785bc", "6984ba", "6b83b8", "6e81b6", "7080b4", "727eb2", "757db0", "777cae", "797aac", "7b79ab", "7e78a9", "8076a7", "8275a5", "8574a3", "8772a1", "89719f", "8c709d", "8e6e9b", "906d99", "926b97", "956a95", "976993", "996791", "9c668f", "9e658e", "a0638c", "a3628a", "a56188", "a75f86", "a95e84", "ac5c82", "ae5b80", "b05a7e", "b3587c", "b5577a", "b75678", "ba5476", "bc5374", "be5273", "c05071", "c34f6f", "c54e6d", "c74c6b", "ca4b69", "cc4967", "ce4865", "d14763", "d34561", "d5445f", "d7435d", "da415b", "dc4059", "de3f58", "e13d56", "e33c54", "e53a52", "e83950", "ea384e", "ec364c", "ee354a", "f13448", "f33246", "f53144", "f83042", "fa2e40"]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,8 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Default color scheme for IPython prompt",
|
|
||||||
"groups": {
|
|
||||||
"virtualenv": { "fg": "white", "bg": "darkcyan" },
|
|
||||||
"prompt": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"prompt_count": { "fg": "white", "bg": "gray4" }
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Default color scheme for shell prompts",
|
|
||||||
"groups": {
|
|
||||||
"user": { "fg": "white", "bg": "darkblue", "attr": ["bold"] },
|
|
||||||
"superuser": { "fg": "white", "bg": "brightred", "attr": ["bold"] },
|
|
||||||
"virtualenv": { "fg": "white", "bg": "darkcyan" },
|
|
||||||
"branch": { "fg": "gray9", "bg": "gray2" },
|
|
||||||
"branch_dirty": { "fg": "brightyellow", "bg": "gray2" },
|
|
||||||
"branch_clean": { "fg": "gray9", "bg": "gray2" },
|
|
||||||
"cwd": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"cwd:current_folder": { "fg": "gray10", "bg": "gray4", "attr": ["bold"] },
|
|
||||||
"cwd:divider": { "fg": "gray7", "bg": "gray4" },
|
|
||||||
"hostname": { "fg": "brightyellow", "bg": "mediumorange" },
|
|
||||||
"exit_fail": { "fg": "white", "bg": "darkestred" },
|
|
||||||
"exit_success": { "fg": "white", "bg": "darkestgreen" }
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Solarized Dark",
|
|
||||||
"groups": {
|
|
||||||
"user": { "fg": "oldlace", "bg": "blue", "attr": ["bold"] },
|
|
||||||
"superuser": { "fg": "oldlace", "bg": "red", "attr": ["bold"] },
|
|
||||||
"virtualenv": { "fg": "oldlace", "bg": "green" },
|
|
||||||
"branch": { "fg": "gray61", "bg": "royalblue5" },
|
|
||||||
"branch_dirty": { "fg": "yellow", "bg": "royalblue5" },
|
|
||||||
"branch_clean": { "fg": "gray61", "bg": "royalblue5" },
|
|
||||||
"cwd": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"cwd:current_folder": { "fg": "oldlace", "bg": "darkgreencopper", "attr": ["bold"] },
|
|
||||||
"cwd:divider": { "fg": "gray61", "bg": "darkgreencopper" },
|
|
||||||
"hostname": { "fg": "oldlace", "bg": "darkgreencopper" },
|
|
||||||
"exit_fail": { "fg": "oldlace", "bg": "red" },
|
|
||||||
"exit_success": { "fg": "oldlace", "bg": "green" }
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,24 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Default color scheme for terminal prompts",
|
|
||||||
"groups": {
|
|
||||||
"background:divider": { "fg": "gray5", "bg": "gray0" },
|
|
||||||
"session": { "fg": "black", "bg": "gray10", "attr": ["bold"] },
|
|
||||||
"date": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"time": { "fg": "gray10", "bg": "gray2", "attr": ["bold"] },
|
|
||||||
"time:divider": { "fg": "gray5", "bg": "gray2" },
|
|
||||||
"email_alert": { "fg": "white", "bg": "brightred", "attr": ["bold"] },
|
|
||||||
"email_alert_gradient": { "fg": "white", "bg": "yellow_orange_red", "attr": ["bold"] },
|
|
||||||
"hostname": { "fg": "black", "bg": "gray10", "attr": ["bold"] },
|
|
||||||
"weather": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"weather_temp_gradient": { "fg": "blue_red", "bg": "gray0" },
|
|
||||||
"weather_condition_hot": { "fg": "khaki1", "bg": "gray0" },
|
|
||||||
"weather_condition_snowy": { "fg": "skyblue1", "bg": "gray0" },
|
|
||||||
"weather_condition_rainy": { "fg": "skyblue1", "bg": "gray0" },
|
|
||||||
"uptime": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"external_ip": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"network_load": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"network_load_gradient": { "fg": "green_yellow_orange_red", "bg": "gray0" },
|
|
||||||
"system_load": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"system_load_gradient": { "fg": "green_yellow_orange_red", "bg": "gray0" }
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,95 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Default color scheme",
|
|
||||||
"groups": {
|
|
||||||
"background": { "fg": "white", "bg": "gray2" },
|
|
||||||
"background:divider": { "fg": "gray6", "bg": "gray2" },
|
|
||||||
"mode": { "fg": "darkestgreen", "bg": "brightgreen", "attr": ["bold"] },
|
|
||||||
"modified_indicator": { "fg": "brightyellow", "bg": "gray4", "attr": ["bold"] },
|
|
||||||
"paste_indicator": { "fg": "white", "bg": "mediumorange", "attr": ["bold"] },
|
|
||||||
"readonly_indicator": { "fg": "brightestred", "bg": "gray4" },
|
|
||||||
"branch": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"branch_dirty": { "fg": "brightyellow", "bg": "gray4" },
|
|
||||||
"branch_clean": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"branch:divider": { "fg": "gray7", "bg": "gray4" },
|
|
||||||
"file_directory": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"file_name": { "fg": "white", "bg": "gray4", "attr": ["bold"] },
|
|
||||||
"file_size": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"file_name_no_file": { "fg": "gray9", "bg": "gray4", "attr": ["bold"] },
|
|
||||||
"file_name_empty": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"file_format": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"file_encoding": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"file_type": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"file_vcs_status": { "fg": "brightestred", "bg": "gray4" },
|
|
||||||
"file_vcs_status_M": { "fg": "brightyellow", "bg": "gray4" },
|
|
||||||
"file_vcs_status_A": { "fg": "brightgreen", "bg": "gray4" },
|
|
||||||
"line_percent": { "fg": "gray9", "bg": "gray4" },
|
|
||||||
"line_percent_gradient": { "fg": "green_yellow_red", "bg": "gray4" },
|
|
||||||
"line_current": { "fg": "gray1", "bg": "gray10", "attr": ["bold"] },
|
|
||||||
"line_current_symbol": { "fg": "gray1", "bg": "gray10" },
|
|
||||||
"virtcol_current_gradient": { "fg": "dark_GREEN_Orange_red", "bg": "gray10" },
|
|
||||||
"col_current": { "fg": "gray6", "bg": "gray10" },
|
|
||||||
"modified_buffers": { "fg": "brightyellow", "bg": "gray2" }
|
|
||||||
},
|
|
||||||
"mode_translations": {
|
|
||||||
"nc": {
|
|
||||||
"colors": {
|
|
||||||
"brightyellow": "darkorange",
|
|
||||||
"brightestred": "darkred",
|
|
||||||
"gray0": "gray0",
|
|
||||||
"gray1": "gray0",
|
|
||||||
"gray2": "gray0",
|
|
||||||
"gray3": "gray1",
|
|
||||||
"gray4": "gray1",
|
|
||||||
"gray5": "gray1",
|
|
||||||
"gray6": "gray1",
|
|
||||||
"gray7": "gray4",
|
|
||||||
"gray8": "gray4",
|
|
||||||
"gray9": "gray4",
|
|
||||||
"gray10": "gray5",
|
|
||||||
"white": "gray6",
|
|
||||||
"green_yellow_red": "gray5"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"i": {
|
|
||||||
"colors": {
|
|
||||||
"gray0": "darkestblue",
|
|
||||||
"gray1": "darkestblue",
|
|
||||||
"gray2": "darkestblue",
|
|
||||||
"gray3": "darkblue",
|
|
||||||
"gray4": "darkblue",
|
|
||||||
"gray5": "darkestcyan",
|
|
||||||
"gray6": "darkestcyan",
|
|
||||||
"gray7": "darkestcyan",
|
|
||||||
"gray8": "mediumcyan",
|
|
||||||
"gray9": "mediumcyan",
|
|
||||||
"gray10": "mediumcyan",
|
|
||||||
"green_yellow_red": "gray5"
|
|
||||||
},
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "darkestcyan", "bg": "white", "attr": ["bold"] },
|
|
||||||
"background:divider": { "fg": "darkcyan", "bg": "darkestblue" },
|
|
||||||
"branch:divider": { "fg": "darkcyan", "bg": "darkblue" }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"v": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "darkorange", "bg": "brightestorange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"V": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "darkorange", "bg": "brightestorange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"^V": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "darkorange", "bg": "brightestorange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"R": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "white", "bg": "brightred", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,92 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Solarized Dark",
|
|
||||||
"groups": {
|
|
||||||
"background": { "fg": "oldlace", "bg": "royalblue5" },
|
|
||||||
"background:divider": { "fg": "lightskyblue4", "bg": "royalblue5" },
|
|
||||||
"mode": { "fg": "oldlace", "bg": "green", "attr": ["bold"] },
|
|
||||||
"modified_indicator": { "fg": "yellow", "bg": "darkgreencopper", "attr": ["bold"] },
|
|
||||||
"paste_indicator": { "fg": "oldlace", "bg": "orange", "attr": ["bold"] },
|
|
||||||
"readonly_indicator": { "fg": "red", "bg": "darkgreencopper" },
|
|
||||||
"branch": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"branch_dirty": { "fg": "yellow", "bg": "darkgreencopper" },
|
|
||||||
"branch_clean": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"branch:divider": { "fg": "gray61", "bg": "darkgreencopper" },
|
|
||||||
"file_directory": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"file_name": { "fg": "oldlace", "bg": "darkgreencopper", "attr": ["bold"] },
|
|
||||||
"file_size": { "fg": "oldlace", "bg": "darkgreencopper" },
|
|
||||||
"file_name_no_file": { "fg": "oldlace", "bg": "darkgreencopper", "attr": ["bold"] },
|
|
||||||
"file_name_empty": { "fg": "oldlace", "bg": "darkgreencopper" },
|
|
||||||
"file_format": { "fg": "gray61", "bg": "royalblue5" },
|
|
||||||
"file_encoding": { "fg": "gray61", "bg": "royalblue5" },
|
|
||||||
"file_type": { "fg": "gray61", "bg": "royalblue5" },
|
|
||||||
"file_vcs_status": { "fg": "red", "bg": "darkgreencopper" },
|
|
||||||
"file_vcs_status_M": { "fg": "yellow", "bg": "darkgreencopper" },
|
|
||||||
"file_vcs_status_A": { "fg": "green", "bg": "darkgreencopper" },
|
|
||||||
"line_percent": { "fg": "oldlace", "bg": "lightskyblue4" },
|
|
||||||
"line_percent_gradient": { "fg": "green_yellow_orange_red", "bg": "lightskyblue4" },
|
|
||||||
"line_current": { "fg": "gray13", "bg": "lightyellow", "attr": ["bold"] },
|
|
||||||
"line_current_symbol": { "fg": "gray13", "bg": "lightyellow" },
|
|
||||||
"virtcol_current_gradient": { "fg": "GREEN_Orange_red", "bg": "gray10" },
|
|
||||||
"col_current": { "fg": "azure4", "bg": "lightyellow" }
|
|
||||||
},
|
|
||||||
"mode_translations": {
|
|
||||||
"nc": {
|
|
||||||
"colors": {
|
|
||||||
"darkgreencopper": "royalblue5",
|
|
||||||
"lightskyblue4": "royalblue5",
|
|
||||||
"azure4": "darkgreencopper",
|
|
||||||
"gray61": "lightskyblue4",
|
|
||||||
"lightyellow": "azure4",
|
|
||||||
"oldlace": "gray61"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"i": {
|
|
||||||
"groups": {
|
|
||||||
"background": { "fg": "oldlace", "bg": "darkgreencopper" },
|
|
||||||
"background:divider": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"mode": { "fg": "oldlace", "bg": "blue", "attr": ["bold"] },
|
|
||||||
"modified_indicator": { "fg": "yellow", "bg": "lightyellow", "attr": ["bold"] },
|
|
||||||
"paste_indicator": { "fg": "oldlace", "bg": "orange", "attr": ["bold"] },
|
|
||||||
"readonly_indicator": { "fg": "red", "bg": "lightyellow" },
|
|
||||||
"branch": { "fg": "darkgreencopper", "bg": "lightyellow" },
|
|
||||||
"branch:divider": { "fg": "lightskyblue4", "bg": "lightyellow" },
|
|
||||||
"file_directory": { "fg": "darkgreencopper", "bg": "lightyellow" },
|
|
||||||
"file_name": { "fg": "royalblue5", "bg": "lightyellow", "attr": ["bold"] },
|
|
||||||
"file_size": { "fg": "royalblue5", "bg": "lightyellow" },
|
|
||||||
"file_name_no_file": { "fg": "royalblue5", "bg": "lightyellow", "attr": ["bold"] },
|
|
||||||
"file_name_empty": { "fg": "royalblue5", "bg": "lightyellow" },
|
|
||||||
"file_format": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"file_encoding": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"file_type": { "fg": "lightyellow", "bg": "darkgreencopper" },
|
|
||||||
"file_vcs_status": { "fg": "red", "bg": "lightyellow" },
|
|
||||||
"file_vcs_status_M": { "fg": "yellow", "bg": "lightyellow" },
|
|
||||||
"file_vcs_status_A": { "fg": "green", "bg": "lightyellow" },
|
|
||||||
"line_percent": { "fg": "oldlace", "bg": "gray61" },
|
|
||||||
"line_percent_gradient": { "fg": "oldlace", "bg": "gray61" },
|
|
||||||
"line_current": { "fg": "gray13", "bg": "oldlace", "attr": ["bold"] },
|
|
||||||
"line_current_symbol": { "fg": "gray13", "bg": "oldlace" },
|
|
||||||
"col_current": { "fg": "azure4", "bg": "oldlace" }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"v": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "oldlace", "bg": "orange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"V": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "oldlace", "bg": "orange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"^V": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "oldlace", "bg": "orange", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"R": {
|
|
||||||
"groups": {
|
|
||||||
"mode": { "fg": "oldlace", "bg": "red", "attr": ["bold"] }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,25 +0,0 @@
|
||||||
{
|
|
||||||
"name": "Default color scheme for window managers",
|
|
||||||
"groups": {
|
|
||||||
"background:divider": { "fg": "gray5", "bg": "gray0" },
|
|
||||||
"session": { "fg": "black", "bg": "gray10", "attr": ["bold"] },
|
|
||||||
"date": { "fg": "gray8", "bg": "gray2" },
|
|
||||||
"time": { "fg": "gray10", "bg": "gray2", "attr": ["bold"] },
|
|
||||||
"time:divider": { "fg": "gray5", "bg": "gray2" },
|
|
||||||
"email_alert": { "fg": "white", "bg": "brightred", "attr": ["bold"] },
|
|
||||||
"email_alert_gradient": { "fg": "white", "bg": "yellow_orange_red", "attr": ["bold"] },
|
|
||||||
"hostname": { "fg": "black", "bg": "gray10", "attr": ["bold"] },
|
|
||||||
"weather": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"weather_temp_gradient": { "fg": "blue_red", "bg": "gray0" },
|
|
||||||
"weather_condition_hot": { "fg": "khaki1", "bg": "gray0" },
|
|
||||||
"weather_condition_snowy": { "fg": "skyblue1", "bg": "gray0" },
|
|
||||||
"weather_condition_rainy": { "fg": "skyblue1", "bg": "gray0" },
|
|
||||||
"uptime": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"external_ip": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"network_load": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"system_load": { "fg": "gray8", "bg": "gray0" },
|
|
||||||
"system_load_good": { "fg": "lightyellowgreen", "bg": "gray0" },
|
|
||||||
"system_load_bad": { "fg": "gold3", "bg": "gray0" },
|
|
||||||
"system_load_ugly": { "fg": "orangered", "bg": "gray0" }
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,48 +0,0 @@
|
||||||
{
|
|
||||||
"common": {
|
|
||||||
"term_truecolor": false,
|
|
||||||
"dividers": {
|
|
||||||
"left": {
|
|
||||||
"hard": " ",
|
|
||||||
"soft": " "
|
|
||||||
},
|
|
||||||
"right": {
|
|
||||||
"hard": " ",
|
|
||||||
"soft": " "
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"spaces": 1
|
|
||||||
},
|
|
||||||
"ext": {
|
|
||||||
"ipython": {
|
|
||||||
"colorscheme": "default",
|
|
||||||
"theme": "in",
|
|
||||||
"local_themes": {
|
|
||||||
"rewrite": "rewrite",
|
|
||||||
"out": "out",
|
|
||||||
"in2": "in2"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"shell": {
|
|
||||||
"colorscheme": "default",
|
|
||||||
"theme": "default"
|
|
||||||
},
|
|
||||||
"tmux": {
|
|
||||||
"colorscheme": "default",
|
|
||||||
"theme": "default"
|
|
||||||
},
|
|
||||||
"vim": {
|
|
||||||
"colorscheme": "default",
|
|
||||||
"theme": "default",
|
|
||||||
"local_themes": {
|
|
||||||
"cmdwin": "cmdwin",
|
|
||||||
"help": "help",
|
|
||||||
"quickfix": "quickfix"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"wm": {
|
|
||||||
"colorscheme": "default",
|
|
||||||
"theme": "default"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,26 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"name": "virtualenv"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "In[",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "prompt_count",
|
|
||||||
"module": "powerline.segments.ipython",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "]",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "",
|
|
||||||
"width": "auto",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,25 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "Out[",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"width": "auto",
|
|
||||||
"align": "r",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "prompt_count",
|
|
||||||
"module": "powerline.segments.ipython",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "]",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,23 +0,0 @@
|
||||||
{
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"width": "auto",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "prompt_count",
|
|
||||||
"module": "powerline.segments.ipython",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": ">",
|
|
||||||
"highlight_group": ["prompt"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,45 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segment_data": {
|
|
||||||
"hostname": {
|
|
||||||
"before": " ",
|
|
||||||
"args": {
|
|
||||||
"only_if_ssh": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"virtualenv": {
|
|
||||||
"before": "ⓔ "
|
|
||||||
},
|
|
||||||
"branch": {
|
|
||||||
"before": " "
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"name": "hostname"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "user"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "virtualenv"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "cwd",
|
|
||||||
"args": {
|
|
||||||
"dir_limit_depth": 3
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"module": "powerline.segments.shell",
|
|
||||||
"name": "last_pipe_status"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "branch"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,43 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segment_data": {
|
|
||||||
"hostname": {
|
|
||||||
"before": " ",
|
|
||||||
"args": {
|
|
||||||
"only_if_ssh": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"virtualenv": {
|
|
||||||
"before": "ⓔ "
|
|
||||||
},
|
|
||||||
"branch": {
|
|
||||||
"before": " "
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"name": "hostname"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "user"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "virtualenv"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "branch"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "cwd",
|
|
||||||
"args": {
|
|
||||||
"dir_limit_depth": 3
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "last_status",
|
|
||||||
"module": "powerline.segments.shell"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,62 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segment_data": {
|
|
||||||
"uptime": {
|
|
||||||
"before": "⇑ "
|
|
||||||
},
|
|
||||||
"external_ip": {
|
|
||||||
"before": "ⓦ "
|
|
||||||
},
|
|
||||||
"date": {
|
|
||||||
"before": "⌚ "
|
|
||||||
},
|
|
||||||
"email_imap_alert": {
|
|
||||||
"before": "✉ ",
|
|
||||||
"args": {
|
|
||||||
"username": "",
|
|
||||||
"password": ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"segments": {
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"name": "uptime",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "external_ip",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "network_load",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "system_load",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "weather",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "date"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "date",
|
|
||||||
"args": {
|
|
||||||
"format": "%H:%M",
|
|
||||||
"istime": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "email_imap_alert",
|
|
||||||
"priority": 10
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "hostname"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,18 +0,0 @@
|
||||||
{
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"contents": "Command Line",
|
|
||||||
"highlight_group": ["file_name"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"highlight_group": ["background"],
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"draw_hard_divider": false,
|
|
||||||
"width": "auto"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,108 +0,0 @@
|
||||||
{
|
|
||||||
"segment_data": {
|
|
||||||
"branch": {
|
|
||||||
"before": " "
|
|
||||||
},
|
|
||||||
"modified_indicator": {
|
|
||||||
"args": { "text": "+" }
|
|
||||||
},
|
|
||||||
"line_percent": {
|
|
||||||
"args": { "gradient": true },
|
|
||||||
"after": "%"
|
|
||||||
},
|
|
||||||
"line_current_symbol": {
|
|
||||||
"contents": " "
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"name": "mode",
|
|
||||||
"exclude_modes": ["nc"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "paste_indicator",
|
|
||||||
"exclude_modes": ["nc"],
|
|
||||||
"priority": 10
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "branch",
|
|
||||||
"exclude_modes": ["nc"],
|
|
||||||
"priority": 30
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "readonly_indicator",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"after": " "
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "file_directory",
|
|
||||||
"priority": 40,
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "file_name",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "file_vcs_status",
|
|
||||||
"before": " ",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "modified_indicator",
|
|
||||||
"before": " "
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"highlight_group": ["background"],
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"draw_hard_divider": false,
|
|
||||||
"width": "auto"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"name": "file_format",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"exclude_modes": ["nc"],
|
|
||||||
"priority": 60
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "file_encoding",
|
|
||||||
"exclude_modes": ["nc"],
|
|
||||||
"priority": 60
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "file_type",
|
|
||||||
"exclude_modes": ["nc"],
|
|
||||||
"priority": 60
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "line_percent",
|
|
||||||
"priority": 50,
|
|
||||||
"width": 4,
|
|
||||||
"align": "r"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"name": "line_current_symbol",
|
|
||||||
"highlight_group": ["line_current_symbol", "line_current"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "line_current",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"width": 3,
|
|
||||||
"align": "r"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "virtcol_current",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"priority": 20,
|
|
||||||
"before": ":",
|
|
||||||
"width": 3,
|
|
||||||
"align": "l"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,36 +0,0 @@
|
||||||
{
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"name": "file_name",
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"highlight_group": ["background"],
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"draw_hard_divider": false,
|
|
||||||
"width": "auto"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"name": "line_percent",
|
|
||||||
"priority": 30,
|
|
||||||
"width": 4,
|
|
||||||
"align": "r"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"name": "line_current_symbol",
|
|
||||||
"highlight_group": ["line_current_symbol", "line_current"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "line_current",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"width": 3,
|
|
||||||
"align": "r"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,37 +0,0 @@
|
||||||
{
|
|
||||||
"segment_data": {
|
|
||||||
"buffer_name": {
|
|
||||||
"contents": "Location List"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"segments": {
|
|
||||||
"left": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"name": "buffer_name",
|
|
||||||
"highlight_group": ["file_name"],
|
|
||||||
"draw_soft_divider": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"highlight_group": ["background"],
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"draw_hard_divider": false,
|
|
||||||
"width": "auto"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"type": "string",
|
|
||||||
"name": "line_current_symbol",
|
|
||||||
"highlight_group": ["line_current_symbol", "line_current"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "line_current",
|
|
||||||
"draw_soft_divider": false,
|
|
||||||
"width": 3,
|
|
||||||
"align": "r"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,31 +0,0 @@
|
||||||
{
|
|
||||||
"default_module": "powerline.segments.common",
|
|
||||||
"segments": {
|
|
||||||
"right": [
|
|
||||||
{
|
|
||||||
"name": "weather",
|
|
||||||
"priority": 50
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "date"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "date",
|
|
||||||
"args": {
|
|
||||||
"format": "%H:%M",
|
|
||||||
"istime": true
|
|
||||||
},
|
|
||||||
"before": "⌚ "
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "email_imap_alert",
|
|
||||||
"before": "✉ ",
|
|
||||||
"priority": 10,
|
|
||||||
"args": {
|
|
||||||
"username": "",
|
|
||||||
"password": ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,30 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from powerline import Powerline
|
|
||||||
from powerline.lib import mergedicts
|
|
||||||
|
|
||||||
|
|
||||||
class IpythonPowerline(Powerline):
	'''Powerline subclass used inside IPython.

	Concrete subclasses are expected to provide ``path``,
	``config_overrides`` and ``theme_overrides`` attributes that customise
	where configuration is read from and what is merged on top of it.
	'''

	def __init__(self):
		super(IpythonPowerline, self).__init__('ipython', use_daemon_threads=True)

	def get_config_paths(self):
		# An explicitly configured path completely replaces the default
		# search paths.
		if self.path:
			return [self.path]
		return super(IpythonPowerline, self).get_config_paths()

	def get_local_themes(self, local_themes):
		'''Map each matcher to a loaded theme configuration dictionary.'''
		return dict((
			(matcher, {'config': self.load_theme_config(theme_name)})
			for matcher, theme_name in local_themes.items()
		))

	def load_main_config(self):
		'''Load the main config and merge ``config_overrides`` on top.'''
		config = super(IpythonPowerline, self).load_main_config()
		if self.config_overrides:
			mergedicts(config, self.config_overrides)
		return config

	def load_theme_config(self, name):
		'''Load theme ``name`` and merge any matching override on top.'''
		config = super(IpythonPowerline, self).load_theme_config(name)
		if name in self.theme_overrides:
			mergedicts(config, self.theme_overrides[name])
		return config
|
|
|
@ -1,67 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from functools import wraps
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
def wraps_saveargs(wrapped):
	'''Like :func:`functools.wraps`, but also record the origin function.

	The returned decorator copies metadata from ``wrapped`` onto the wrapper
	and stores the innermost original function on the wrapper as the
	``powerline_origin`` attribute, following any pre-existing
	``powerline_origin`` chain.
	'''
	def dec(wrapper):
		decorated = wraps(wrapped)(wrapper)
		decorated.powerline_origin = getattr(wrapped, 'powerline_origin', wrapped)
		return decorated
	return dec
|
|
||||||
|
|
||||||
|
|
||||||
def mergedicts(d1, d2):
	'''Recursively merge ``d2`` into ``d1``, modifying ``d1`` in place.

	Values from ``d2`` overwrite those in ``d1`` except when both sides hold
	plain dicts, in which case the merge recurses.
	'''
	for key, value in d2.items():
		if key in d1 and type(d1[key]) is dict and type(value) is dict:
			mergedicts(d1[key], value)
		else:
			d1[key] = value
|
|
||||||
|
|
||||||
|
|
||||||
def add_divider_highlight_group(highlight_group):
	'''Decorator factory: wrap a segment so its divider gets a highlight group.

	A truthy segment result is wrapped into a one-element segment list whose
	``divider_highlight_group`` is ``highlight_group``; a falsy result is
	turned into ``None``.
	'''
	def dec(func):
		@wraps_saveargs(func)
		def wrapped(**kwargs):
			contents = func(**kwargs)
			if not contents:
				return None
			return [{
				'contents': contents,
				'divider_highlight_group': highlight_group,
			}]
		return wrapped
	return dec
|
|
||||||
|
|
||||||
|
|
||||||
def keyvaluesplit(s):
	'''Split an ``option=json_value`` string into an ``(option, value)`` pair.

	The part after the first ``=`` is decoded as JSON. Raises
	:exc:`TypeError` when there is no ``=`` and :exc:`ValueError` when the
	option name starts with ``_``.
	'''
	if '=' not in s:
		raise TypeError('Option must look like option=json_value')
	if s.startswith('_'):
		raise ValueError('Option names must not start with `_\'')
	option, _, raw_value = s.partition('=')
	return (option, json.loads(raw_value))
|
|
||||||
|
|
||||||
|
|
||||||
def parsedotval(s):
	'''Expand a dotted option into a nested-dict pair.

	``s`` is either an ``(option, value)`` tuple or an ``option=json_value``
	string (decoded with :func:`keyvaluesplit`). A dotted option name like
	``a.b.c`` produces ``('a', {'b': {'c': value}})``; an undotted name
	yields ``(name, value)`` unchanged.
	'''
	if type(s) is tuple:
		option, value = s
	else:
		option, value = keyvaluesplit(s)

	keys = option.split('.')
	if len(keys) == 1:
		return (option, value)
	# Build the nested dicts inside-out: deepest key wraps the value first.
	nested = value
	for key in reversed(keys[1:]):
		nested = {key: nested}
	return (keys[0], nested)
|
|
|
@ -1,156 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from powerline.lib.threaded import MultiRunnedThread
|
|
||||||
from powerline.lib.file_watcher import create_file_watcher
|
|
||||||
|
|
||||||
from threading import Event, Lock
|
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
def open_file(path):
	'''Open ``path`` for reading in text mode.'''
	return open(path, mode='r')
|
|
||||||
|
|
||||||
|
|
||||||
def load_json_config(config_file_path, load=json.load, open_file=open_file):
	'''Open and deserialize the configuration file at ``config_file_path``.

	:param str config_file_path: location of the file to read.
	:param function load: deserializer applied to the open file object.
	:param function open_file: callable used to open the path for reading.
	'''
	with open_file(config_file_path) as config_fp:
		return load(config_fp)
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigLoader(MultiRunnedThread):
	'''Background thread that loads configuration files and watches them.

	Registered callbacks are invoked whenever a watched file changes;
	"missing" callbacks are polled each interval until their condition
	function yields a path.
	'''

	def __init__(self, shutdown_event=None, watcher=None, load=load_json_config):
		super(ConfigLoader, self).__init__()
		self.shutdown_event = shutdown_event or Event()
		self.watcher = watcher or create_file_watcher()
		self._load = load

		self.pl = None
		self.interval = None

		self.lock = Lock()

		# path -> set of callbacks fired when the file at path changes.
		self.watched = defaultdict(set)
		# key -> set of (condition_function, function) pairs polled each cycle.
		self.missing = defaultdict(set)
		# path -> most recently loaded contents.
		self.loaded = {}

	def set_pl(self, pl):
		self.pl = pl

	def set_interval(self, interval):
		self.interval = interval

	def register(self, function, path):
		'''Register function that will be run when file changes.

		:param function function:
			Function that will be called when file at the given path changes.
		:param str path:
			Path that will be watched for.
		'''
		with self.lock:
			self.watched[path].add(function)
			self.watcher.watch(path)

	def register_missing(self, condition_function, function, key):
		'''Register any function that will be called with given key each
		interval seconds (interval is defined at __init__). Its result is then
		passed to ``function``, but only if the result is true.

		:param function condition_function:
			Function which will be called each ``interval`` seconds. All
			exceptions from it will be ignored.
		:param function function:
			Function which will be called if condition_function returns
			something that is true. Accepts result of condition_function as an
			argument.
		:param str key:
			Any value, it will be passed to condition_function on each call.

		Note: registered functions will be automatically removed if
		condition_function results in something true.
		'''
		with self.lock:
			self.missing[key].add((condition_function, function))

	def unregister_functions(self, removed_functions):
		'''Unregister files handled by these functions.

		:param set removed_functions:
			Set of functions previously passed to ``.register()`` method.
		'''
		with self.lock:
			for path, functions in list(self.watched.items()):
				functions -= removed_functions
				if not functions:
					self.watched.pop(path)
					self.loaded.pop(path, None)

	def unregister_missing(self, removed_functions):
		'''Unregister files handled by these functions.

		:param set removed_functions:
			Set of pairs (2-tuples) representing ``(condition_function,
			function)`` function pairs previously passed as an arguments to
			``.register_missing()`` method.
		'''
		with self.lock:
			for key, functions in list(self.missing.items()):
				functions -= removed_functions
				if not functions:
					self.missing.pop(key)

	def load(self, path):
		'''Return cached contents for ``path``, loading on first access.'''
		try:
			# No locks: GIL does what we need
			return self.loaded[path]
		except KeyError:
			contents = self._load(path)
			self.loaded[path] = contents
			return contents

	def update(self):
		'''Run one watch/poll cycle: fire callbacks and reload changed files.'''
		paths_to_load = []
		with self.lock:
			for path, functions in self.watched.items():
				for function in functions:
					try:
						modified = self.watcher(path)
					except OSError as err:
						# Treat a failing watcher as "changed" so callbacks
						# still fire and can react.
						modified = True
						self.exception('Error while running watcher for path {0}: {1}', path, str(err))
					else:
						if modified:
							paths_to_load.append(path)
					if modified:
						function(path)
		with self.lock:
			for key, functions in list(self.missing.items()):
				for condition_function, function in list(functions):
					try:
						path = condition_function(key)
					except Exception as err:
						self.exception('Error while running condition function for key {0}: {1}', key, str(err))
					else:
						if path:
							paths_to_load.append(path)
							function(path)
							# One-shot: a satisfied condition unregisters itself.
							functions.remove((condition_function, function))
				if not functions:
					self.missing.pop(key)
		for path in paths_to_load:
			try:
				self.loaded[path] = self._load(path)
			except Exception as err:
				self.exception('Error while loading {0}: {1}', path, str(err))

	def run(self):
		while self.interval is not None and not self.shutdown_event.is_set():
			self.update()
			self.shutdown_event.wait(self.interval)

	def exception(self, msg, *args, **kwargs):
		# Route through the powerline logger when available; otherwise
		# re-raise the active exception.
		if self.pl:
			self.pl.exception(msg, prefix='config_loader', *args, **kwargs)
		else:
			raise
|
|
|
@ -1,181 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import unicode_literals, absolute_import
|
|
||||||
|
|
||||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
|
||||||
__docformat__ = 'restructuredtext en'
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from time import sleep
|
|
||||||
from threading import RLock
|
|
||||||
|
|
||||||
from powerline.lib.monotonic import monotonic
|
|
||||||
from powerline.lib.inotify import INotify, INotifyError
|
|
||||||
|
|
||||||
|
|
||||||
class INotifyWatch(INotify):
	'''File watcher backed by Linux inotify.

	Tracks a set of files and reports, per path, whether the file changed
	since the last query. Watches idle for longer than ``expire_time``
	minutes are dropped to conserve kernel resources.
	'''
	is_stat_based = False

	def __init__(self, expire_time=10):
		super(INotifyWatch, self).__init__()
		self.watches = {}     # path -> inotify watch descriptor
		self.modified = {}    # path -> True if changed since last query
		self.last_query = {}  # path -> monotonic time of the last __call__
		self.lock = RLock()
		self.expire_time = expire_time * 60  # minutes -> seconds

	def expire_watches(self):
		'''Drop watches that were not queried for ``expire_time`` seconds.'''
		now = monotonic()
		for path, last_query in tuple(self.last_query.items()):
			# BUG FIX: the original compared ``last_query - now``, which is
			# always <= 0 for a monotonic clock, so no watch ever expired.
			if now - last_query > self.expire_time:
				self.unwatch(path)

	def process_event(self, wd, mask, cookie, name):
		if wd == -1 and (mask & self.Q_OVERFLOW):
			# We missed some INOTIFY events, so we dont
			# know the state of any tracked files.
			for path in tuple(self.modified):
				if os.path.exists(path):
					self.modified[path] = True
				else:
					self.watches.pop(path, None)
					self.modified.pop(path, None)
					self.last_query.pop(path, None)
			return

		for path, num in tuple(self.watches.items()):
			if num == wd:
				if mask & self.IGNORED:
					# Kernel removed the watch (file deleted/unmounted).
					self.watches.pop(path, None)
					self.modified.pop(path, None)
					self.last_query.pop(path, None)
				else:
					self.modified[path] = True

	def unwatch(self, path):
		''' Remove the watch for path. Raises an OSError if removing the watch
		fails for some reason. '''
		path = self.os.path.abspath(path)
		with self.lock:
			self.modified.pop(path, None)
			self.last_query.pop(path, None)
			wd = self.watches.pop(path, None)
			if wd is not None:
				if self._rm_watch(self._inotify_fd, wd) != 0:
					self.handle_error()

	def watch(self, path):
		''' Register a watch for the file named path. Raises an OSError if path
		does not exist. '''
		import ctypes
		path = self.os.path.abspath(path)
		with self.lock:
			if path not in self.watches:
				bpath = path if isinstance(path, bytes) else path.encode(self.fenc)
				wd = self._add_watch(self._inotify_fd, ctypes.c_char_p(bpath),
						self.MODIFY | self.ATTRIB | self.MOVE_SELF | self.DELETE_SELF)
				if wd == -1:
					self.handle_error()
				self.watches[path] = wd
				self.modified[path] = False

	def __call__(self, path):
		''' Return True if path has been modified since the last call. Can
		raise OSError if the path does not exist. '''
		path = self.os.path.abspath(path)
		with self.lock:
			self.last_query[path] = monotonic()
			self.expire_watches()
			if path not in self.watches:
				# Try to re-add the watch, it will fail if the file does not
				# exist/you dont have permission
				self.watch(path)
				return True
			self.read(get_name=False)
			if path not in self.modified:
				# An ignored event was received which means the path has been
				# automatically unwatched
				return True
			ans = self.modified[path]
			if ans:
				self.modified[path] = False
			return ans

	def close(self):
		with self.lock:
			for path in tuple(self.watches):
				try:
					self.unwatch(path)
				except OSError:
					pass
			super(INotifyWatch, self).close()
|
|
||||||
|
|
||||||
|
|
||||||
class StatWatch(object):
	'''Portable fallback watcher that detects changes by comparing mtimes.'''
	is_stat_based = True

	def __init__(self):
		self.watches = {}  # path -> last observed mtime
		self.lock = RLock()

	def watch(self, path):
		'''Start tracking ``path``; records its current mtime.'''
		path = os.path.abspath(path)
		with self.lock:
			self.watches[path] = os.path.getmtime(path)

	def unwatch(self, path):
		'''Stop tracking ``path``; a no-op when it was never watched.'''
		path = os.path.abspath(path)
		with self.lock:
			self.watches.pop(path, None)

	def __call__(self, path):
		'''Return True when ``path``'s mtime changed since the previous call.

		An unwatched path is recorded and reported as changed. May raise
		OSError when the path does not exist.
		'''
		path = os.path.abspath(path)
		with self.lock:
			mtime = os.path.getmtime(path)
			if path not in self.watches or self.watches[path] != mtime:
				self.watches[path] = mtime
				return True
			return False

	def close(self):
		'''Forget all tracked paths.'''
		with self.lock:
			self.watches.clear()
|
|
||||||
|
|
||||||
|
|
||||||
def create_file_watcher(use_stat=False, expire_time=10):
	'''
	Create an object that can watch for changes to specified files. To use:

	watcher = create_file_watcher()
	watcher(path1)  # Will return True if path1 has changed since the last time this was called. Always returns True the first time.
	watcher.unwatch(path1)

	Uses inotify if available, otherwise tracks mtimes. expire_time is the
	number of minutes after the last query for a given path for the inotify
	watch for that path to be automatically removed. This conserves kernel
	resources.
	'''
	if not use_stat:
		# Prefer inotify; silently fall back to stat-based watching when it
		# is unavailable on this platform.
		try:
			return INotifyWatch(expire_time=expire_time)
		except INotifyError:
			pass
	return StatWatch()
|
|
||||||
|
|
||||||
if __name__ == '__main__':
	# Manual test driver: watch the file given as the last CLI argument and
	# report each detected modification, polling once per second.
	target = sys.argv[-1]
	watcher = create_file_watcher()
	print('Using watcher: %s' % watcher.__class__.__name__)
	print('Watching %s, press Ctrl-C to quit' % target)
	watcher.watch(target)
	try:
		while True:
			if watcher(target):
				print('%s has changed' % target)
			sleep(1)
	except KeyboardInterrupt:
		pass
	watcher.close()
|
|
|
@ -1,22 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from math import log

# (SI prefix, number of decimal places) for each power of the divisor.
unit_list = tuple(zip(['', 'k', 'M', 'G', 'T', 'P'], [0, 0, 1, 2, 2, 2]))


def humanize_bytes(num, suffix='B', si_prefix=False):
	'''Return a human friendly byte representation.

	Modified version from http://stackoverflow.com/questions/1094841

	:param int num: number of bytes.
	:param str suffix: unit suffix appended after the prefix.
	:param bool si_prefix: use powers of 1000 (kB) instead of 1024 (KiB).
	'''
	if num == 0:
		return '0 ' + suffix
	div = 1000 if si_prefix else 1024
	exponent = min(int(log(num, div)) if num else 0, len(unit_list) - 1)
	quotient = float(num) / div ** exponent
	unit, decimals = unit_list[exponent]
	if unit and not si_prefix:
		# Binary prefixes: k -> Ki, M -> Mi, ...
		unit = unit.upper() + 'i'
	template = '{{quotient:.{decimals}f}} {{unit}}{{suffix}}'.format(decimals=decimals)
	return template.format(quotient=quotient, unit=unit, suffix=suffix)
|
|
|
@ -1,178 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import unicode_literals, absolute_import
|
|
||||||
|
|
||||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
|
||||||
__docformat__ = 'restructuredtext en'
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import errno
|
|
||||||
|
|
||||||
|
|
||||||
class INotifyError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
_inotify = None  # Cached (init1, add_watch, rm_watch, read) foreign functions.


def load_inotify():
	''' Initialize the inotify library '''
	global _inotify
	if _inotify is None:
		if hasattr(sys, 'getwindowsversion'):
			# On windows abort before loading the C library. Windows has
			# multiple, incompatible C runtimes, and we have no way of knowing
			# if the one chosen by ctypes is compatible with the currently
			# loaded one.
			raise INotifyError('INotify not available on windows')
		import ctypes
		if not hasattr(ctypes, 'c_ssize_t'):
			raise INotifyError('You need python >= 2.7 to use inotify')
		from ctypes.util import find_library
		name = find_library('c')
		if not name:
			raise INotifyError('Cannot find C library')
		libc = ctypes.CDLL(name, use_errno=True)
		for function in ("inotify_add_watch", "inotify_init1", "inotify_rm_watch"):
			if not hasattr(libc, function):
				raise INotifyError('libc is too old')

		# inotify_init1()
		prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, use_errno=True)
		init1 = prototype(('inotify_init1', libc), ((1, "flags", 0),))

		# inotify_add_watch()
		prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32, use_errno=True)
		add_watch = prototype(('inotify_add_watch', libc), (
			(1, "fd"), (1, "pathname"), (1, "mask")), use_errno=True)

		# inotify_rm_watch()
		prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_int, use_errno=True)
		rm_watch = prototype(('inotify_rm_watch', libc), (
			(1, "fd"), (1, "wd")), use_errno=True)

		# read()
		prototype = ctypes.CFUNCTYPE(ctypes.c_ssize_t, ctypes.c_int, ctypes.c_void_p, ctypes.c_size_t, use_errno=True)
		read = prototype(('read', libc), (
			(1, "fd"), (1, "buf"), (1, "count")), use_errno=True)

		_inotify = (init1, add_watch, rm_watch, read)
	return _inotify
|
|
||||||
|
|
||||||
|
|
||||||
class INotify(object):
	'''Thin ctypes wrapper around the Linux inotify(7) API.'''

	# See <sys/inotify.h> for the flags defined below

	# Supported events suitable for MASK parameter of INOTIFY_ADD_WATCH.
	ACCESS = 0x00000001  # File was accessed.
	MODIFY = 0x00000002  # File was modified.
	ATTRIB = 0x00000004  # Metadata changed.
	CLOSE_WRITE = 0x00000008  # Writtable file was closed.
	CLOSE_NOWRITE = 0x00000010  # Unwrittable file closed.
	OPEN = 0x00000020  # File was opened.
	MOVED_FROM = 0x00000040  # File was moved from X.
	MOVED_TO = 0x00000080  # File was moved to Y.
	CREATE = 0x00000100  # Subfile was created.
	DELETE = 0x00000200  # Subfile was deleted.
	DELETE_SELF = 0x00000400  # Self was deleted.
	MOVE_SELF = 0x00000800  # Self was moved.

	# Events sent by the kernel.
	UNMOUNT = 0x00002000  # Backing fs was unmounted.
	Q_OVERFLOW = 0x00004000  # Event queued overflowed.
	IGNORED = 0x00008000  # File was ignored.

	# Helper events.
	CLOSE = (CLOSE_WRITE | CLOSE_NOWRITE)  # Close.
	MOVE = (MOVED_FROM | MOVED_TO)  # Moves.

	# Special flags.
	ONLYDIR = 0x01000000  # Only watch the path if it is a directory.
	DONT_FOLLOW = 0x02000000  # Do not follow a sym link.
	EXCL_UNLINK = 0x04000000  # Exclude events on unlinked objects.
	MASK_ADD = 0x20000000  # Add to the mask of an already existing watch.
	ISDIR = 0x40000000  # Event occurred against dir.
	ONESHOT = 0x80000000  # Only send event once.

	# All events which a program can wait on.
	ALL_EVENTS = (ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE |
			OPEN | MOVED_FROM | MOVED_TO | CREATE | DELETE |
			DELETE_SELF | MOVE_SELF)

	# See <bits/inotify.h>
	CLOEXEC = 0x80000
	NONBLOCK = 0x800

	def __init__(self, cloexec=True, nonblock=True):
		import ctypes
		import struct
		self._init1, self._add_watch, self._rm_watch, self._read = load_inotify()
		flags = 0
		if cloexec:
			flags |= self.CLOEXEC
		if nonblock:
			flags |= self.NONBLOCK
		self._inotify_fd = self._init1(flags)
		if self._inotify_fd == -1:
			raise INotifyError(os.strerror(ctypes.get_errno()))

		self._buf = ctypes.create_string_buffer(5000)
		self.fenc = sys.getfilesystemencoding() or 'utf-8'
		self.hdr = struct.Struct(b'iIII')
		if self.fenc == 'ascii':
			self.fenc = 'utf-8'
		# We keep a reference to os to prevent it from being deleted
		# during interpreter shutdown, which would lead to errors in the
		# __del__ method
		self.os = os

	def handle_error(self):
		'''Raise OSError built from the current ctypes errno.'''
		import ctypes
		eno = ctypes.get_errno()
		raise OSError(eno, self.os.strerror(eno))

	def __del__(self):
		# This method can be called during interpreter shutdown, which means we
		# must do the absolute minimum here. Note that there could be running
		# daemon threads that are trying to call other methods on this object.
		try:
			self.os.close(self._inotify_fd)
		except (AttributeError, TypeError):
			pass

	def close(self):
		if hasattr(self, '_inotify_fd'):
			self.os.close(self._inotify_fd)
			del self.os
			del self._add_watch
			del self._rm_watch
			del self._inotify_fd

	def read(self, get_name=True):
		'''Drain pending events and dispatch each to :meth:`process_event`.'''
		import ctypes
		buf = []
		while True:
			num = self._read(self._inotify_fd, self._buf, len(self._buf))
			if num == 0:
				break
			if num < 0:
				en = ctypes.get_errno()
				if en == errno.EAGAIN:
					break  # No more data
				if en == errno.EINTR:
					continue  # Interrupted, try again
				raise OSError(en, self.os.strerror(en))
			buf.append(self._buf.raw[:num])
		raw = b''.join(buf)
		pos = 0
		lraw = len(raw)
		# Each event is a fixed header (wd, mask, cookie, name_len) followed
		# by name_len bytes of NUL-padded name.
		while lraw - pos >= self.hdr.size:
			wd, mask, cookie, name_len = self.hdr.unpack_from(raw, pos)
			pos += self.hdr.size
			name = None
			if get_name:
				name = raw[pos:pos + name_len].rstrip(b'\0').decode(self.fenc)
			pos += name_len
			self.process_event(wd, mask, cookie, name)

	def process_event(self, *args):
		raise NotImplementedError()
|
|
|
@ -1,40 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from functools import wraps
|
|
||||||
from powerline.lib.monotonic import monotonic
|
|
||||||
|
|
||||||
|
|
||||||
def default_cache_key(**kwargs):
	'''Build a hashable cache key from keyword arguments.'''
	items = kwargs.items()
	return frozenset(items)


class memoize(object):
	'''Memoization decorator with timeout.'''

	def __init__(self, timeout, cache_key=default_cache_key, cache_reg_func=None):
		self.timeout = timeout
		self.cache_key = cache_key
		self.cache = {}
		self.cache_reg_func = cache_reg_func

	def __call__(self, func):
		@wraps(func)
		def decorated_function(**kwargs):
			# Hand the cache dict to the registration hook exactly once.
			if self.cache_reg_func:
				self.cache_reg_func(self.cache)
				self.cache_reg_func = None

			key = self.cache_key(**kwargs)
			try:
				cached = self.cache.get(key, None)
			except TypeError:
				# Unhashable key: fall back to an uncached call.
				return func(**kwargs)
			# Handle case when time() appears to be less then cached['time'] due
			# to clock updates. Not applicable for monotonic clock, but this
			# case is currently rare.
			now = monotonic()
			if cached is None or not (cached['time'] < now < cached['time'] + self.timeout):
				cached = self.cache[key] = {
					'result': func(**kwargs),
					'time': monotonic(),
				}
			return cached['result']
		return decorated_function
|
|
|
@ -1,103 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from __future__ import division, absolute_import
|
|
||||||
|
|
||||||
# Select the best available monotonic clock for this interpreter/platform.
# Preference order: time.clock_gettime with a monotonic clock id (>=3.3,
# Unix), time.monotonic (>=3.3), then ctypes-based platform fallbacks, and
# finally plain time.time as a last resort.
try:
	try:
		# >=python-3.3, Unix
		from time import clock_gettime
		try:
			# >={kernel}-sources-2.6.28
			from time import CLOCK_MONOTONIC_RAW as CLOCK_ID
		except ImportError:
			from time import CLOCK_MONOTONIC as CLOCK_ID  # NOQA

		monotonic = lambda: clock_gettime(CLOCK_ID)

	except ImportError:
		# >=python-3.3
		from time import monotonic  # NOQA

except ImportError:
	import ctypes
	import sys

	try:
		if sys.platform == 'win32':
			# Windows only
			GetTickCount64 = ctypes.windll.kernel32.GetTickCount64
			GetTickCount64.restype = ctypes.c_ulonglong

			def monotonic():  # NOQA
				return GetTickCount64() / 1000

		elif sys.platform == 'darwin':
			# Mac OS X
			from ctypes.util import find_library

			libc_name = find_library('c')
			if not libc_name:
				raise OSError

			libc = ctypes.CDLL(libc_name, use_errno=True)

			mach_absolute_time = libc.mach_absolute_time
			mach_absolute_time.argtypes = ()
			mach_absolute_time.restype = ctypes.c_uint64

			class mach_timebase_info_data_t(ctypes.Structure):
				_fields_ = (
					('numer', ctypes.c_uint32),
					('denom', ctypes.c_uint32),
				)
			mach_timebase_info_data_p = ctypes.POINTER(mach_timebase_info_data_t)

			_mach_timebase_info = libc.mach_timebase_info
			_mach_timebase_info.argtypes = (mach_timebase_info_data_p,)
			_mach_timebase_info.restype = ctypes.c_int

			def mach_timebase_info():
				timebase = mach_timebase_info_data_t()
				_mach_timebase_info(ctypes.byref(timebase))
				return (timebase.numer, timebase.denom)

			timebase = mach_timebase_info()
			# Conversion factor from mach ticks to seconds.
			factor = timebase[0] / timebase[1] * 1e-9

			def monotonic():  # NOQA
				return mach_absolute_time() * factor
		else:
			# linux only (no librt on OS X)
			import os

			# See <bits/time.h>
			CLOCK_MONOTONIC = 1
			CLOCK_MONOTONIC_RAW = 4

			class timespec(ctypes.Structure):
				_fields_ = (
					('tv_sec', ctypes.c_long),
					('tv_nsec', ctypes.c_long)
				)
			tspec = timespec()

			librt = ctypes.CDLL('librt.so.1', use_errno=True)
			clock_gettime = librt.clock_gettime
			clock_gettime.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]

			if clock_gettime(CLOCK_MONOTONIC_RAW, ctypes.pointer(tspec)) == 0:
				# >={kernel}-sources-2.6.28
				clock_id = CLOCK_MONOTONIC_RAW
			elif clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(tspec)) == 0:
				clock_id = CLOCK_MONOTONIC
			else:
				raise OSError

			def monotonic():  # NOQA
				# BUG FIX: the probed ``clock_id`` was never used — the
				# original always called clock_gettime with CLOCK_MONOTONIC,
				# making the CLOCK_MONOTONIC_RAW probe above pointless.
				if clock_gettime(clock_id, ctypes.pointer(tspec)) != 0:
					errno_ = ctypes.get_errno()
					raise OSError(errno_, os.strerror(errno_))
				return tspec.tv_sec + tspec.tv_nsec / 1e9

	except Exception:
		# Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
		# raised during fallback probing are not silently swallowed.
		from time import time as monotonic  # NOQA
|
|
|
@ -1,204 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
from powerline.lib.monotonic import monotonic
|
|
||||||
|
|
||||||
from threading import Thread, Lock, Event
|
|
||||||
|
|
||||||
|
|
||||||
class MultiRunnedThread(object):
	'''Worker-thread holder whose job can be started more than once.

	:class:`threading.Thread` objects may only be started a single time;
	this wrapper creates a fresh thread on every :meth:`start`. Subclasses
	provide ``run`` and a ``shutdown_event``.
	'''
	daemon = True

	def __init__(self):
		self.thread = None

	def is_alive(self):
		'''Falsy when no thread was started; True while the thread runs.'''
		return self.thread and self.thread.is_alive()

	def start(self):
		'''Clear the shutdown flag and spawn a new thread running run().'''
		self.shutdown_event.clear()
		new_thread = Thread(target=self.run)
		self.thread = new_thread
		new_thread.daemon = self.daemon
		new_thread.start()

	def join(self, *args, **kwargs):
		'''Join the current thread when one exists; otherwise return None.'''
		if self.thread is not None:
			return self.thread.join(*args, **kwargs)
		return None
|
|
||||||
|
|
||||||
|
|
||||||
class ThreadedSegment(MultiRunnedThread):
|
|
||||||
min_sleep_time = 0.1
|
|
||||||
update_first = True
|
|
||||||
interval = 1
|
|
||||||
daemon = False
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super(ThreadedSegment, self).__init__()
|
|
||||||
self.run_once = True
|
|
||||||
self.skip = False
|
|
||||||
self.crashed_value = None
|
|
||||||
self.update_value = None
|
|
||||||
self.updated = False
|
|
||||||
|
|
||||||
def __call__(self, pl, update_first=True, **kwargs):
|
|
||||||
if self.run_once:
|
|
||||||
self.pl = pl
|
|
||||||
self.set_state(**kwargs)
|
|
||||||
update_value = self.get_update_value(True)
|
|
||||||
elif not self.is_alive():
|
|
||||||
# Without this we will not have to wait long until receiving bug “I
|
|
||||||
# opened vim, but branch information is only shown after I move
|
|
||||||
# cursor”.
|
|
||||||
#
|
|
||||||
# If running once .update() is called in __call__.
|
|
||||||
update_value = self.get_update_value(update_first and self.update_first)
|
|
||||||
self.start()
|
|
||||||
elif not self.updated:
|
|
||||||
update_value = self.get_update_value(True)
|
|
||||||
self.updated = True
|
|
||||||
else:
|
|
||||||
update_value = self.update_value
|
|
||||||
|
|
||||||
if self.skip:
|
|
||||||
return self.crashed_value
|
|
||||||
|
|
||||||
return self.render(update_value, update_first=update_first, pl=pl, **kwargs)
|
|
||||||
|
|
||||||
def get_update_value(self, update=False):
|
|
||||||
if update:
|
|
||||||
self.update_value = self.update(self.update_value)
|
|
||||||
return self.update_value
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
while not self.shutdown_event.is_set():
|
|
||||||
start_time = monotonic()
|
|
||||||
try:
|
|
||||||
self.update_value = self.update(self.update_value)
|
|
||||||
except Exception as e:
|
|
||||||
self.exception('Exception while updating: {0}', str(e))
|
|
||||||
self.skip = True
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
self.warn('Caught keyboard interrupt while updating')
|
|
||||||
self.skip = True
|
|
||||||
else:
|
|
||||||
self.skip = False
|
|
||||||
self.shutdown_event.wait(max(self.interval - (monotonic() - start_time), self.min_sleep_time))
|
|
||||||
|
|
||||||
def shutdown(self):
    """Signal the worker thread to stop and briefly wait for it to exit."""
    self.shutdown_event.set()
    if self.daemon and self.is_alive():
        # Give the worker a chance to finish, but do not block the caller
        # for long; daemon threads die with the process anyway.
        self.join(0.01)
|
|
||||||
|
|
||||||
def set_interval(self, interval=None):
    """Set the polling interval.

    A falsy *interval* (the "interval" key missing from configuration)
    keeps the current ``self.interval``.
    """
    self.interval = interval or self.interval
|
|
||||||
|
|
||||||
def set_state(self, interval=None, update_first=True, shutdown_event=None, **kwargs):
    """Apply per-render configuration: interval, shutdown event, update flag.

    ``**kwargs`` absorbs foreign configuration keys so subclasses can call
    this with their full keyword set.
    """
    self.set_interval(interval)
    self.shutdown_event = shutdown_event or Event()
    # Once updated stays True it never reverts; skipping the first update is
    # only honoured when both caller and class allow it.
    self.updated = self.updated or (not (update_first and self.update_first))
|
|
||||||
|
|
||||||
def startup(self, pl, **kwargs):
    """Switch out of run-once mode and launch the worker thread."""
    self.run_once = False
    self.pl = pl
    self.daemon = pl.use_daemon_threads

    self.set_state(**kwargs)

    if not self.is_alive():
        self.start()
|
|
||||||
|
|
||||||
def critical(self, *args, **kwargs):
    """Log at critical level, prefixed with this segment's class name."""
    self.pl.critical(prefix=self.__class__.__name__, *args, **kwargs)

def exception(self, *args, **kwargs):
    """Log an exception, prefixed with this segment's class name."""
    self.pl.exception(prefix=self.__class__.__name__, *args, **kwargs)

def info(self, *args, **kwargs):
    """Log at info level, prefixed with this segment's class name."""
    self.pl.info(prefix=self.__class__.__name__, *args, **kwargs)

def error(self, *args, **kwargs):
    """Log at error level, prefixed with this segment's class name."""
    self.pl.error(prefix=self.__class__.__name__, *args, **kwargs)

def warn(self, *args, **kwargs):
    """Log at warning level, prefixed with this segment's class name."""
    self.pl.warn(prefix=self.__class__.__name__, *args, **kwargs)

def debug(self, *args, **kwargs):
    """Log at debug level, prefixed with this segment's class name."""
    self.pl.debug(prefix=self.__class__.__name__, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class KwThreadedSegment(ThreadedSegment):
    """Threaded segment caching one state per distinct keyword-argument set.

    Each render call is identified by a frozen "key" built from its keyword
    arguments; the worker thread periodically recomputes the state for every
    recently-queried key and drops keys unused for ``drop_interval`` seconds.
    """
    drop_interval = 10 * 60
    update_first = True

    def __init__(self):
        super(KwThreadedSegment, self).__init__()
        self.updated = True
        # (queries, crashed): mapping key -> (last_query_time, state), plus
        # the set of keys whose state computation raised.
        self.update_value = ({}, set())
        self.write_lock = Lock()
        self.new_queries = {}

    @staticmethod
    def key(**kwargs):
        """Build a hashable identifier from the render keyword arguments."""
        return frozenset(kwargs.items())

    def render(self, update_value, update_first, **kwargs):
        queries, crashed = update_value
        key = self.key(**kwargs)
        if key in crashed:
            return self.crashed_value

        try:
            update_state = queries[key][1]
        except KeyError:
            # Unknown key: compute synchronously unless the first update was
            # explicitly forbidden (by configuration or by the subclass).
            if (update_first and self.update_first) or self.run_once:
                update_state = self.compute_state(key)
            else:
                update_state = None

        with self.write_lock:
            # Record the query so the worker thread picks this key up.
            self.new_queries[key] = (monotonic(), update_state)
        return self.render_one(update_state, **kwargs)

    def update(self, old_update_value):
        updates = {}
        crashed = set()
        update_value = (updates, crashed)
        queries = old_update_value[0]
        with self.write_lock:
            if self.new_queries:
                queries.update(self.new_queries)
                self.new_queries.clear()

        for key, (last_query_time, state) in queries.items():
            # Only recompute keys queried within the last drop_interval
            # seconds; stale keys are silently dropped from the new mapping.
            if last_query_time < monotonic() < last_query_time + self.drop_interval:
                try:
                    updates[key] = (last_query_time, self.compute_state(key))
                except Exception as e:
                    self.exception('Exception while computing state for {0!r}: {1}', key, str(e))
                    crashed.add(key)
                except KeyboardInterrupt:
                    self.warn('Interrupt while computing state for {0!r}', key)
                    crashed.add(key)

        return update_value

    def set_state(self, interval=None, shutdown_event=None, **kwargs):
        self.set_interval(interval)
        self.shutdown_event = shutdown_event or Event()

    @staticmethod
    def render_one(update_state, **kwargs):
        """Default per-key renderer: return the computed state unchanged."""
        return update_state
|
|
||||||
|
|
||||||
|
|
||||||
def with_docstring(instance, doc):
    """Attach *doc* as the docstring of *instance* and return the instance."""
    instance.__doc__ = doc
    return instance
|
|
|
@ -1,199 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import (unicode_literals, absolute_import, print_function)
|
|
||||||
|
|
||||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
|
||||||
__docformat__ = 'restructuredtext en'
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import errno
|
|
||||||
from time import sleep
|
|
||||||
from powerline.lib.monotonic import monotonic
|
|
||||||
|
|
||||||
from powerline.lib.inotify import INotify, INotifyError
|
|
||||||
|
|
||||||
|
|
||||||
class NoSuchDir(ValueError):
    """Raised when the directory to monitor does not exist or is unreadable."""
    pass


class DirTooLarge(ValueError):
    """Raised when a tree exceeds the inotify watch limit."""

    def __init__(self, bdir):
        message = (
            'The directory {0} is too large to monitor. Try increasing the value '
            'in /proc/sys/fs/inotify/max_user_watches'.format(bdir)
        )
        ValueError.__init__(self, message)
|
|
||||||
|
|
||||||
|
|
||||||
class INotifyTreeWatcher(INotify):
    """Watch a directory tree for changes using Linux inotify."""
    is_dummy = False

    def __init__(self, basedir):
        super(INotifyTreeWatcher, self).__init__()
        self.basedir = os.path.abspath(basedir)
        self.watch_tree()
        # Report "modified" on the first query after (re-)watching.
        self.modified = True

    def watch_tree(self):
        """(Re-)register watches for the whole tree under basedir."""
        self.watched_dirs = {}
        self.watched_rmap = {}
        try:
            self.add_watches(self.basedir)
        except OSError as e:
            if e.errno == errno.ENOSPC:
                raise DirTooLarge(self.basedir)

    def add_watches(self, base, top_level=True):
        """Add watches for *base* and all its descendant directories, recursively."""
        base = os.path.abspath(base)
        try:
            is_dir = self.add_watch(base)
        except OSError as e:
            if e.errno == errno.ENOENT:
                # The entry could have been deleted between listdir() and
                # add_watch(); only the top-level dir is an error.
                if top_level:
                    raise NoSuchDir('The dir {0} does not exist'.format(base))
                return
            if e.errno == errno.EACCES:
                # Silently ignore entries we cannot read, unless it is the
                # top-level dir.
                if top_level:
                    raise NoSuchDir('You do not have permission to monitor {0}'.format(base))
                return
            raise
        else:
            if is_dir:
                try:
                    entries = os.listdir(base)
                except OSError as e:
                    if e.errno in (errno.ENOTDIR, errno.ENOENT):
                        # Deleted/replaced between add_watch() and listdir().
                        if top_level:
                            raise NoSuchDir('The dir {0} does not exist'.format(base))
                        return
                    raise
                for entry in entries:
                    self.add_watches(os.path.join(base, entry), top_level=False)
            elif top_level:
                # The top-level path is a file, not a directory.
                raise NoSuchDir('The dir {0} does not exist'.format(base))

    def add_watch(self, path):
        """Register a single inotify watch; return True iff *path* is a directory."""
        import ctypes
        bpath = path if isinstance(path, bytes) else path.encode(self.fenc)
        wd = self._add_watch(
            self._inotify_fd, ctypes.c_char_p(bpath),
            # Ignore symlinks and watch only directories.
            self.DONT_FOLLOW | self.ONLYDIR |
            self.MODIFY | self.CREATE | self.DELETE |
            self.MOVE_SELF | self.MOVED_FROM | self.MOVED_TO |
            self.ATTRIB | self.MOVE_SELF | self.DELETE_SELF)
        if wd == -1:
            eno = ctypes.get_errno()
            if eno == errno.ENOTDIR:
                return False
            # NOTE(review): ``self.os`` presumably comes from the INotify base
            # class -- confirm; otherwise this should be the plain ``os`` module.
            raise OSError(eno, 'Failed to add watch for: {0}: {1}'.format(path, self.os.strerror(eno)))
        self.watched_dirs[path] = wd
        self.watched_rmap[wd] = path
        return True

    def process_event(self, wd, mask, cookie, name):
        if wd == -1 and (mask & self.Q_OVERFLOW):
            # Events were lost, so the state of every tracked dir is unknown:
            # rebuild all watches and report a modification.
            self.watch_tree()
            self.modified = True
            return
        path = self.watched_rmap.get(wd, None)
        if path is not None:
            self.modified = True
            if mask & self.CREATE:
                # A new sub-directory might have been created; monitor it too.
                try:
                    self.add_watch(os.path.join(path, name))
                except OSError as e:
                    if e.errno == errno.ENOENT:
                        # Deleted before add_watch() ran.
                        pass
                    elif e.errno == errno.ENOSPC:
                        raise DirTooLarge(self.basedir)
                    else:
                        raise

    def __call__(self):
        """Drain pending events; return True iff anything changed since last call."""
        self.read()
        ret = self.modified
        self.modified = False
        return ret
|
|
||||||
|
|
||||||
|
|
||||||
class DummyTreeWatcher(object):
    """Fallback watcher used when inotify is unavailable: never reports changes."""
    is_dummy = True

    def __init__(self, basedir):
        self.basedir = os.path.abspath(basedir)

    def __call__(self):
        return False
|
|
||||||
|
|
||||||
|
|
||||||
class TreeWatcher(object):
    """Cache of per-path tree watchers with expiry of unused entries.

    Calling the instance with a path answers "has this tree changed since the
    last query?", creating a watcher on first use and falling back to a dummy
    watcher when inotify cannot be used.
    """

    def __init__(self, expire_time=10):
        # expire_time is given in minutes; stored in seconds.
        self.watches = {}
        self.last_query_times = {}
        self.expire_time = expire_time * 60

    def watch(self, path, logger=None):
        """Create and register a watcher for *path*; dummy watcher on failure."""
        path = os.path.abspath(path)
        try:
            w = INotifyTreeWatcher(path)
        except (INotifyError, DirTooLarge) as e:
            if logger is not None:
                logger.warn('Failed to watch path: {0} with error: {1}'.format(path, e))
            w = DummyTreeWatcher(path)
        self.watches[path] = w
        return w

    def is_actually_watched(self, path):
        """Return True iff *path* has a real (non-dummy) watcher."""
        w = self.watches.get(path, None)
        return not getattr(w, 'is_dummy', True)

    def expire_old_queries(self):
        """Forget query timestamps older than expire_time."""
        pop = []
        now = monotonic()
        for path, lt in self.last_query_times.items():
            if now - lt > self.expire_time:
                pop.append(path)
        for path in pop:
            del self.last_query_times[path]

    def __call__(self, path, logger=None):
        """Return True if the tree at *path* changed (or its state is unknown)."""
        path = os.path.abspath(path)
        self.expire_old_queries()
        self.last_query_times[path] = monotonic()
        w = self.watches.get(path, None)
        if w is None:
            try:
                # Bug fix: forward *logger* so failures to create a watcher
                # are reported instead of being silently dropped.
                self.watch(path, logger=logger)
            except NoSuchDir:
                pass
            # First query for this path: report "changed" conservatively.
            return True
        try:
            return w()
        except DirTooLarge as e:
            if logger is not None:
                logger.warn(str(e))
            self.watches[path] = DummyTreeWatcher(path)
            return False
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Manual smoke test: monitor the directory given on the command line and
    # print a message whenever it changes.
    w = INotifyTreeWatcher(sys.argv[-1])
    w()
    print('Monitoring', sys.argv[-1], 'press Ctrl-C to stop')
    try:
        while True:
            if w():
                print(sys.argv[-1], 'changed')
            sleep(1)
    except KeyboardInterrupt:
        raise SystemExit(0)
|
|
|
@ -1,16 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
|
|
||||||
try:
|
|
||||||
from urllib.error import HTTPError
|
|
||||||
from urllib.request import urlopen
|
|
||||||
from urllib.parse import urlencode as urllib_urlencode # NOQA
|
|
||||||
except ImportError:
|
|
||||||
from urllib2 import urlopen, HTTPError # NOQA
|
|
||||||
from urllib import urlencode as urllib_urlencode # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
def urllib_read(url):
    """Fetch *url* (10s timeout) and return the body decoded as UTF-8.

    Returns None when the server answers with an HTTP error status.
    """
    try:
        return urlopen(url, timeout=10).read().decode('utf-8')
    except HTTPError:
        return None
|
|
|
@ -1,33 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
# Known VCS backends: (module name, marker entry, predicate used to test the
# marker).  Order matters: it is the probe order used by guess().
vcs_props = (
    ('git', '.git', os.path.exists),        # .git may be a file (worktrees/submodules)
    ('mercurial', '.hg', os.path.isdir),
    ('bzr', '.bzr', os.path.isdir),
)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_directories(path):
    """Yield *path* and each successive parent directory up to the root."""
    yield path
    while True:
        parent = os.path.dirname(path)
        # Stop at the filesystem root (dirname is a fixed point) or when a
        # relative path has no parent left (dirname returns '').
        if parent == path or not parent:
            break
        path = parent
        yield path
|
|
||||||
|
|
||||||
|
|
||||||
def guess(path):
    """Return a Repository object for the VCS controlling *path*, or None.

    Walks from *path* up to the filesystem root looking for a VCS marker
    (.git/.hg/.bzr per ``vcs_props``) and lazily imports the matching
    backend module, caching it in this module's globals.
    """
    for directory in generate_directories(path):
        for vcs, vcs_dir, check in vcs_props:
            if check(os.path.join(directory, vcs_dir)):
                try:
                    if vcs not in globals():
                        globals()[vcs] = getattr(__import__('powerline.lib.vcs', fromlist=[vcs]), vcs)
                    return globals()[vcs].Repository(directory)
                except Exception:
                    # Bug fix: was a bare ``except:`` which also swallowed
                    # SystemExit/KeyboardInterrupt.  Best effort is kept: a
                    # broken or unavailable backend must not crash the caller.
                    pass
    return None
|
|
|
@ -1,64 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import absolute_import, unicode_literals, division, print_function
|
|
||||||
|
|
||||||
import sys
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
from bzrlib import (branch, workingtree, status, library_state, trace, ui)
|
|
||||||
|
|
||||||
|
|
||||||
class CoerceIO(StringIO):
    """StringIO that transparently decodes bytes writes as UTF-8 (replacing errors)."""

    def write(self, arg):
        if isinstance(arg, bytes):
            arg = arg.decode('utf-8', 'replace')
        return super(CoerceIO, self).write(arg)
|
|
||||||
|
|
||||||
|
|
||||||
class Repository(object):
    """Bazaar repository wrapper used by powerline VCS segments."""

    def __init__(self, directory):
        if isinstance(directory, bytes):
            directory = directory.decode(sys.getfilesystemencoding() or sys.getdefaultencoding() or 'utf-8')
        self.directory = directory
        self.state = library_state.BzrLibraryState(ui=ui.SilentUIFactory, trace=trace.DefaultConfig())

    def status(self, path=None):
        '''Return status of repository or file.

        Without file argument: returns status of the repository:

        :"D?": dirty (tracked modified files: added, removed, deleted, modified),
        :"?U": untracked-dirty (added, but not tracked files)
        :None: clean (status is empty)

        With file argument: returns status of this file: The status codes are
        those returned by bzr status -S
        '''
        try:
            return self._status(path)
        except Exception:
            # Bug fix: was a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt.  Any bzrlib failure is still
            # reported as "no status" (None).
            return None

    def _status(self, path):
        # Run the equivalent of ``bzr status -S`` and parse its short output.
        buf = CoerceIO()
        w = workingtree.WorkingTree.open(self.directory)
        status.show_tree_status(w, specific_files=[path] if path else None, to_file=buf, short=True)
        raw = buf.getvalue()
        if not raw.strip():
            return None
        if path:
            return raw[:2]
        dirtied = untracked = ' '
        for line in raw.splitlines():
            if len(line) > 1 and line[1] in 'ACDMRIN':
                dirtied = 'D'
            elif line and line[0] == '?':
                untracked = 'U'
        ans = dirtied + untracked
        return ans if ans.strip() else None

    def branch(self):
        """Return the branch nick, or None when it cannot be determined."""
        try:
            b = branch.Branch.open(self.directory)
            return b._get_nick(local=True) or None
        except Exception:
            # Bug fix: narrowed from a bare ``except:`` (see status()).
            return None
|
|
|
@ -1,143 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
try:
|
|
||||||
import pygit2 as git
|
|
||||||
|
|
||||||
class Repository(object):
    """Git repository wrapper backed by pygit2 (libgit2)."""

    # Bug fix: was ``('directory')`` -- a plain string, not a tuple.  It only
    # worked because a str __slots__ is treated as a single slot name; the
    # subprocess-based sibling class already uses the tuple form.
    __slots__ = ('directory',)

    def __init__(self, directory):
        self.directory = directory

    def _repo(self):
        # A fresh Repository object per call: a cached one returns stale
        # results after the on-disk repository changes.
        return git.Repository(self.directory)

    def status(self, path=None):
        '''Return status of repository or file.

        Without file argument: returns status of the repository:

        :First column: working directory status (D: dirty / space)
        :Second column: index status (I: index dirty / space)
        :Third column: presense of untracked files (U: untracked files / space)
        :None: repository clean

        With file argument: returns status of this file. Output is
        equivalent to the first two columns of "git status --porcelain"
        (except for merge statuses as they are not supported by libgit2).
        '''
        if path:
            try:
                status = self._repo().status_file(path)
            except (KeyError, ValueError):
                return None

            if status == git.GIT_STATUS_CURRENT:
                return None
            else:
                if status & git.GIT_STATUS_WT_NEW:
                    return '??'
                if status & git.GIT_STATUS_IGNORED:
                    return '!!'

                if status & git.GIT_STATUS_INDEX_NEW:
                    index_status = 'A'
                elif status & git.GIT_STATUS_INDEX_DELETED:
                    index_status = 'D'
                elif status & git.GIT_STATUS_INDEX_MODIFIED:
                    index_status = 'M'
                else:
                    index_status = ' '

                if status & git.GIT_STATUS_WT_DELETED:
                    wt_status = 'D'
                elif status & git.GIT_STATUS_WT_MODIFIED:
                    wt_status = 'M'
                else:
                    wt_status = ' '

                return index_status + wt_status
        else:
            wt_column = ' '
            index_column = ' '
            untracked_column = ' '
            for status in self._repo().status().values():
                if status & git.GIT_STATUS_WT_NEW:
                    untracked_column = 'U'
                    continue

                if status & (git.GIT_STATUS_WT_DELETED
                             | git.GIT_STATUS_WT_MODIFIED):
                    wt_column = 'D'

                if status & (git.GIT_STATUS_INDEX_NEW
                             | git.GIT_STATUS_INDEX_MODIFIED
                             | git.GIT_STATUS_INDEX_DELETED):
                    index_column = 'I'
            r = wt_column + index_column + untracked_column
            return r if r != '   ' else None

    def branch(self):
        """Return the checked-out branch name, or '[DETACHED HEAD]'."""
        try:
            ref = self._repo().lookup_reference('HEAD')
        except KeyError:
            return None

        try:
            target = ref.target
        except ValueError:
            return '[DETACHED HEAD]'

        if target.startswith('refs/heads/'):
            return target[11:]
        else:
            return '[DETACHED HEAD]'
|
|
||||||
except ImportError:
|
|
||||||
from subprocess import Popen, PIPE
|
|
||||||
|
|
||||||
def readlines(cmd, cwd):
    """Run *cmd* in *cwd* and yield its stdout lines, decoded and newline-stripped.

    stderr is discarded; the command is executed without a shell.
    """
    proc = Popen(cmd, shell=False, stdout=PIPE, stderr=PIPE, cwd=cwd)
    proc.stderr.close()
    with proc.stdout:
        for raw_line in proc.stdout:
            yield raw_line[:-1].decode('utf-8')
|
|
||||||
|
|
||||||
class Repository(object):
    """Git repository wrapper that shells out to the ``git`` executable."""

    __slots__ = ('directory',)

    def __init__(self, directory):
        self.directory = directory

    def _gitcmd(self, *args):
        # Run git with the repository directory as the working directory.
        return readlines(('git',) + args, self.directory)

    def status(self, path=None):
        """Return porcelain-style status of the repository or of one file."""
        if path:
            try:
                return next(self._gitcmd('status', '--porcelain', '--ignored', '--', path))[:2]
            except StopIteration:
                # No output: the file is clean.
                return None
        else:
            wt_column = ' '
            index_column = ' '
            untracked_column = ' '
            for line in self._gitcmd('status', '--porcelain'):
                if line[0] == '?':
                    untracked_column = 'U'
                    continue
                elif line[0] == '!':
                    # Ignored entries do not make the tree dirty.
                    continue

                if line[0] != ' ':
                    index_column = 'I'

                if line[1] != ' ':
                    wt_column = 'D'

            r = wt_column + index_column + untracked_column
            return r if r != '   ' else None

    def branch(self):
        """Return the name of the current branch, or None."""
        for line in self._gitcmd('branch', '-l'):
            if line[0] == '*':
                return line[2:]
        return None
|
|
|
@ -1,51 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from mercurial import hg, ui, match
|
|
||||||
|
|
||||||
|
|
||||||
class Repository(object):
    """Mercurial repository wrapper using the ``mercurial`` Python API."""

    __slots__ = ('directory', 'ui')

    # Per-file status codes, in the order mercurial's status tuple reports them.
    statuses = 'MARDUI'
    # Bitmask contributed by each status group (1 = tracked-dirty, 2 = untracked).
    repo_statuses = (1, 1, 1, 1, 2)
    # Rendered repository status indexed by the combined bitmask.
    repo_statuses_str = (None, 'D ', ' U', 'DU')

    def __init__(self, directory):
        self.directory = directory
        self.ui = ui.ui()

    def _repo(self):
        # Cannot create this object once and reuse it: after the repository
        # updates, a cached object emits invalid results.
        return hg.repository(self.ui, self.directory)

    def status(self, path=None):
        '''Return status of repository or file.

        Without file argument: returns status of the repository:

        :"D?": dirty (tracked modified files: added, removed, deleted, modified),
        :"?U": untracked-dirty (added, but not tracked files)
        :None: clean (status is empty)

        With file argument: returns status of this file: "M"odified, "A"dded,
        "R"emoved, "D"eleted (removed from filesystem, but still tracked),
        "U"nknown, "I"gnored, (None)Clean.
        '''
        repo = self._repo()
        if path:
            m = match.match(None, None, [path], exact=True)
            statuses = repo.status(match=m, unknown=True, ignored=True)
            for status, paths in zip(self.statuses, statuses):
                if paths:
                    return status
            return None
        else:
            resulting_status = 0
            for status, paths in zip(self.repo_statuses, repo.status(unknown=True)):
                if paths:
                    resulting_status |= status
            return self.repo_statuses_str[resulting_status]

    def branch(self):
        """Return the name of the currently active branch."""
        return self._repo().dirstate.branch()
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,59 +0,0 @@
|
||||||
# vim:fileencoding=utf-8:noet
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from inspect import ArgSpec, getargspec
|
|
||||||
from powerline.lib.threaded import ThreadedSegment, KwThreadedSegment
|
|
||||||
from itertools import count
|
|
||||||
|
|
||||||
def getconfigargspec(obj):
    """Build an ArgSpec describing the configuration keys a segment accepts.

    For ThreadedSegment instances the spec is assembled from the relevant
    methods (render/set_state, plus key/render_one for KwThreadedSegment);
    for plain callables it is derived from the callable's own signature.
    Implicit parameters (self, pl, segment_info where applicable, and the
    trailing value argument of render methods) are filtered out.
    """
    if isinstance(obj, ThreadedSegment):
        args = ['interval']
        defaults = [getattr(obj, 'interval', 1)]
        if obj.update_first:
            args.append('update_first')
            defaults.append(True)
        methods = ['render', 'set_state']
        if isinstance(obj, KwThreadedSegment):
            methods += ['key', 'render_one']

        for method in methods:
            if hasattr(obj, method):
                # Note: on <python-2.6 getargspec may return a plain tuple,
                # not an ArgSpec instance.
                argspec = getargspec(getattr(obj, method))
                # Walk arguments from the right so index i lines up with the
                # defaults tuple (which is right-aligned).
                for i, arg in zip(count(1), reversed(argspec.args)):
                    if (arg == 'self' or
                            (arg == 'segment_info' and
                                getattr(obj, 'powerline_requires_segment_info', None)) or
                            (arg == 'pl') or
                            (method.startswith('render') and (1 if argspec.args[0] == 'self' else 0) + i == len(argspec.args)) or
                            arg in args):
                        continue
                    if argspec.defaults and len(argspec.defaults) >= i:
                        default = argspec.defaults[-i]
                        defaults.append(default)
                        args.append(arg)
                    else:
                        # Required argument: must precede all defaulted ones.
                        args.insert(0, arg)
        argspec = ArgSpec(args=args, varargs=None, keywords=None, defaults=tuple(defaults))
    else:
        # Unwrap decorated segments back to the original function when the
        # decorator recorded it.
        obj = getattr(obj, 'powerline_origin', obj)

        argspec = getargspec(obj)
        args = []
        defaults = []
        for i, arg in zip(count(1), reversed(argspec.args)):
            if ((arg == 'segment_info' and
                    getattr(obj, 'powerline_requires_segment_info', None)) or
                    arg == 'pl'):
                continue
            if argspec.defaults and len(argspec.defaults) >= i:
                default = argspec.defaults[-i]
                defaults.append(default)
                args.append(arg)
            else:
                args.insert(0, arg)
        argspec = ArgSpec(args=args, varargs=argspec.varargs, keywords=argspec.keywords, defaults=tuple(defaults))

    return argspec
|
|
|
@ -1,17 +0,0 @@
|
||||||
__version__ = '3.10'
|
|
||||||
|
|
||||||
|
|
||||||
from .loader import Loader
|
|
||||||
|
|
||||||
|
|
||||||
def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.

    Returns a ``(data, haserrors)`` pair; the loader is always disposed.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data(), loader.haserrors
    finally:
        loader.dispose()
|
|
|
@ -1,117 +0,0 @@
|
||||||
__all__ = ['Composer', 'ComposerError']
|
|
||||||
|
|
||||||
from .error import MarkedError
|
|
||||||
from .events import * # NOQA
|
|
||||||
from .nodes import * # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
class ComposerError(MarkedError):
    """Raised when the event stream cannot be composed into a node tree."""
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class Composer:
    """Mixin turning a parser event stream into a node tree.

    Expects ``check_event``/``get_event`` (parser) and
    ``descend_resolver``/``ascend_resolver``/``resolve`` (resolver) to be
    provided by sibling classes.
    """

    def __init__(self):
        pass

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # Any more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document, if any.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # The stream must not contain a second document.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document",
                    event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        return node

    def compose_node(self, parent, index):
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node()
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node()
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node()
        self.ascend_resolver()
        return node

    def compose_scalar_node(self):
        event = self.get_event()
        tag = event.tag
        if tag is None or tag == '!':
            tag = self.resolve(ScalarNode, event.value, event.implicit, event.start_mark)
        return ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)

    def compose_sequence_node(self):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node

    def compose_mapping_node(self):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        while not self.check_event(MappingEndEvent):
            # Duplicate keys are deliberately not rejected here; values are
            # stored as (key, value) pairs, not a dict.
            item_key = self.compose_node(node, None)
            item_value = self.compose_node(node, item_key)
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
|
|
|
@ -1,274 +0,0 @@
|
||||||
__all__ = ['BaseConstructor', 'Constructor', 'ConstructorError']
|
|
||||||
|
|
||||||
from .error import MarkedError
|
|
||||||
from .nodes import * # NOQA
|
|
||||||
from .markedvalue import gen_marked_value
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import types
|
|
||||||
|
|
||||||
from functools import wraps
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __builtin__ import unicode
|
|
||||||
except ImportError:
|
|
||||||
unicode = str # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
def marked(func):
    """Decorator: wrap *func*'s return value with its node's start mark."""
    @wraps(func)
    def wrapper(self, node, *args, **kwargs):
        return gen_marked_value(func(self, node, *args, **kwargs), node.start_mark)
    return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
class ConstructorError(MarkedError):
    """Raised when a node tree cannot be turned into Python objects."""
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class BaseConstructor:
|
|
||||||
yaml_constructors = {}
|
|
||||||
|
|
||||||
def __init__(self):
    """Initialize per-document construction bookkeeping."""
    self.constructed_objects = {}
    self.state_generators = []
    self.deep_construct = False

def check_data(self):
    # Any more documents available?
    return self.check_node()

def get_data(self):
    # Construct and return the next document, if any.
    if self.check_node():
        return self.construct_document(self.get_node())

def get_single_data(self):
    # Ensure the stream contains a single document and construct it.
    node = self.get_single_node()
    if node is not None:
        return self.construct_document(node)
    return None

def construct_document(self, node):
    """Construct the document rooted at *node*, draining deferred generators."""
    data = self.construct_object(node)
    # Generators registered during construction may register more; keep
    # draining until none remain.
    while self.state_generators:
        pending = self.state_generators
        self.state_generators = []
        for generator in pending:
            for dummy in generator:
                pass
    self.constructed_objects = {}
    self.deep_construct = False
    return data
|
|
||||||
|
|
||||||
def construct_object(self, node, deep=False):
|
|
||||||
if node in self.constructed_objects:
|
|
||||||
return self.constructed_objects[node]
|
|
||||||
if deep:
|
|
||||||
old_deep = self.deep_construct
|
|
||||||
self.deep_construct = True
|
|
||||||
constructor = None
|
|
||||||
tag_suffix = None
|
|
||||||
if node.tag in self.yaml_constructors:
|
|
||||||
constructor = self.yaml_constructors[node.tag]
|
|
||||||
else:
|
|
||||||
raise ConstructorError(None, None, 'no constructor for tag %s' % node.tag)
|
|
||||||
if tag_suffix is None:
|
|
||||||
data = constructor(self, node)
|
|
||||||
else:
|
|
||||||
data = constructor(self, tag_suffix, node)
|
|
||||||
if isinstance(data, types.GeneratorType):
|
|
||||||
generator = data
|
|
||||||
data = next(generator)
|
|
||||||
if self.deep_construct:
|
|
||||||
for dummy in generator:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
self.state_generators.append(generator)
|
|
||||||
self.constructed_objects[node] = data
|
|
||||||
if deep:
|
|
||||||
self.deep_construct = old_deep
|
|
||||||
return data
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_scalar(self, node):
|
|
||||||
if not isinstance(node, ScalarNode):
|
|
||||||
raise ConstructorError(None, None,
|
|
||||||
"expected a scalar node, but found %s" % node.id,
|
|
||||||
node.start_mark)
|
|
||||||
return node.value
|
|
||||||
|
|
||||||
def construct_sequence(self, node, deep=False):
|
|
||||||
if not isinstance(node, SequenceNode):
|
|
||||||
raise ConstructorError(None, None,
|
|
||||||
"expected a sequence node, but found %s" % node.id,
|
|
||||||
node.start_mark)
|
|
||||||
return [self.construct_object(child, deep=deep)
|
|
||||||
for child in node.value]
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_mapping(self, node, deep=False):
|
|
||||||
if not isinstance(node, MappingNode):
|
|
||||||
raise ConstructorError(None, None,
|
|
||||||
"expected a mapping node, but found %s" % node.id,
|
|
||||||
node.start_mark)
|
|
||||||
mapping = {}
|
|
||||||
for key_node, value_node in node.value:
|
|
||||||
key = self.construct_object(key_node, deep=deep)
|
|
||||||
if not isinstance(key, collections.Hashable):
|
|
||||||
self.echoerr('While constructing a mapping', node.start_mark,
|
|
||||||
'found unhashable key', key_node.start_mark)
|
|
||||||
continue
|
|
||||||
elif type(key.value) != unicode:
|
|
||||||
self.echoerr('Error while constructing a mapping', node.start_mark,
|
|
||||||
'found key that is not a string', key_node.start_mark)
|
|
||||||
continue
|
|
||||||
elif key in mapping:
|
|
||||||
self.echoerr('Error while constructing a mapping', node.start_mark,
|
|
||||||
'found duplicate key', key_node.start_mark)
|
|
||||||
continue
|
|
||||||
value = self.construct_object(value_node, deep=deep)
|
|
||||||
mapping[key] = value
|
|
||||||
return mapping
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def add_constructor(cls, tag, constructor):
|
|
||||||
if not 'yaml_constructors' in cls.__dict__:
|
|
||||||
cls.yaml_constructors = cls.yaml_constructors.copy()
|
|
||||||
cls.yaml_constructors[tag] = constructor
|
|
||||||
|
|
||||||
|
|
||||||
class Constructor(BaseConstructor):
|
|
||||||
def construct_scalar(self, node):
|
|
||||||
if isinstance(node, MappingNode):
|
|
||||||
for key_node, value_node in node.value:
|
|
||||||
if key_node.tag == 'tag:yaml.org,2002:value':
|
|
||||||
return self.construct_scalar(value_node)
|
|
||||||
return BaseConstructor.construct_scalar(self, node)
|
|
||||||
|
|
||||||
def flatten_mapping(self, node):
|
|
||||||
merge = []
|
|
||||||
index = 0
|
|
||||||
while index < len(node.value):
|
|
||||||
key_node, value_node = node.value[index]
|
|
||||||
if key_node.tag == 'tag:yaml.org,2002:merge':
|
|
||||||
del node.value[index]
|
|
||||||
if isinstance(value_node, MappingNode):
|
|
||||||
self.flatten_mapping(value_node)
|
|
||||||
merge.extend(value_node.value)
|
|
||||||
elif isinstance(value_node, SequenceNode):
|
|
||||||
submerge = []
|
|
||||||
for subnode in value_node.value:
|
|
||||||
if not isinstance(subnode, MappingNode):
|
|
||||||
raise ConstructorError("while constructing a mapping",
|
|
||||||
node.start_mark,
|
|
||||||
"expected a mapping for merging, but found %s"
|
|
||||||
% subnode.id, subnode.start_mark)
|
|
||||||
self.flatten_mapping(subnode)
|
|
||||||
submerge.append(subnode.value)
|
|
||||||
submerge.reverse()
|
|
||||||
for value in submerge:
|
|
||||||
merge.extend(value)
|
|
||||||
else:
|
|
||||||
raise ConstructorError("while constructing a mapping", node.start_mark,
|
|
||||||
"expected a mapping or list of mappings for merging, but found %s"
|
|
||||||
% value_node.id, value_node.start_mark)
|
|
||||||
elif key_node.tag == 'tag:yaml.org,2002:value':
|
|
||||||
key_node.tag = 'tag:yaml.org,2002:str'
|
|
||||||
index += 1
|
|
||||||
else:
|
|
||||||
index += 1
|
|
||||||
if merge:
|
|
||||||
node.value = merge + node.value
|
|
||||||
|
|
||||||
def construct_mapping(self, node, deep=False):
|
|
||||||
if isinstance(node, MappingNode):
|
|
||||||
self.flatten_mapping(node)
|
|
||||||
return BaseConstructor.construct_mapping(self, node, deep=deep)
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_yaml_null(self, node):
|
|
||||||
self.construct_scalar(node)
|
|
||||||
return None
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_yaml_bool(self, node):
|
|
||||||
value = self.construct_scalar(node).value
|
|
||||||
return bool(value)
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_yaml_int(self, node):
|
|
||||||
value = self.construct_scalar(node).value
|
|
||||||
sign = +1
|
|
||||||
if value[0] == '-':
|
|
||||||
sign = -1
|
|
||||||
if value[0] in '+-':
|
|
||||||
value = value[1:]
|
|
||||||
if value == '0':
|
|
||||||
return 0
|
|
||||||
else:
|
|
||||||
return sign * int(value)
|
|
||||||
|
|
||||||
@marked
|
|
||||||
def construct_yaml_float(self, node):
|
|
||||||
value = self.construct_scalar(node).value
|
|
||||||
sign = +1
|
|
||||||
if value[0] == '-':
|
|
||||||
sign = -1
|
|
||||||
if value[0] in '+-':
|
|
||||||
value = value[1:]
|
|
||||||
else:
|
|
||||||
return sign * float(value)
|
|
||||||
|
|
||||||
def construct_yaml_str(self, node):
|
|
||||||
return self.construct_scalar(node)
|
|
||||||
|
|
||||||
def construct_yaml_seq(self, node):
|
|
||||||
data = gen_marked_value([], node.start_mark)
|
|
||||||
yield data
|
|
||||||
data.extend(self.construct_sequence(node))
|
|
||||||
|
|
||||||
def construct_yaml_map(self, node):
|
|
||||||
data = gen_marked_value({}, node.start_mark)
|
|
||||||
yield data
|
|
||||||
value = self.construct_mapping(node)
|
|
||||||
data.update(value)
|
|
||||||
|
|
||||||
def construct_undefined(self, node):
|
|
||||||
raise ConstructorError(None, None,
|
|
||||||
"could not determine a constructor for the tag %r" % node.tag,
|
|
||||||
node.start_mark)
|
|
||||||
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:null',
|
|
||||||
Constructor.construct_yaml_null)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:bool',
|
|
||||||
Constructor.construct_yaml_bool)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:int',
|
|
||||||
Constructor.construct_yaml_int)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:float',
|
|
||||||
Constructor.construct_yaml_float)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:str',
|
|
||||||
Constructor.construct_yaml_str)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:seq',
|
|
||||||
Constructor.construct_yaml_seq)
|
|
||||||
|
|
||||||
Constructor.add_constructor(
|
|
||||||
'tag:yaml.org,2002:map',
|
|
||||||
Constructor.construct_yaml_map)
|
|
||||||
|
|
||||||
Constructor.add_constructor(None,
|
|
||||||
Constructor.construct_undefined)
|
|
|
@ -1,99 +0,0 @@
|
||||||
__all__ = ['Mark', 'MarkedError', 'echoerr', 'NON_PRINTABLE']
|
|
||||||
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __builtin__ import unichr
|
|
||||||
except ImportError:
|
|
||||||
unichr = chr # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
NON_PRINTABLE = re.compile('[^\t\n\x20-\x7E' + unichr(0x85) + (unichr(0xA0) + '-' + unichr(0xD7FF)) + (unichr(0xE000) + '-' + unichr(0xFFFD)) + ']')
|
|
||||||
|
|
||||||
|
|
||||||
def repl(s):
|
|
||||||
return '<x%04x>' % ord(s.group())
|
|
||||||
|
|
||||||
|
|
||||||
def strtrans(s):
|
|
||||||
return NON_PRINTABLE.sub(repl, s.replace('\t', '>---'))
|
|
||||||
|
|
||||||
|
|
||||||
class Mark:
|
|
||||||
def __init__(self, name, line, column, buffer, pointer):
|
|
||||||
self.name = name
|
|
||||||
self.line = line
|
|
||||||
self.column = column
|
|
||||||
self.buffer = buffer
|
|
||||||
self.pointer = pointer
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return Mark(self.name, self.line, self.column, self.buffer, self.pointer)
|
|
||||||
|
|
||||||
def get_snippet(self, indent=4, max_length=75):
|
|
||||||
if self.buffer is None:
|
|
||||||
return None
|
|
||||||
head = ''
|
|
||||||
start = self.pointer
|
|
||||||
while start > 0 and self.buffer[start - 1] not in '\0\n':
|
|
||||||
start -= 1
|
|
||||||
if self.pointer - start > max_length / 2 - 1:
|
|
||||||
head = ' ... '
|
|
||||||
start += 5
|
|
||||||
break
|
|
||||||
tail = ''
|
|
||||||
end = self.pointer
|
|
||||||
while end < len(self.buffer) and self.buffer[end] not in '\0\n':
|
|
||||||
end += 1
|
|
||||||
if end - self.pointer > max_length / 2 - 1:
|
|
||||||
tail = ' ... '
|
|
||||||
end -= 5
|
|
||||||
break
|
|
||||||
snippet = [self.buffer[start:self.pointer], self.buffer[self.pointer], self.buffer[self.pointer + 1:end]]
|
|
||||||
snippet = [strtrans(s) for s in snippet]
|
|
||||||
return ' ' * indent + head + ''.join(snippet) + tail + '\n' \
|
|
||||||
+ ' ' * (indent + len(head) + len(snippet[0])) + '^'
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
snippet = self.get_snippet()
|
|
||||||
where = " in \"%s\", line %d, column %d" \
|
|
||||||
% (self.name, self.line + 1, self.column + 1)
|
|
||||||
if snippet is not None:
|
|
||||||
where += ":\n" + snippet
|
|
||||||
if type(where) is str:
|
|
||||||
return where
|
|
||||||
else:
|
|
||||||
return where.encode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
def echoerr(*args, **kwargs):
|
|
||||||
sys.stderr.write('\n')
|
|
||||||
sys.stderr.write(format_error(*args, **kwargs) + '\n')
|
|
||||||
|
|
||||||
|
|
||||||
def format_error(context=None, context_mark=None, problem=None, problem_mark=None, note=None):
|
|
||||||
lines = []
|
|
||||||
if context is not None:
|
|
||||||
lines.append(context)
|
|
||||||
if context_mark is not None \
|
|
||||||
and (problem is None or problem_mark is None
|
|
||||||
or context_mark.name != problem_mark.name
|
|
||||||
or context_mark.line != problem_mark.line
|
|
||||||
or context_mark.column != problem_mark.column):
|
|
||||||
lines.append(str(context_mark))
|
|
||||||
if problem is not None:
|
|
||||||
lines.append(problem)
|
|
||||||
if problem_mark is not None:
|
|
||||||
lines.append(str(problem_mark))
|
|
||||||
if note is not None:
|
|
||||||
lines.append(note)
|
|
||||||
return '\n'.join(lines)
|
|
||||||
|
|
||||||
|
|
||||||
class MarkedError(Exception):
|
|
||||||
def __init__(self, context=None, context_mark=None,
|
|
||||||
problem=None, problem_mark=None, note=None):
|
|
||||||
Exception.__init__(self, format_error(context, context_mark, problem,
|
|
||||||
problem_mark, note))
|
|
|
@ -1,97 +0,0 @@
|
||||||
# Abstract classes.
|
|
||||||
|
|
||||||
|
|
||||||
class Event(object):
|
|
||||||
def __init__(self, start_mark=None, end_mark=None):
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
attributes = [key for key in ['implicit', 'value']
|
|
||||||
if hasattr(self, key)]
|
|
||||||
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
|
|
||||||
for key in attributes])
|
|
||||||
return '%s(%s)' % (self.__class__.__name__, arguments)
|
|
||||||
|
|
||||||
|
|
||||||
class NodeEvent(Event):
|
|
||||||
def __init__(self, start_mark=None, end_mark=None):
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
|
|
||||||
|
|
||||||
class CollectionStartEvent(NodeEvent):
|
|
||||||
def __init__(self, implicit, start_mark=None, end_mark=None,
|
|
||||||
flow_style=None):
|
|
||||||
self.tag = None
|
|
||||||
self.implicit = implicit
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.flow_style = flow_style
|
|
||||||
|
|
||||||
|
|
||||||
class CollectionEndEvent(Event):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Implementations.
|
|
||||||
|
|
||||||
|
|
||||||
class StreamStartEvent(Event):
|
|
||||||
def __init__(self, start_mark=None, end_mark=None, encoding=None):
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.encoding = encoding
|
|
||||||
|
|
||||||
|
|
||||||
class StreamEndEvent(Event):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DocumentStartEvent(Event):
|
|
||||||
def __init__(self, start_mark=None, end_mark=None,
|
|
||||||
explicit=None, version=None, tags=None):
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.explicit = explicit
|
|
||||||
self.version = version
|
|
||||||
self.tags = tags
|
|
||||||
|
|
||||||
|
|
||||||
class DocumentEndEvent(Event):
|
|
||||||
def __init__(self, start_mark=None, end_mark=None,
|
|
||||||
explicit=None):
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.explicit = explicit
|
|
||||||
|
|
||||||
|
|
||||||
class AliasEvent(NodeEvent):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ScalarEvent(NodeEvent):
|
|
||||||
def __init__(self, implicit, value,
|
|
||||||
start_mark=None, end_mark=None, style=None):
|
|
||||||
self.tag = None
|
|
||||||
self.implicit = implicit
|
|
||||||
self.value = value
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.style = style
|
|
||||||
|
|
||||||
|
|
||||||
class SequenceStartEvent(CollectionStartEvent):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class SequenceEndEvent(CollectionEndEvent):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class MappingStartEvent(CollectionStartEvent):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class MappingEndEvent(CollectionEndEvent):
|
|
||||||
pass
|
|
|
@ -1,24 +0,0 @@
|
||||||
__all__ = ['Loader']
|
|
||||||
|
|
||||||
from .reader import Reader
|
|
||||||
from .scanner import Scanner
|
|
||||||
from .parser import Parser
|
|
||||||
from .composer import Composer
|
|
||||||
from .constructor import Constructor
|
|
||||||
from .resolver import Resolver
|
|
||||||
from .error import echoerr
|
|
||||||
|
|
||||||
|
|
||||||
class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
|
|
||||||
def __init__(self, stream):
|
|
||||||
Reader.__init__(self, stream)
|
|
||||||
Scanner.__init__(self)
|
|
||||||
Parser.__init__(self)
|
|
||||||
Composer.__init__(self)
|
|
||||||
Constructor.__init__(self)
|
|
||||||
Resolver.__init__(self)
|
|
||||||
self.haserrors = False
|
|
||||||
|
|
||||||
def echoerr(self, *args, **kwargs):
|
|
||||||
echoerr(*args, **kwargs)
|
|
||||||
self.haserrors = True
|
|
|
@ -1,83 +0,0 @@
|
||||||
__all__ = ['gen_marked_value', 'MarkedValue']
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __builtin__ import unicode
|
|
||||||
except ImportError:
|
|
||||||
unicode = str
|
|
||||||
|
|
||||||
|
|
||||||
def gen_new(cls):
|
|
||||||
def __new__(arg_cls, value, mark):
|
|
||||||
r = super(arg_cls, arg_cls).__new__(arg_cls, value)
|
|
||||||
r.mark = mark
|
|
||||||
r.value = value
|
|
||||||
return r
|
|
||||||
return __new__
|
|
||||||
|
|
||||||
|
|
||||||
class MarkedUnicode(unicode):
|
|
||||||
__new__ = gen_new(unicode)
|
|
||||||
|
|
||||||
def _proc_partition(self, part_result):
|
|
||||||
pointdiff = 1
|
|
||||||
r = []
|
|
||||||
for s in part_result:
|
|
||||||
mark = self.mark.copy()
|
|
||||||
# XXX Does not work properly with escaped strings, but this requires
|
|
||||||
# saving much more information in mark.
|
|
||||||
mark.column += pointdiff
|
|
||||||
mark.pointer += pointdiff
|
|
||||||
r.append(MarkedUnicode(s, mark))
|
|
||||||
pointdiff += len(s)
|
|
||||||
return tuple(r)
|
|
||||||
|
|
||||||
def rpartition(self, sep):
|
|
||||||
return self._proc_partition(super(MarkedUnicode, self).rpartition(sep))
|
|
||||||
|
|
||||||
def partition(self, sep):
|
|
||||||
return self._proc_partition(super(MarkedUnicode, self).partition(sep))
|
|
||||||
|
|
||||||
|
|
||||||
class MarkedInt(int):
|
|
||||||
__new__ = gen_new(int)
|
|
||||||
|
|
||||||
|
|
||||||
class MarkedFloat(float):
|
|
||||||
__new__ = gen_new(float)
|
|
||||||
|
|
||||||
|
|
||||||
class MarkedValue:
|
|
||||||
def __init__(self, value, mark):
|
|
||||||
self.mark = mark
|
|
||||||
self.value = value
|
|
||||||
|
|
||||||
|
|
||||||
specialclasses = {
|
|
||||||
unicode: MarkedUnicode,
|
|
||||||
int: MarkedInt,
|
|
||||||
float: MarkedFloat,
|
|
||||||
}
|
|
||||||
|
|
||||||
classcache = {}
|
|
||||||
|
|
||||||
|
|
||||||
def gen_marked_value(value, mark, use_special_classes=True):
|
|
||||||
if use_special_classes and value.__class__ in specialclasses:
|
|
||||||
Marked = specialclasses[value.__class__]
|
|
||||||
elif value.__class__ in classcache:
|
|
||||||
Marked = classcache[value.__class__]
|
|
||||||
else:
|
|
||||||
class Marked(MarkedValue):
|
|
||||||
for func in value.__class__.__dict__:
|
|
||||||
if func not in set(('__init__', '__new__', '__getattribute__')):
|
|
||||||
if func in set(('__eq__',)):
|
|
||||||
# HACK to make marked dictionaries always work
|
|
||||||
exec (('def {0}(self, *args):\n'
|
|
||||||
' return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])').format(func))
|
|
||||||
else:
|
|
||||||
exec (('def {0}(self, *args, **kwargs):\n'
|
|
||||||
' return self.value.{0}(*args, **kwargs)\n').format(func))
|
|
||||||
classcache[value.__class__] = Marked
|
|
||||||
|
|
||||||
return Marked(value, mark)
|
|
|
@ -1,53 +0,0 @@
|
||||||
class Node(object):
|
|
||||||
def __init__(self, tag, value, start_mark, end_mark):
|
|
||||||
self.tag = tag
|
|
||||||
self.value = value
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
value = self.value
|
|
||||||
#if isinstance(value, list):
|
|
||||||
# if len(value) == 0:
|
|
||||||
# value = '<empty>'
|
|
||||||
# elif len(value) == 1:
|
|
||||||
# value = '<1 item>'
|
|
||||||
# else:
|
|
||||||
# value = '<%d items>' % len(value)
|
|
||||||
#else:
|
|
||||||
# if len(value) > 75:
|
|
||||||
# value = repr(value[:70]+u' ... ')
|
|
||||||
# else:
|
|
||||||
# value = repr(value)
|
|
||||||
value = repr(value)
|
|
||||||
return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
|
|
||||||
|
|
||||||
|
|
||||||
class ScalarNode(Node):
|
|
||||||
id = 'scalar'
|
|
||||||
|
|
||||||
def __init__(self, tag, value,
|
|
||||||
start_mark=None, end_mark=None, style=None):
|
|
||||||
self.tag = tag
|
|
||||||
self.value = value
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.style = style
|
|
||||||
|
|
||||||
|
|
||||||
class CollectionNode(Node):
|
|
||||||
def __init__(self, tag, value,
|
|
||||||
start_mark=None, end_mark=None, flow_style=None):
|
|
||||||
self.tag = tag
|
|
||||||
self.value = value
|
|
||||||
self.start_mark = start_mark
|
|
||||||
self.end_mark = end_mark
|
|
||||||
self.flow_style = flow_style
|
|
||||||
|
|
||||||
|
|
||||||
class SequenceNode(CollectionNode):
|
|
||||||
id = 'sequence'
|
|
||||||
|
|
||||||
|
|
||||||
class MappingNode(CollectionNode):
|
|
||||||
id = 'mapping'
|
|
|
@ -1,240 +0,0 @@
|
||||||
__all__ = ['Parser', 'ParserError']
|
|
||||||
|
|
||||||
from .error import MarkedError
|
|
||||||
from .tokens import * # NOQA
|
|
||||||
from .events import * # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
class ParserError(MarkedError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Parser:
|
|
||||||
def __init__(self):
|
|
||||||
self.current_event = None
|
|
||||||
self.yaml_version = None
|
|
||||||
self.states = []
|
|
||||||
self.marks = []
|
|
||||||
self.state = self.parse_stream_start
|
|
||||||
|
|
||||||
def dispose(self):
|
|
||||||
# Reset the state attributes (to clear self-references)
|
|
||||||
self.states = []
|
|
||||||
self.state = None
|
|
||||||
|
|
||||||
def check_event(self, *choices):
|
|
||||||
# Check the type of the next event.
|
|
||||||
if self.current_event is None:
|
|
||||||
if self.state:
|
|
||||||
self.current_event = self.state()
|
|
||||||
if self.current_event is not None:
|
|
||||||
if not choices:
|
|
||||||
return True
|
|
||||||
for choice in choices:
|
|
||||||
if isinstance(self.current_event, choice):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def peek_event(self):
|
|
||||||
# Get the next event.
|
|
||||||
if self.current_event is None:
|
|
||||||
if self.state:
|
|
||||||
self.current_event = self.state()
|
|
||||||
return self.current_event
|
|
||||||
|
|
||||||
def get_event(self):
|
|
||||||
# Get the next event and proceed further.
|
|
||||||
if self.current_event is None:
|
|
||||||
if self.state:
|
|
||||||
self.current_event = self.state()
|
|
||||||
value = self.current_event
|
|
||||||
self.current_event = None
|
|
||||||
return value
|
|
||||||
|
|
||||||
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
|
||||||
# implicit_document ::= block_node DOCUMENT-END*
|
|
||||||
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
|
||||||
|
|
||||||
def parse_stream_start(self):
|
|
||||||
# Parse the stream start.
|
|
||||||
token = self.get_token()
|
|
||||||
event = StreamStartEvent(token.start_mark, token.end_mark,
|
|
||||||
encoding=token.encoding)
|
|
||||||
|
|
||||||
# Prepare the next state.
|
|
||||||
self.state = self.parse_implicit_document_start
|
|
||||||
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_implicit_document_start(self):
|
|
||||||
# Parse an implicit document.
|
|
||||||
if not self.check_token(StreamEndToken):
|
|
||||||
token = self.peek_token()
|
|
||||||
start_mark = end_mark = token.start_mark
|
|
||||||
event = DocumentStartEvent(start_mark, end_mark, explicit=False)
|
|
||||||
|
|
||||||
# Prepare the next state.
|
|
||||||
self.states.append(self.parse_document_end)
|
|
||||||
self.state = self.parse_node
|
|
||||||
|
|
||||||
return event
|
|
||||||
|
|
||||||
else:
|
|
||||||
return self.parse_document_start()
|
|
||||||
|
|
||||||
def parse_document_start(self):
|
|
||||||
# Parse an explicit document.
|
|
||||||
if not self.check_token(StreamEndToken):
|
|
||||||
token = self.peek_token()
|
|
||||||
self.echoerr(None, None,
|
|
||||||
"expected '<stream end>', but found %r" % token.id,
|
|
||||||
token.start_mark)
|
|
||||||
return StreamEndEvent(token.start_mark, token.end_mark)
|
|
||||||
else:
|
|
||||||
# Parse the end of the stream.
|
|
||||||
token = self.get_token()
|
|
||||||
event = StreamEndEvent(token.start_mark, token.end_mark)
|
|
||||||
assert not self.states
|
|
||||||
assert not self.marks
|
|
||||||
self.state = None
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_document_end(self):
|
|
||||||
# Parse the document end.
|
|
||||||
token = self.peek_token()
|
|
||||||
start_mark = end_mark = token.start_mark
|
|
||||||
explicit = False
|
|
||||||
event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)
|
|
||||||
|
|
||||||
# Prepare the next state.
|
|
||||||
self.state = self.parse_document_start
|
|
||||||
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_document_content(self):
|
|
||||||
return self.parse_node()
|
|
||||||
|
|
||||||
def parse_node(self, indentless_sequence=False):
|
|
||||||
start_mark = end_mark = None
|
|
||||||
if start_mark is None:
|
|
||||||
start_mark = end_mark = self.peek_token().start_mark
|
|
||||||
event = None
|
|
||||||
implicit = True
|
|
||||||
if self.check_token(ScalarToken):
|
|
||||||
token = self.get_token()
|
|
||||||
end_mark = token.end_mark
|
|
||||||
if token.plain:
|
|
||||||
implicit = (True, False)
|
|
||||||
else:
|
|
||||||
implicit = (False, True)
|
|
||||||
event = ScalarEvent(implicit, token.value,
|
|
||||||
start_mark, end_mark, style=token.style)
|
|
||||||
self.state = self.states.pop()
|
|
||||||
elif self.check_token(FlowSequenceStartToken):
|
|
||||||
end_mark = self.peek_token().end_mark
|
|
||||||
event = SequenceStartEvent(implicit,
|
|
||||||
start_mark, end_mark, flow_style=True)
|
|
||||||
self.state = self.parse_flow_sequence_first_entry
|
|
||||||
elif self.check_token(FlowMappingStartToken):
|
|
||||||
end_mark = self.peek_token().end_mark
|
|
||||||
event = MappingStartEvent(implicit,
|
|
||||||
start_mark, end_mark, flow_style=True)
|
|
||||||
self.state = self.parse_flow_mapping_first_key
|
|
||||||
else:
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow node", start_mark,
|
|
||||||
"expected the node content, but found %r" % token.id,
|
|
||||||
token.start_mark)
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_flow_sequence_first_entry(self):
|
|
||||||
token = self.get_token()
|
|
||||||
self.marks.append(token.start_mark)
|
|
||||||
return self.parse_flow_sequence_entry(first=True)
|
|
||||||
|
|
||||||
def parse_flow_sequence_entry(self, first=False):
|
|
||||||
if not self.check_token(FlowSequenceEndToken):
|
|
||||||
if not first:
|
|
||||||
if self.check_token(FlowEntryToken):
|
|
||||||
self.get_token()
|
|
||||||
if self.check_token(FlowSequenceEndToken):
|
|
||||||
token = self.peek_token()
|
|
||||||
self.echoerr("While parsing a flow sequence", self.marks[-1],
|
|
||||||
"expected sequence value, but got %r" % token.id, token.start_mark)
|
|
||||||
else:
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow sequence", self.marks[-1],
|
|
||||||
"expected ',' or ']', but got %r" % token.id, token.start_mark)
|
|
||||||
|
|
||||||
if not self.check_token(FlowSequenceEndToken):
|
|
||||||
self.states.append(self.parse_flow_sequence_entry)
|
|
||||||
return self.parse_node()
|
|
||||||
token = self.get_token()
|
|
||||||
event = SequenceEndEvent(token.start_mark, token.end_mark)
|
|
||||||
self.state = self.states.pop()
|
|
||||||
self.marks.pop()
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_flow_sequence_entry_mapping_end(self):
|
|
||||||
self.state = self.parse_flow_sequence_entry
|
|
||||||
token = self.peek_token()
|
|
||||||
return MappingEndEvent(token.start_mark, token.start_mark)
|
|
||||||
|
|
||||||
def parse_flow_mapping_first_key(self):
|
|
||||||
token = self.get_token()
|
|
||||||
self.marks.append(token.start_mark)
|
|
||||||
return self.parse_flow_mapping_key(first=True)
|
|
||||||
|
|
||||||
def parse_flow_mapping_key(self, first=False):
|
|
||||||
if not self.check_token(FlowMappingEndToken):
|
|
||||||
if not first:
|
|
||||||
if self.check_token(FlowEntryToken):
|
|
||||||
self.get_token()
|
|
||||||
if self.check_token(FlowMappingEndToken):
|
|
||||||
token = self.peek_token()
|
|
||||||
self.echoerr("While parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected mapping key, but got %r" % token.id, token.start_mark)
|
|
||||||
else:
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected ',' or '}', but got %r" % token.id, token.start_mark)
|
|
||||||
if self.check_token(KeyToken):
|
|
||||||
token = self.get_token()
|
|
||||||
if not self.check_token(ValueToken,
|
|
||||||
FlowEntryToken, FlowMappingEndToken):
|
|
||||||
self.states.append(self.parse_flow_mapping_value)
|
|
||||||
return self.parse_node()
|
|
||||||
else:
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected value, but got %r" % token.id, token.start_mark)
|
|
||||||
elif not self.check_token(FlowMappingEndToken):
|
|
||||||
token = self.peek_token()
|
|
||||||
expect_key = self.check_token(ValueToken, FlowEntryToken)
|
|
||||||
if not expect_key:
|
|
||||||
self.get_token()
|
|
||||||
expect_key = self.check_token(ValueToken)
|
|
||||||
|
|
||||||
if expect_key:
|
|
||||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected string key, but got %r" % token.id, token.start_mark)
|
|
||||||
else:
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected ':', but got %r" % token.id, token.start_mark)
|
|
||||||
token = self.get_token()
|
|
||||||
event = MappingEndEvent(token.start_mark, token.end_mark)
|
|
||||||
self.state = self.states.pop()
|
|
||||||
self.marks.pop()
|
|
||||||
return event
|
|
||||||
|
|
||||||
def parse_flow_mapping_value(self):
|
|
||||||
if self.check_token(ValueToken):
|
|
||||||
token = self.get_token()
|
|
||||||
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
|
|
||||||
self.states.append(self.parse_flow_mapping_key)
|
|
||||||
return self.parse_node()
|
|
||||||
|
|
||||||
token = self.peek_token()
|
|
||||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
|
||||||
"expected mapping value, but got %r" % token.id, token.start_mark)
|
|
|
@ -1,135 +0,0 @@
|
||||||
# This module contains abstractions for the input stream. You don't have to
|
|
||||||
# looks further, there are no pretty code.
|
|
||||||
|
|
||||||
__all__ = ['Reader', 'ReaderError']
|
|
||||||
|
|
||||||
from .error import MarkedError, Mark, NON_PRINTABLE
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __builtin__ import unicode
|
|
||||||
except ImportError:
|
|
||||||
unicode = str # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
class ReaderError(MarkedError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Reader(object):
    '''Low-level character stream reader over a UTF-8 byte stream.'''

    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    # - a file-like object with its `read` method returning `str`,

    # Yeah, it's ugly and slow.
    def __init__(self, stream):
        '''Wrap *stream* and prime the decode buffers.

        Reads raw bytes eagerly until at least 2 bytes are buffered (or EOF),
        then decodes the first character so ``peek()`` works immediately.
        '''
        self.name = None
        self.stream = None
        self.stream_pointer = 0      # number of raw bytes consumed from the stream
        self.eof = True
        self.buffer = ''             # decoded characters not yet consumed
        self.pointer = 0             # index of the next character within self.buffer
        self.full_buffer = unicode('')  # all decoded text, kept for error Marks
        self.full_pointer = 0        # absolute character position in full_buffer
        self.raw_buffer = None       # undecoded bytes; None once EOF is fully drained
        self.raw_decode = codecs.utf_8_decode
        self.encoding = 'utf-8'
        self.index = 0               # absolute character index (mirrors full_pointer)
        self.line = 0
        self.column = 0

        self.stream = stream
        self.name = getattr(stream, 'name', "<file>")
        self.eof = False
        self.raw_buffer = None

        # Prime the raw buffer with at least two bytes (unless the stream is
        # shorter) before decoding the first character.
        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
            self.update_raw()
        self.update(1)

    def peek(self, index=0):
        '''Return the character *index* positions ahead without consuming it.

        EAFP: the common case hits the buffer directly; on miss, decode more
        input and retry. At EOF the buffer ends with '\0', so the retry
        succeeds for index 0.
        '''
        try:
            return self.buffer[self.pointer + index]
        except IndexError:
            self.update(index + 1)
            return self.buffer[self.pointer + index]

    def prefix(self, length=1):
        '''Return up to *length* upcoming characters without consuming them.'''
        if self.pointer + length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer:self.pointer + length]

    def update_pointer(self, length):
        '''Consume *length* characters, maintaining line/column/index counters.'''
        while length:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.full_pointer += 1
            self.index += 1
            if ch == '\n':
                self.line += 1
                self.column = 0
            else:
                self.column += 1
            length -= 1

    def forward(self, length=1):
        '''Advance the read position by *length* characters, decoding as needed.'''
        # +1 keeps one lookahead character available after the move.
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        self.update_pointer(length)

    def get_mark(self):
        '''Return a Mark describing the current position (for error reporting).'''
        return Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer)

    def check_printable(self, data):
        '''Raise ReaderError if *data* contains a disallowed character.

        Advances the pointer to the offending character first so the Mark in
        the exception points at it.
        '''
        match = NON_PRINTABLE.search(data)
        if match:
            self.update_pointer(match.start())
            raise ReaderError('while reading from stream', None,
                              'found special characters which are not allowed',
                              Mark(self.name, self.line, self.column, self.full_buffer, self.full_pointer))

    def update(self, length):
        '''Decode raw bytes until at least *length* characters are buffered.

        Discards already-consumed characters, then decodes chunks of
        raw_buffer. On a UTF-8 decode error, the valid prefix is kept, the
        bad byte is recorded in full_buffer as ``<ordinal>`` (so Marks stay
        meaningful), and ReaderError is raised. At EOF a terminating '\0'
        sentinel is appended and raw_buffer is released.
        '''
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            try:
                # Final argument: only treat a truncated sequence as an error
                # when we know the stream has ended.
                data, converted = self.raw_decode(self.raw_buffer,
                                                  'strict', self.eof)
            except UnicodeDecodeError as exc:
                character = self.raw_buffer[exc.start]
                position = self.stream_pointer - len(self.raw_buffer) + exc.start
                # Re-decode just the valid prefix so buffers stay consistent
                # before reporting the failure.
                data, converted = self.raw_decode(self.raw_buffer[:exc.start], 'strict', self.eof)
                self.buffer += data
                self.full_buffer += data + '<' + str(ord(character)) + '>'
                self.raw_buffer = self.raw_buffer[converted:]
                self.update_pointer(exc.start - 1)
                raise ReaderError('while reading from stream', None,
                                  'found character #x%04x that cannot be decoded by UTF-8 codec' % ord(character),
                                  Mark(self.name, self.line, self.column, self.full_buffer, position))
            self.buffer += data
            self.full_buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            self.check_printable(data)
            if self.eof:
                self.buffer += '\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=4096):
        '''Read up to *size* bytes from the stream into raw_buffer.

        An empty read marks EOF.
        '''
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue