Mirror of https://github.com/wting/autojump, synced 2024-10-27 20:34:07 +00:00

Merge 0883bf60aa into ff75f542ae (commit 2bee71df0b)

.gitignore (vendored): 1 changed line
@@ -8,3 +8,4 @@ __pycache__
 .pytest_cache
 .tox
 tags
+.idea/

.travis.yml: 22 changed lines
@@ -1,17 +1,15 @@
 language: python

-python: 2.7
-env:
-  - TOX_ENV=py26
-  - TOX_ENV=py27
-  - TOX_ENV=py32
-  - TOX_ENV=py33
-  - TOX_ENV=py34
-  - TOX_ENV=flake8
+python:
+  - 2.7
+  - 3.3
+  - 3.4
+  - 3.5
+  - 3.6

 install:
-  - pip install tox
+  - pip install mock coverage ipdb ipython pytest pre-commit autopep8 flake8

 script:
-  - tox -e $TOX_ENV
+  - coverage run --source=bin/ --omit=bin/autojump_argparse.py -m py.test -vv -rxs --tb native -s --strict
+  - coverage report -m

bin/autojump: 188 changed lines
@@ -26,6 +26,8 @@ from itertools import chain
 from math import sqrt
 from operator import attrgetter
 from operator import itemgetter
+from sys import stderr
+import re

 if sys.version_info[0] == 3:
     ifilter = filter
@@ -65,73 +67,39 @@ from autojump_utils import sanitize
 from autojump_utils import take
 from autojump_utils import unico

-VERSION = '22.5.3'
+VERSION = "22.5.3"
 FUZZY_MATCH_THRESHOLD = 0.6
 TAB_ENTRIES_COUNT = 9
-TAB_SEPARATOR = '__'
+TAB_SEPARATOR = "__"


 def set_defaults():
     config = {}

     if is_osx():
-        data_home = os.path.join(os.path.expanduser('~'), 'Library')
+        data_home = os.path.join(os.path.expanduser("~"), "Library")
     elif is_windows():
-        data_home = os.getenv('APPDATA')
+        data_home = os.getenv("APPDATA")
     else:
-        data_home = os.getenv(
-            'XDG_DATA_HOME',
-            os.path.join(
-                os.path.expanduser('~'),
-                '.local',
-                'share',
-            ),
-        )
-    config['data_path'] = os.path.join(data_home, 'autojump', 'autojump.txt')
-    config['backup_path'] = os.path.join(data_home, 'autojump', 'autojump.txt.bak')
+        data_home = os.getenv("XDG_DATA_HOME", os.path.join(os.path.expanduser("~"), ".local", "share"))
+    config["data_path"] = os.path.join(data_home, "autojump", "autojump.txt")
+    config["backup_path"] = os.path.join(data_home, "autojump", "autojump.txt.bak")

     return config


 def parse_arguments():
     parser = ArgumentParser(
-        description='Automatically jump to directory passed as an argument.',
-        epilog='Please see autojump(1) man pages for full documentation.',
+        description="Automatically jump to directory passed as an argument.", epilog="Please see autojump(1) man pages for full documentation."
     )
-    parser.add_argument(
-        'directory', metavar='DIRECTORY', nargs='*', default='',
-        help='directory to jump to',
-    )
-    parser.add_argument(
-        '-a', '--add', metavar='DIRECTORY',
-        help='add path',
-    )
-    parser.add_argument(
-        '-i', '--increase', metavar='WEIGHT', nargs='?', type=int,
-        const=10, default=False,
-        help='increase current directory weight',
-    )
-    parser.add_argument(
-        '-d', '--decrease', metavar='WEIGHT', nargs='?', type=int,
-        const=15, default=False,
-        help='decrease current directory weight',
-    )
-    parser.add_argument(
-        '--complete', action='store_true', default=False,
-        help='used for tab completion',
-    )
-    parser.add_argument(
-        '--purge', action='store_true', default=False,
-        help='remove non-existent paths from database',
-    )
-    parser.add_argument(
-        '-s', '--stat', action='store_true', default=False,
-        help='show database entries and their key weights',
-    )
-    parser.add_argument(
-        '-v', '--version', action='version', version='%(prog)s v' +
-        VERSION, help='show version information',
-    )
+    parser.add_argument("directory", metavar="DIRECTORY", nargs="*", default="", help="directory to jump to")
+    parser.add_argument("-a", "--add", metavar="DIRECTORY", help="add path")
+    parser.add_argument("-i", "--increase", metavar="WEIGHT", nargs="?", type=int, const=10, default=False, help="increase current directory weight")
+    parser.add_argument("-d", "--decrease", metavar="WEIGHT", nargs="?", type=int, const=15, default=False, help="decrease current directory weight")
+    parser.add_argument("--complete", action="store_true", default=False, help="used for tab completion")
+    parser.add_argument("--purge", action="store_true", default=False, help="remove non-existent paths from database")
+    parser.add_argument("-s", "--stat", action="store_true", default=False, help="show database entries and their key weights")
+    parser.add_argument("-v", "--version", action="version", version="%(prog)s v" + VERSION, help="show version information")

     return parser.parse_args()
@@ -145,12 +113,13 @@ def add_path(data, path, weight=10):
     path.
     """
     path = unico(path).rstrip(os.sep)
-    if path == os.path.expanduser('~'):
+    if path == os.path.expanduser("~"):
         return data, Entry(path, 0)

-    data[path] = sqrt((data.get(path, 0) ** 2) + (weight ** 2))
-
-    return data, Entry(path, data[path])
+    slash_only_path = re.sub(re.escape(os.sep), "/", path)
+    orig_weight = data.get(slash_only_path, 0)
+    data[slash_only_path] = sqrt((orig_weight ** 2) + (weight ** 2))
+    return data, Entry(path, data[slash_only_path])


 def decrease_path(data, path, weight=15):
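
A note on the weighting rule visible in this hunk: add_path still grows a path's weight sub-linearly, combining the old and new values as sqrt(old^2 + weight^2); the change is that the value is now keyed by a slash-normalized copy of the path. A small standalone sketch of how that rule behaves over repeated visits (illustrative only, not part of the patch):

    from math import sqrt

    # each visit adds the default weight of 10 via sqrt(old**2 + 10**2)
    weight = 0.0
    for visit in range(1, 6):
        weight = sqrt(weight ** 2 + 10 ** 2)
        print(visit, round(weight, 2))
    # 1 10.0, 2 14.14, 3 17.32, 4 20.0, 5 22.36: growth slows as visits accumulate
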
@@ -171,7 +140,7 @@ def detect_smartcase(needles):
 def find_matches(entries, needles, check_entries=True):
     """Return an iterator to matching entries."""
     # TODO(wting|2014-02-24): replace assertion with unit test
-    assert isinstance(needles, list), 'Needles must be a list.'
+    assert isinstance(needles, list), "Needles must be a list."
     ignore_case = detect_smartcase(needles)

     try:
@@ -188,19 +157,11 @@ def find_matches(entries, needles, check_entries=True):
     else:
         path_exists = lambda _: True

-    data = sorted(
-        entries,
-        key=attrgetter('weight', 'path'),
-        reverse=True,
-    )
+    data = sorted(entries, key=attrgetter("weight", "path"), reverse=True)

     return ifilter(
         lambda entry: not is_cwd(entry) and path_exists(entry),
-        chain(
-            match_consecutive(needles, data, ignore_case),
-            match_fuzzy(needles, data, ignore_case),
-            match_anywhere(needles, data, ignore_case),
-        ),
+        chain(match_consecutive(needles, data, ignore_case), match_fuzzy(needles, data, ignore_case), match_anywhere(needles, data, ignore_case)),
     )
@@ -211,35 +172,12 @@ def handle_tab_completion(needle, entries):
         print_local(tab_path)
     elif tab_index:
         get_ith_path = lambda i, iterable: last(take(i, iterable)).path
-        print_local(get_ith_path(
-            tab_index,
-            find_matches(entries, [tab_needle], check_entries=False),
-        ))
+        print_local(get_ith_path(tab_index, find_matches(entries, [tab_needle], check_entries=False)))
     elif tab_needle:
         # found partial tab completion entry
-        print_tab_menu(
-            tab_needle,
-            take(
-                TAB_ENTRIES_COUNT, find_matches(
-                    entries,
-                    [tab_needle],
-                    check_entries=False,
-                ),
-            ),
-            TAB_SEPARATOR,
-        )
+        print_tab_menu(tab_needle, take(TAB_ENTRIES_COUNT, find_matches(entries, [tab_needle], check_entries=False)), TAB_SEPARATOR)
     else:
-        print_tab_menu(
-            needle,
-            take(
-                TAB_ENTRIES_COUNT, find_matches(
-                    entries,
-                    [needle],
-                    check_entries=False,
-                ),
-            ),
-            TAB_SEPARATOR,
-        )
+        print_tab_menu(needle, take(TAB_ENTRIES_COUNT, find_matches(entries, [needle], check_entries=False)), TAB_SEPARATOR)


 def purge_missing_paths(entries):
@@ -252,26 +190,24 @@ def print_stats(data, data_path):
     for path, weight in sorted(data.items(), key=itemgetter(1)):
         print_entry(Entry(path, weight))

-    print('________________________________________\n')
-    print('%d:\t total weight' % sum(data.values()))
-    print('%d:\t number of entries' % len(data))
+    print("________________________________________\n")
+    print("%d:\t total weight" % sum(data.values()))
+    print("%d:\t number of entries" % len(data))

     try:
-        print_local(
-            '%.2f:\t current directory weight' % data.get(os.getcwdu(), 0),
-        )
+        print_local("%.2f:\t current directory weight" % data.get(os.getcwdu(), 0))
     except OSError:
         # current directory no longer exists
         pass

-    print('\ndata:\t %s' % data_path)
+    print("\ndata:\t %s" % data_path)


 def main(args):  # noqa
     if not is_autojump_sourced() and not is_windows():
         print("Please source the correct autojump file in your shell's")
-        print('startup file. For more information, please reinstall autojump')
-        print('and read the post installation instructions.')
+        print("startup file. For more information, please reinstall autojump")
+        print("and read the post installation instructions.")
         return 1

     config = set_defaults()
@@ -280,10 +216,7 @@ def main(args):  # noqa
     if args.add:
         save(config, first(add_path(load(config), args.add)))
     elif args.complete:
-        handle_tab_completion(
-            needle=first(chain(sanitize(args.directory), [''])),
-            entries=entriefy(load(config)),
-        )
+        handle_tab_completion(needle=first(chain(sanitize(args.directory), [""])), entries=entriefy(load(config)))
     elif args.decrease:
         data, entry = decrease_path(load(config), get_pwd(), args.decrease)
         save(config, data)
@@ -296,47 +229,48 @@ def main(args):  # noqa
         old_data = load(config)
         new_data = dictify(purge_missing_paths(entriefy(old_data)))
         save(config, new_data)
-        print('Purged %d entries.' % (len(old_data) - len(new_data)))
+        print("Purged %d entries." % (len(old_data) - len(new_data)))
     elif args.stat:
-        print_stats(load(config), config['data_path'])
+        print_stats(load(config), config["data_path"])
     elif not args.directory:
         # Return best match.
         entries = entriefy(load(config))
-        print_local(first(chain(
-            imap(attrgetter('path'), find_matches(entries, [''])),
-            # always return a path to calling shell functions
-            ['.'],
-        )))
+        print_local(
+            first(
+                chain(
+                    imap(attrgetter("path"), find_matches(entries, [""])),
+                    # always return a path to calling shell functions
+                    ["."],
+                )
+            )
+        )
     else:
         entries = entriefy(load(config))
         needles = sanitize(args.directory)
-        tab_needle, tab_index, tab_path = \
-            get_tab_entry_info(first(needles), TAB_SEPARATOR)
+        tab_needle, tab_index, tab_path = get_tab_entry_info(first(needles), TAB_SEPARATOR)

         # Handle `j foo__`, assuming first index.
-        if not tab_path and not tab_index \
-                and tab_needle and needles[0] == tab_needle + TAB_SEPARATOR:
+        if not tab_path and not tab_index and tab_needle and needles[0] == tab_needle + TAB_SEPARATOR:
             tab_index = 1

         if tab_path:
             print_local(tab_path)
         elif tab_index:
             get_ith_path = lambda i, iterable: last(take(i, iterable)).path
-            print_local(
-                get_ith_path(
-                    tab_index,
-                    find_matches(entries, [tab_needle]),
-                ),
-            )
+            print_local(get_ith_path(tab_index, find_matches(entries, [tab_needle])))
         else:
-            print_local(first(chain(
-                imap(attrgetter('path'), find_matches(entries, needles)),
-                # always return a path to calling shell functions
-                ['.'],
-            )))
+            print_local(
+                first(
+                    chain(
+                        imap(attrgetter("path"), find_matches(entries, needles)),
+                        # always return a path to calling shell functions
+                        ["."],
+                    )
+                )
+            )

     return 0


-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main(parse_arguments()))
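
The reindented print_local(first(chain(...))) calls keep the long-standing fallback: when nothing matches, the "." appended by chain() is returned, so the calling shell function always receives a usable path. A minimal standalone sketch of that fallback (illustrative only; first() is a stand-in for autojump's helper of the same name):

    from itertools import chain

    def first(xs):
        # stand-in for the real helper: return the first element of an iterable
        return next(iter(xs))

    print(first(chain(["/home/user/www"], ["."])))  # -> /home/user/www
    print(first(chain([], ["."])))                  # -> . (fallback when there are no matches)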

bin/autojump.bash
@@ -1,130 +1,140 @@
-export AUTOJUMP_SOURCED=1
+if [[ -z $AUTOJUMP_SOURCED ]]; then
+    export AUTOJUMP_SOURCED=1

-# set user installation paths
-if [[ -d ~/.autojump/ ]]; then
-    export PATH=~/.autojump/bin:"${PATH}"
-fi
+    # Script include
+    SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+    # set user installation paths
+    export PATH="${SCRIPT_DIR}:${PATH}"


 # set error file location
 if [[ "$(uname)" == "Darwin" ]]; then
     export AUTOJUMP_ERROR_PATH=~/Library/autojump/errors.log
 elif [[ -n "${XDG_DATA_HOME}" ]]; then
     export AUTOJUMP_ERROR_PATH="${XDG_DATA_HOME}/autojump/errors.log"
 else
     export AUTOJUMP_ERROR_PATH=~/.local/share/autojump/errors.log
 fi

 if [[ ! -d "$(dirname ${AUTOJUMP_ERROR_PATH})" ]]; then
     mkdir -p "$(dirname ${AUTOJUMP_ERROR_PATH})"
 fi


 # enable tab completion
 _autojump() {
     local cur
     cur=${COMP_WORDS[*]:1}
     comps=$(autojump --complete $cur)
-    while read i; do
+    echo "$comps" | while read i; do
         COMPREPLY=("${COMPREPLY[@]}" "${i}")
-    done <<EOF
-$comps
-EOF
+    done
 }
 complete -F _autojump j


 # change pwd hook
 autojump_add_to_database() {
     if [[ -f "${AUTOJUMP_ERROR_PATH}" ]]; then
         (autojump --add "$(pwd)" >/dev/null 2>>${AUTOJUMP_ERROR_PATH} &) &>/dev/null
     else
         (autojump --add "$(pwd)" >/dev/null &) &>/dev/null
     fi
 }

 case $PROMPT_COMMAND in
     *autojump*)
         ;;
     *)
         PROMPT_COMMAND="${PROMPT_COMMAND:+$(echo "${PROMPT_COMMAND}" | awk '{gsub(/; *$/,"")}1') ; }autojump_add_to_database"
         ;;
 esac


 # default autojump command
 j() {
     if [[ ${1} == -* ]] && [[ ${1} != "--" ]]; then
         autojump ${@}
         return
     fi

     output="$(autojump ${@})"
     if [[ -d "${output}" ]]; then
         if [ -t 1 ]; then # if stdout is a terminal, use colors
             echo -e "\\033[31m${output}\\033[0m"
         else
             echo -e "${output}"
         fi
         cd "${output}"
     else
         echo "autojump: directory '${@}' not found"
         echo "\n${output}\n"
         echo "Try \`autojump --help\` for more information."
         false
     fi
 }


 # jump to child directory (subdirectory of current path)
 jc() {
     if [[ ${1} == -* ]] && [[ ${1} != "--" ]]; then
         autojump ${@}
         return
     else
         j $(pwd) ${@}
     fi
 }


 # open autojump results in file browser
 jo() {
     if [[ ${1} == -* ]] && [[ ${1} != "--" ]]; then
         autojump ${@}
         return
     fi

     output="$(autojump ${@})"
     if [[ -d "${output}" ]]; then
         case ${OSTYPE} in
             linux*)
                 xdg-open "${output}"
                 ;;
             darwin*)
                 open "${output}"
                 ;;
             cygwin)
                 cygstart "" $(cygpath -w -a ${output})
                 ;;
             *)
                 echo "Unknown operating system: ${OSTYPE}." 1>&2
                 ;;
         esac
     else
         echo "autojump: directory '${@}' not found"
         echo "\n${output}\n"
         echo "Try \`autojump --help\` for more information."
         false
     fi
 }


 # open autojump results (child directory) in file browser
 jco() {
     if [[ ${1} == -* ]] && [[ ${1} != "--" ]]; then
         autojump ${@}
         return
     else
         jo $(pwd) ${@}
     fi
 }

+# Jump around a git repo
+g() {
+    REPO_ROOT=`git rev-parse --show-toplevel`
+    j "$REPO_ROOT" "$@"
+}
+
+else
+    echo "ERROR: autojump was sourced twice"
+fi

bin/autojump.sh
@@ -1,8 +1,10 @@
 # the login $SHELL isn't always the one used
 # NOTE: problems might occur if /bin/sh is symlinked to /bin/bash
 if [ -n "${BASH}" ]; then
+    SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
     shell="bash"
 elif [ -n "${ZSH_NAME}" ]; then
+    SCRIPT_DIR="${0:a:h}"
     shell="zsh"
 elif [ -n "${__fish_datadir}" ]; then
     shell="fish"
@@ -23,4 +25,10 @@ elif [ -s ~/.autojump/share/autojump/autojump.${shell} ]; then
 # check global install
 elif [ -s /usr/local/share/autojump/autojump.${shell} ]; then
     source /usr/local/share/autojump/autojump.${shell}
+else
+    if [ -s "$SCRIPT_DIR/autojump.${shell}" ]; then
+        source "$SCRIPT_DIR/autojump.${shell}"
+    else
+        echo "ERROR: autojump not found"
+    fi
 fi

bin/autojump.zsh
@@ -1,11 +1,13 @@
 export AUTOJUMP_SOURCED=1
+SCRIPT_DIR="${0:a:h}"

 # set user installation paths
+path=(${SCRIPT_DIR} "${path[@]}")
 if [[ -d ~/.autojump/bin ]]; then
-    path=(~/.autojump/bin ${path})
+    path=(~/.autojump/bin "${path[@]}")
 fi
 if [[ -d ~/.autojump/functions ]]; then
-    fpath=(~/.autojump/functions ${fpath})
+    fpath=(~/.autojump/functions "${fpath[@]}")
 fi

@@ -13,11 +15,11 @@ fi
 if command -v brew &>/dev/null; then
     local brew_prefix=${BREW_PREFIX:-$(brew --prefix)}
     if [[ -d "${brew_prefix}/share/zsh/site-functions" ]]; then
-        fpath=("${brew_prefix}/share/zsh/site-functions" ${fpath})
+        fpath=("${brew_prefix}/share/zsh/site-functions" "${fpath[@]}")
     fi
 fi

+# set this installation path
 # set error file location
 if [[ "$(uname)" == "Darwin" ]]; then
     export AUTOJUMP_ERROR_PATH=~/Library/autojump/errors.log
@@ -123,3 +125,14 @@ jco() {
         jo $(pwd) ${@}
     fi
 }
+
+# Jump around a git repo
+g() {
+    if [[ ${1} == -* ]] && [[ ${1} != "--" ]]; then
+        autojump ${@}
+        return
+    else
+        REPO_ROOT=`git rev-parse --show-toplevel`
+        j "$REPO_ROOT" ${@}
+    fi
+}

bin/autojump_data.py
@@ -3,6 +3,7 @@
 from __future__ import print_function

 import os
+import re
 import shutil
 import sys
 from codecs import open
@@ -26,7 +27,7 @@ else:


 BACKUP_THRESHOLD = 24 * 60 * 60
-Entry = namedtuple('Entry', ['path', 'weight'])
+Entry = namedtuple("Entry", ["path", "weight"])


 def dictify(entries):
@@ -51,21 +52,16 @@ def entriefy(data):

 def load(config):
     """Returns a dictonary (key=path, value=weight) loaded from data file."""
-    xdg_aj_home = os.path.join(
-        os.path.expanduser('~'),
-        '.local',
-        'share',
-        'autojump',
-    )
+    xdg_aj_home = os.path.join(os.path.expanduser("~"), ".local", "share", "autojump")

     if is_osx() and os.path.exists(xdg_aj_home):
         migrate_osx_xdg_data(config)

-    if not os.path.exists(config['data_path']):
+    if not os.path.exists(config["data_path"]):
         return {}

     # example: u'10.0\t/home/user\n' -> ['10.0', u'/home/user']
-    parse = lambda line: line.strip().split('\t')
+    parse = lambda line: line.strip().split("\t")

     correct_length = lambda x: len(x) == 2

@@ -73,24 +69,15 @@ def load(config):
     tupleize = lambda x: (x[1], float(x[0]))

     try:
-        with open(
-            config['data_path'],
-            'r', encoding='utf-8',
-            errors='replace',
-        ) as f:
-            return dict(
-                imap(
-                    tupleize,
-                    ifilter(correct_length, imap(parse, f)),
-                ),
-            )
+        with open(config["data_path"], "r", encoding="utf-8", errors="replace") as f:
+            return dict(imap(tupleize, ifilter(correct_length, imap(parse, f))))
     except (IOError, EOFError):
         return load_backup(config)


 def load_backup(config):
-    if os.path.exists(config['backup_path']):
-        move_file(config['backup_path'], config['data_path'])
+    if os.path.exists(config["backup_path"]):
+        move_file(config["backup_path"], config["data_path"])
         return load(config)
     return {}

@@ -100,17 +87,17 @@ def migrate_osx_xdg_data(config):
     Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X. This
     migrates autojump files from ~/.local/share/autojump to ~/Library/autojump
     """
-    assert is_osx(), 'This function should only be run on OS X.'
+    assert is_osx(), "This function should only be run on OS X."

-    xdg_data_home = os.path.join(os.path.expanduser('~'), '.local', 'share')
-    xdg_aj_home = os.path.join(xdg_data_home, 'autojump')
-    data_path = os.path.join(xdg_aj_home, 'autojump.txt')
-    backup_path = os.path.join(xdg_aj_home, 'autojump.txt.bak')
+    xdg_data_home = os.path.join(os.path.expanduser("~"), ".local", "share")
+    xdg_aj_home = os.path.join(xdg_data_home, "autojump")
+    data_path = os.path.join(xdg_aj_home, "autojump.txt")
+    backup_path = os.path.join(xdg_aj_home, "autojump.txt.bak")

     if os.path.exists(data_path):
-        move_file(data_path, config['data_path'])
+        move_file(data_path, config["data_path"])
     if os.path.exists(backup_path):
-        move_file(backup_path, config['backup_path'])
+        move_file(backup_path, config["backup_path"])

     # cleanup
     shutil.rmtree(xdg_aj_home)
@@ -120,7 +107,7 @@ def migrate_osx_xdg_data(config):

 def save(config, data):
     """Save data and create backup, creating a new data file if necessary."""
-    data_dir = os.path.dirname(config['data_path'])
+    data_dir = os.path.dirname(config["data_path"])
     create_dir(data_dir)

     # atomically save by writing to temporary file and moving to destination
@@ -129,20 +116,21 @@ def save(config, data):
         # Windows cannot reuse the same open file name
         temp.close()

-        with open(temp.name, 'w', encoding='utf-8', errors='replace') as f:
+        with open(temp.name, "w", encoding="utf-8", errors="replace") as f:
             for path, weight in data.items():
-                f.write(unico('%s\t%s\n' % (weight, path)))
+                weight_with_age = weight * 0.999
+                slash_only_path = re.sub(re.escape(os.sep), "/", path)
+                f.write(unico("%s\t%s\n" % (weight_with_age, slash_only_path)))

             f.flush()
             os.fsync(f)
     except IOError as ex:
-        print('Error saving autojump data (disk full?)' % ex, file=sys.stderr)
+        print("Error saving autojump data (disk full?)" % ex, file=sys.stderr)
         sys.exit(1)

     # move temp_file -> autojump.txt
-    move_file(temp.name, config['data_path'])
+    move_file(temp.name, config["data_path"])

     # create backup file if it doesn't exist or is older than BACKUP_THRESHOLD
-    if not os.path.exists(config['backup_path']) or \
-            (time() - os.path.getmtime(config['backup_path']) > BACKUP_THRESHOLD):  # noqa
-        shutil.copy(config['data_path'], config['backup_path'])
+    if not os.path.exists(config["backup_path"]) or (time() - os.path.getmtime(config["backup_path"]) > BACKUP_THRESHOLD):  # noqa
+        shutil.copy(config["data_path"], config["backup_path"])
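
Two behavioural changes are folded into the save() hunk above: every save multiplies each weight by 0.999, so entries that are never revisited decay slowly over time, and paths are written back with forward slashes regardless of os.sep. A standalone sketch of how that decay compounds (illustrative only):

    # weight * 0.999 per save: an untouched entry loses roughly 1% of its
    # weight after 10 saves, about 10% after 100, and about 63% after 1000.
    weight = 100.0
    for saves in range(1, 1001):
        weight *= 0.999
        if saves in (10, 100, 1000):
            print(saves, round(weight, 2))
    # 10 99.0
    # 100 90.48
    # 1000 36.77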

bin/autojump_match.py
@@ -3,11 +3,11 @@
 import os
 import re
 from difflib import SequenceMatcher
+from sys import stderr

 from autojump_utils import is_python3
 from autojump_utils import last


 if is_python3():  # pragma: no cover
     ifilter = filter
     imap = map
@@ -36,13 +36,9 @@ def match_anywhere(needles, haystack, ignore_case=False):
         (path='/foo/baz', weight=10),
     ]
     """
-    regex_needle = '.*' + '.*'.join(imap(re.escape, needles)) + '.*'
+    regex_needle = ".*" + ".*".join(imap(re.escape, needles)) + ".*"
     regex_flags = re.IGNORECASE | re.UNICODE if ignore_case else re.UNICODE
-    found = lambda haystack: re.search(
-        regex_needle,
-        haystack.path,
-        flags=regex_flags,
-    )
+    found = lambda haystack: re.search(regex_needle, haystack.path, flags=regex_flags)
     return ifilter(found, haystack)
@@ -75,16 +71,21 @@ def match_consecutive(needles, haystack, ignore_case=False):
         (path='/foo/baz', weight=10),
     ]
     """
-    regex_no_sep = '[^' + os.sep + ']*'
-    regex_no_sep_end = regex_no_sep + '$'
-    regex_one_sep = regex_no_sep + os.sep + regex_no_sep
-    regex_needle = regex_one_sep.join(imap(re.escape, needles)) + regex_no_sep_end
+    regex_needle = ""
+    for needle in needles:
+        slash_only_needle = re.sub(re.escape(os.sep), "/", needle)
+        if regex_needle == "":
+            regex_needle = slash_only_needle
+        else:
+            regex_needle += "[^/]*/.*" + slash_only_needle
+    regex_needle += "[^/]*$"
     regex_flags = re.IGNORECASE | re.UNICODE if ignore_case else re.UNICODE
-    found = lambda entry: re.search(
-        regex_needle,
-        entry.path,
-        flags=regex_flags,
-    )
+    stderr.write("Regex: " + regex_needle + "\n")
+    def found(entry):
+        slash_only_path = re.sub(re.escape(os.sep), "/", entry.path)
+        return re.search(regex_needle, slash_only_path, flags=regex_flags)

     return ifilter(found, haystack)
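
The rewritten match_consecutive assembles its regex needle incrementally and compares it against slash-normalized paths, rather than joining the needles with an os.sep-based separator pattern. A rough standalone reconstruction of the pattern that loop produces for two needles (illustrative only; the needles are hypothetical):

    import re

    needles = ["foo", "baz"]
    regex_needle = ""
    for needle in needles:
        if regex_needle == "":
            regex_needle = needle
        else:
            regex_needle += "[^/]*/.*" + needle
    regex_needle += "[^/]*$"

    print(regex_needle)                                        # foo[^/]*/.*baz[^/]*$
    print(bool(re.search(regex_needle, "/srv/foo/bar/baz")))   # True
    print(bool(re.search(regex_needle, "/srv/baz/foo")))       # False: needles must match in order
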
@@ -115,15 +116,9 @@ def match_fuzzy(needles, haystack, ignore_case=False, threshold=0.6):
     end_dir = lambda path: last(os.path.split(path))
     if ignore_case:
         needle = last(needles).lower()
-        match_percent = lambda entry: SequenceMatcher(
-            a=needle,
-            b=end_dir(entry.path.lower()),
-        ).ratio()
+        match_percent = lambda entry: SequenceMatcher(a=needle, b=end_dir(entry.path.lower())).ratio()
     else:
         needle = last(needles)
-        match_percent = lambda entry: SequenceMatcher(
-            a=needle,
-            b=end_dir(entry.path),
-        ).ratio()
+        match_percent = lambda entry: SequenceMatcher(a=needle, b=end_dir(entry.path)).ratio()
     meets_threshold = lambda entry: match_percent(entry) >= threshold
     return ifilter(meets_threshold, haystack)
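
match_fuzzy is only reformatted in the hunk above: it still scores the last path component with difflib's SequenceMatcher and keeps entries whose ratio meets the 0.6 threshold. A standalone example of the kind of ratio being compared (illustrative only):

    from difflib import SequenceMatcher

    # ratio between a mistyped needle and a directory basename
    ratio = SequenceMatcher(a="wokr", b="work").ratio()
    print(round(ratio, 2))  # 0.75
    print(ratio >= 0.6)     # True: "wokr" would still fuzzy-match a "work" directory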

bin/autojump_profile.ps1 (new file): 29 added lines
@@ -0,0 +1,29 @@
+Set-Alias -Name ll -Value ls
+Function .. {cd ..}
+Function ... {cd ../..}
+Function .... {cd ../../..}
+Function ..... {cd ../../../..}
+Function ...... {cd ../../../../..}
+Function ....... {cd ../../../../../..}
+Function ........ {cd ../../../../../../..}
+Function ......... {cd ../../../../../../../..}
+Function .......... {cd ../../../../../../../../..}
+
+Function j {
+    $jumpdir = autojump $args
+    echo "$jumpdir"
+    cd $jumpdir
+}
+
+Function jc {
+    j "$pwd" @args
+}
+
+Function g {
+    $repo_root = git rev-parse --show-toplevel
+    j $repo_root @args
+}
+
+Set-PSBreakpoint -Variable pwd -Mode Write -Action {
+    autojump --add "$pwd"
+} | out-null

bin/autojump_utils.py
@@ -13,6 +13,7 @@ from itertools import islice

 if sys.version_info[0] == 3:
     imap = map
+    unicode = str
     os.getcwdu = os.getcwd
 else:
     from itertools import imap

@@ -19,8 +19,8 @@ class TestMatchAnywhere(object):
     entry4 = Entry('/中/zhong/国/guo', 10)
     entry5 = Entry('/is\'t/this/a/b*tchin/edge/case?', 10)
     win_entry1 = Entry('C:\\foo\\bar\\baz', 10)
-    win_entry2 = Entry('D:\Program Files (x86)\GIMP', 10)
-    win_entry3 = Entry('C:\Windows\System32', 10)
+    win_entry2 = Entry('D:\\Program Files (x86)\\GIMP', 10)
+    win_entry3 = Entry('C:\\Windows\\System32', 10)

     @pytest.fixture
     def haystack(self):
@@ -75,9 +75,9 @@ class TestMatchConsecutive(object):
     entry4 = Entry('/中/zhong/国/guo', 10)
     entry5 = Entry('/日/本', 10)
     entry6 = Entry('/is\'t/this/a/b*tchin/edge/case?', 10)
-    win_entry1 = Entry('C:\Foo\Bar\Baz', 10)
-    win_entry2 = Entry('D:\Program Files (x86)\GIMP', 10)
-    win_entry3 = Entry('C:\Windows\System32', 10)
+    win_entry1 = Entry('C:\\Foo\\Bar\\Baz', 10)
+    win_entry2 = Entry('D:\\Program Files (x86)\\GIMP', 10)
+    win_entry3 = Entry('C:\\Windows\\System32', 10)

     @pytest.fixture
     def haystack(self):

@@ -25,6 +25,7 @@ from autojump_utils import unico
 if is_python3():
     os.getcwdu = os.getcwd
     xrange = range
+    unicode = str


 def u(string):

tox.ini: 7 changed lines
@@ -4,7 +4,8 @@ envlist =
     py27,
     py33,
     py34,
-    py35
+    py35,
+    py36
 # ignore missing setup.py
 skipsdist = True

@@ -31,3 +32,7 @@ commands =

 [pytest]
 norecursedirs = .git .tox docs
+
+[pycodestyle]
+ignore = E731,W504
+max-line-length = 131