2010-12-02 16:46:23 +00:00
|
|
|
#!/usr/bin/env python
|
2012-05-07 06:50:40 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2012-05-07 06:19:19 +00:00
|
|
|
"""
|
|
|
|
Copyright © 2008-2012 Joel Schaerer
|
|
|
|
Copyright © 2012 William Ting
|
|
|
|
|
|
|
|
* This program is free software; you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation; either version 3, or (at your option)
|
|
|
|
any later version.
|
|
|
|
|
|
|
|
* This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
* You should have received a copy of the GNU General Public License
|
|
|
|
along with this program; if not, write to the Free Software
|
|
|
|
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
|
|
"""
|
|
|
|
|
2010-10-24 09:30:01 +00:00
|
|
|
from __future__ import division, print_function
|
|
|
|
|
2012-09-26 20:04:45 +00:00
|
|
|
import sys
|
2008-04-21 09:43:07 +00:00
|
|
|
import os
|
2012-09-26 20:04:45 +00:00
|
|
|
try:
|
2012-11-23 16:57:54 +00:00
|
|
|
import argparse
|
2012-09-26 20:04:45 +00:00
|
|
|
except ImportError:
|
2012-11-23 16:57:54 +00:00
|
|
|
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
|
|
|
|
import autojump_argparse as argparse
|
|
|
|
sys.path.pop()
|
2012-09-26 20:04:45 +00:00
|
|
|
from operator import itemgetter
|
2012-05-06 23:41:00 +00:00
|
|
|
import re
|
2011-09-27 13:47:24 +00:00
|
|
|
import shutil
|
2012-05-06 23:41:00 +00:00
|
|
|
from tempfile import NamedTemporaryFile
|
2011-09-06 14:21:59 +00:00
|
|
|
|
2013-02-25 05:49:45 +00:00
|
|
|
# Version string reported by the --version flag.
VERSION = 'release-v21.5.1'

# Database maintenance thresholds: once the summed weight exceeds
# MAX_KEYWEIGHT the entries are decayed; once the entry count exceeds
# MAX_STORED_PATHS the lowest-weight entries are trimmed.
MAX_KEYWEIGHT = 1000
MAX_STORED_PATHS = 1000

# Separator token used to encode choice indices in tab-completion entries
# (e.g. "pattern__3__/full/path").
COMPLETION_SEPARATOR = '__'

# Parsed command-line arguments; populated by options().
ARGS = None

# Database location; both are filled in below from the environment.
CONFIG_DIR = None
DB_FILE = None

# When True, the path-existence checks in find_matches() are bypassed.
TESTING = False
|
2012-05-07 05:51:09 +00:00
|
|
|
|
2012-05-13 03:00:38 +00:00
|
|
|
# load config from environmental variables

# AUTOJUMP_DATA_DIR overrides the data directory; otherwise fall back to
# $XDG_DATA_HOME/autojump, defaulting to ~/.local/share/autojump.
if 'AUTOJUMP_DATA_DIR' in os.environ:
    CONFIG_DIR = os.environ.get('AUTOJUMP_DATA_DIR')
else:
    xdg_data_dir = os.environ.get('XDG_DATA_HOME') or \
            os.path.join(os.environ['HOME'], '.local', 'share')
    CONFIG_DIR = os.path.join(xdg_data_dir, 'autojump')

# AUTOJUMP_KEEP_ALL_ENTRIES=1 disables automatic decay/trim maintenance.
# NOTE: os.environ.get() returns None for a missing key, so a single
# lookup replaces the original "in os.environ and get() == '1'" pair.
KEEP_ALL_ENTRIES = os.environ.get('AUTOJUMP_KEEP_ALL_ENTRIES') == '1'

# AUTOJUMP_IGNORE_CASE=1 forces case-insensitive matching everywhere.
ALWAYS_IGNORE_CASE = os.environ.get('AUTOJUMP_IGNORE_CASE') == '1'

# AUTOJUMP_KEEP_SYMLINKS=1 flag (consumed by shell integration elsewhere;
# presumably keeps symlinked paths unresolved -- not used in this file).
KEEP_SYMLINKS = os.environ.get('AUTOJUMP_KEEP_SYMLINKS') == '1'

# Use a hidden file name when the data directory is the user's home
# directory so $HOME is not cluttered with a visible database file.
if CONFIG_DIR == os.path.expanduser('~'):
    DB_FILE = os.path.join(CONFIG_DIR, '.autojump.txt')
else:
    DB_FILE = os.path.join(CONFIG_DIR, 'autojump.txt')
|
2012-05-06 23:12:39 +00:00
|
|
|
|
|
|
|
class Database:
    """
    Object for interfacing with autojump database.

    The database maps directory paths (unicode) to floating-point weights
    and is persisted as tab-separated "<weight>\t<path>" lines.
    """

    def __init__(self, filename):
        # Absolute path of the on-disk weight database.
        self.filename = filename
        # path -> weight mapping.
        self.data = {}
        self.load()

    def __len__(self):
        return len(self.data)

    def add(self, path, increment=10):
        """
        Increase weight of existing paths or initialize new ones to 10.
        """
        if path not in self.data:
            self.data[path] = increment
        else:
            import math
            # Combine along the hypotenuse so frequently visited paths
            # grow sub-linearly instead of without bound.
            self.data[path] = math.sqrt((self.data[path]**2) + (increment**2))
        self.save()

    def decrease(self, path, increment=15):
        """
        Decrease weight of existing path. Unknown ones are ignored.
        """
        if path in self.data:
            # Clamp at zero; weights never go negative.
            if self.data[path] < increment:
                self.data[path] = 0
            else:
                self.data[path] -= increment
        self.save()

    def decay(self):
        """
        Decay database entries.
        """
        # Uniformly scale every weight down by 10%; only values are
        # modified, so iterating the dict directly is safe.
        for path in self.data:
            self.data[path] *= 0.9

    def get_weight(self, path):
        """
        Return path weight, or 0 for unknown paths.
        """
        return self.data.get(path, 0)

    def load(self, error_recovery=False):
        """
        Try to open the database file, recovering from backup if needed.
        """
        if os.path.exists(self.filename):
            try:
                # Python 3 needs an explicit encoding; Python 2 reads
                # bytes and decodes per line below.
                if sys.version_info >= (3, 0):
                    f = open(self.filename, 'r', encoding='utf-8')
                else:
                    f = open(self.filename, 'r')
                with f:
                    for line in f.readlines():
                        # Each record is "<weight>\t<path>\n"; split once
                        # so paths containing tabs survive.
                        weight, path = line[:-1].split("\t", 1)
                        path = decode(path, 'utf-8')
                        self.data[path] = float(weight)
            except (IOError, EOFError):
                self.load_backup(error_recovery)
        else:
            self.load_backup(error_recovery)

    def load_backup(self, error_recovery=False):
        """
        Loads database from backup file.
        """
        if os.path.exists(self.filename + '.bak'):
            if not error_recovery:
                print('Problem with autojump database, '
                      'trying to recover from backup...', file=sys.stderr)
            shutil.copy(self.filename + '.bak', self.filename)
            # error_recovery=True on the retry prevents infinite recursion.
            return self.load(True)

    def maintenance(self):
        """
        Trims and decays database entries when exceeding settings.
        """
        if sum(self.data.values()) > MAX_KEYWEIGHT:
            self.decay()
        if len(self.data) > MAX_STORED_PATHS:
            self.trim()
        self.save()

    def purge(self):
        """
        Deletes all entries that no longer exist on system.

        Returns the list of removed paths.
        """
        removed = []
        # BUGFIX: iterate over a snapshot of the keys -- deleting from a
        # dict while iterating its live keys() view raises RuntimeError
        # on Python 3.
        for path in list(self.data.keys()):
            if not os.path.exists(path):
                removed.append(path)
                del self.data[path]
        self.save()
        return removed

    def save(self):
        """
        Save database atomically and preserve backup, creating new database if
        needed.
        """
        # check file existence and permissions (stat index 4 is st_uid;
        # only the owner should rewrite the database)
        if ((not os.path.exists(self.filename)) or
                os.name == 'nt' or
                os.getuid() == os.stat(self.filename)[4]):
            temp = NamedTemporaryFile(dir=CONFIG_DIR, delete=False)
            for path, weight in sorted(self.data.items(),
                                       key=itemgetter(1),
                                       reverse=True):
                temp.write((unico("%s\t%s\n") % (weight, path)).encode("utf-8"))

            # catching disk errors and skipping save when file handle can't
            # be closed.
            try:
                # http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
                temp.flush()
                os.fsync(temp)
                temp.close()
            except IOError as ex:
                # BUGFIX: the original format string had no placeholder,
                # so the "%" itself raised TypeError on the error path.
                print("Error saving autojump database (disk full?) (%s)" %
                      ex, file=sys.stderr)
                return

            # Atomic replace: the temp file lives in the same directory.
            shutil.move(temp.name, self.filename)
            try:  # backup file
                import time
                # Refresh the backup at most once every 24 hours.
                if (not os.path.exists(self.filename + ".bak") or
                        time.time() - os.path.getmtime(self.filename + ".bak")
                        > 86400):
                    shutil.copy(self.filename, self.filename + ".bak")
            except OSError as ex:
                print("Error while creating backup autojump file. (%s)" %
                      ex, file=sys.stderr)

    def trim(self, percent=0.1):
        """
        If database has exceeded MAX_STORED_PATHS, removes bottom 10%.
        """
        dirs = list(self.data.items())
        dirs.sort(key=itemgetter(1))
        remove_cnt = int(percent * len(dirs))
        for path, _ in dirs[:remove_cnt]:
            del self.data[path]
|
|
|
|
|
|
|
|
|
2012-05-06 23:41:00 +00:00
|
|
|
def options():
    """
    Parse command line options.

    Stores the parsed namespace in the module-level ARGS global.  Returns
    True when a maintenance action (--add, --decrease, --purge, --stat)
    was handled and the caller should stop; False when a normal directory
    lookup should proceed.
    """
    global ARGS

    parser = argparse.ArgumentParser(
            description='Automatically jump to directory passed as an argument.',
            epilog="Please see autojump(1) man pages for full documentation.")
    parser.add_argument(
            'directory', metavar='DIRECTORY', nargs='*', default='',
            help='directory to jump to')
    parser.add_argument(
            '-a', '--add', '--increase', metavar='DIRECTORY',
            help='manually add path to database, or increase path weight for existing paths')
    parser.add_argument(
            # "-d" with no value decreases by the const (15); with a value,
            # by that amount.  default=False distinguishes "flag absent".
            '-d', '--decrease', metavar='WEIGHT', nargs='?', type=int,
            const=15, default=False,
            help='manually decrease path weight in database')
    parser.add_argument(
            '-b', '--bash', action="store_true", default=False,
            help='enclose directory quotes to prevent errors')
    parser.add_argument(
            '--complete', action="store_true", default=False,
            help='used for tab completion')
    parser.add_argument(
            '--purge', action="store_true", default=False,
            help='delete all database entries that no longer exist on system')
    parser.add_argument(
            '-s', '--stat', action="store_true", default=False,
            help='show database entries and their key weights')
    parser.add_argument(
            '-v', '--version', action="version", version="%(prog)s " + VERSION,
            help='show version information and exit')

    ARGS = parser.parse_args()

    # The home dir can be reached quickly by "cd" and may interfere with other
    # directories
    if (ARGS.add):
        if(ARGS.add != os.path.expanduser("~")):
            db = Database(DB_FILE)
            db.add(decode(ARGS.add))
        return True

    # NOTE(review): "-d 0" yields int 0, which is falsy and thus silently
    # ignored here -- confirm that is intended.
    if (ARGS.decrease):
        if(ARGS.decrease != os.path.expanduser("~")):
            db = Database(DB_FILE)
            # FIXME: handle symlinks?
            # Weight of the *current* directory is decreased.
            db.decrease(os.getcwd(), ARGS.decrease)
        return True

    if (ARGS.purge):
        db = Database(DB_FILE)
        removed = db.purge()
        if len(removed) > 0:
            for dir in removed:
                output(unico(dir))
            print("Number of database entries removed: %d" % len(removed))
        return True

    if (ARGS.stat):
        db = Database(DB_FILE)
        # Show only the 100 highest-weight entries, lowest first.
        dirs = list(db.data.items())
        dirs.sort(key=itemgetter(1))
        for path, count in dirs[-100:]:
            output(unico("%.1f:\t%s") % (count, path))
        print("Total key weight: %d. Number of stored dirs: %d" %
                (sum(db.data.values()), len(dirs)))
        return True
    return False
|
2012-05-06 23:41:00 +00:00
|
|
|
|
|
|
|
def decode(text, encoding=None, errors="strict"):
    """
    Decoding step for Python 2 which does not default to unicode.

    Python 3 strings are already unicode, so the input is returned
    untouched there; under Python 2 the byte string is decoded, using
    the filesystem encoding unless one is given.
    """
    if sys.version_info[0] <= 2:
        chosen = sys.getfilesystemencoding() if encoding is None else encoding
        return text.decode(chosen, errors)
    return text
|
2011-09-12 14:42:40 +00:00
|
|
|
|
2012-05-06 23:41:00 +00:00
|
|
|
def output(unicode_text, encoding=None):
    """
    Wrapper for the print function, using the filesystem encoding by default
    to minimize encoding mismatch problems in directory names.
    """
    # Python 3 prints unicode natively; nothing to encode.
    if sys.version_info[0] > 2:
        print(unicode_text)
        return
    if encoding is None:
        encoding = sys.getfilesystemencoding()
    print(unicode_text.encode(encoding))
|
2011-09-12 14:42:40 +00:00
|
|
|
|
2011-09-12 15:04:37 +00:00
|
|
|
def unico(text):
    """
    If Python 2, convert to a unicode object.

    On Python 3 every str is already unicode, so the input passes
    through unchanged.
    """
    return text if sys.version_info[0] > 2 else unicode(text)
|
|
|
|
|
2012-05-06 23:51:18 +00:00
|
|
|
def match_last(pattern):
    """
    If the last pattern contains a full path, jump there.
    The regexp is because we need to support stuff like
    "j wo jo__3__/home/joel/workspace/joel" for zsh.
    """
    # Strip everything up to and including the completion separator,
    # leaving only the trailing path component of the final pattern.
    candidate = re.sub("(.*)" + COMPLETION_SEPARATOR, "", pattern[-1])
    is_existing_abs_path = (
            len(candidate) > 0 and
            candidate[0] == "/" and
            os.path.exists(candidate))
    if not is_existing_abs_path:
        return False
    # During tab completion the shell already has the text; only print
    # the path for an actual jump.
    if not ARGS.complete:
        output(candidate)
    return True
|
|
|
|
|
2012-05-07 01:09:37 +00:00
|
|
|
def match(path, pattern, only_end=False, ignore_case=False):
    """
    Check whether a path matches a particular pattern, and return
    the remaining part of the string.
    """
    if only_end:
        # Compare only against the last N+1 components of the path,
        # where N is the number of '/' separators inside the pattern.
        match_path = "/".join(path.split('/')[-1-pattern.count('/'):])
    else:
        match_path = path

    if ignore_case:
        match_path = match_path.lower()
        pattern = pattern.lower()

    find_idx = match_path.find(pattern)
    # truncate path to avoid matching a pattern multiple times
    # NOTE(review): the branches look swapped relative to the comment
    # above -- on a hit the full, untruncated path is returned, and on a
    # miss (find_idx == -1) the slice evaluates to path[len(pattern)-1:],
    # whose value the caller discards anyway.  Confirm against upstream
    # before changing; find_matches() depends on the current behavior.
    if find_idx != -1:
        return (True, path)
    else:
        return (False, path[find_idx+len(pattern):])
|
2010-07-21 14:44:43 +00:00
|
|
|
|
2012-05-07 04:10:06 +00:00
|
|
|
def find_matches(db, patterns, max_matches=1, ignore_case=False, fuzzy=False):
    """
    Find max_matches paths that match the pattern, and add them to the
    result_list.

    Entries are considered in descending weight order; the current
    directory is skipped so autojump never "jumps" in place, unless it
    turns out to be the only match.
    """
    try:
        current_dir = decode(os.path.realpath(os.curdir))
    except OSError:
        # The current directory may have been deleted underneath us.
        current_dir = None

    # Highest-weight directories first.
    dirs = list(db.data.items())
    dirs.sort(key=itemgetter(1), reverse=True)
    results = []
    if fuzzy:
        from difflib import get_close_matches

        # create dictionary of end paths to compare against
        end_dirs = {}
        for d in dirs:
            if ignore_case:
                end = d[0].split('/')[-1].lower()
            else:
                end = d[0].split('/')[-1]

            # collisions: ignore lower weight paths
            if end not in end_dirs:
                end_dirs[end] = d[0]

        # find the first match (heighest weight)
        while True:
            # Fuzzy-match the last pattern against the basename keys.
            found = get_close_matches(patterns[-1], end_dirs, n=1, cutoff=.6)
            if not found:
                break
            # avoid jumping to current directory
            # NOTE(review): found[0] is a basename key, so exists() and
            # realpath() here resolve relative to the cwd, not the stored
            # full path (end_dirs[found[0]]) -- verify intent.
            if (os.path.exists(found[0]) or TESTING) and \
                current_dir != os.path.realpath(found[0]):
                break
            # continue with the last found directory removed
            del end_dirs[found[0]]

        if found:
            found = found[0]
            results.append(end_dirs[found])
            return results
        else:
            return []

    current_dir_match = False
    for path, _ in dirs:
        found, tmp = True, path
        # All patterns must match, in order, for the path to qualify.
        for n, p in enumerate(patterns):
            # for single/last pattern, only check end of path
            if n == len(patterns)-1:
                found, tmp = match(tmp, p, True, ignore_case)
            else:
                found, tmp = match(tmp, p, False, ignore_case)
            if not found: break

        if found and (os.path.exists(path) or TESTING):
            # avoid jumping to current directory
            # (call out to realpath this late to not stat all dirs)
            if current_dir == os.path.realpath(path):
                current_dir_match = True
                continue

            if path not in results:
                results.append(path)
            if len(results) >= max_matches:
                break

    # if current directory is the only match, add it to results
    if len(results) == 0 and current_dir_match:
        results.append(current_dir)

    return results
|
|
|
|
|
2011-01-04 20:00:59 +00:00
|
|
|
def shell_utility():
    """
    Run this when autojump is called as a shell utility.

    Returns False when no matching directory could be found (the caller
    exits non-zero); True otherwise.
    """
    # options() returns True when it already handled a maintenance action.
    if options(): return True
    db = Database(DB_FILE)

    # if no directories, add empty string
    if (ARGS.directory == ''):
        patterns = [unico('')]
    else:
        patterns = [decode(a) for a in ARGS.directory]

    # check last pattern for full path
    # FIXME: disabled until zsh tab completion is fixed on the shell side
    # if match_last(patterns): return True

    # check for tab completion
    tab_choice = -1
    tab_match = re.search(COMPLETION_SEPARATOR+"([0-9]+)", patterns[-1])
    if tab_match: # user has selected a tab completion entry
        tab_choice = int(tab_match.group(1))
        # Strip the "__N__..." suffix, leaving the bare pattern.
        patterns[-1] = re.sub(COMPLETION_SEPARATOR+"[0-9]+.*", "", patterns[-1])
    else: # user hasn't selected a tab completion, display choices again
        tab_match = re.match("(.*)"+COMPLETION_SEPARATOR, patterns[-1])
        if tab_match:
            patterns[-1] = tab_match.group(1)

    # on tab completion always show all results
    if ARGS.complete or tab_choice != -1:
        max_matches = 9
    else:
        max_matches = 1

    results = []
    if not ALWAYS_IGNORE_CASE:
        results = find_matches(db, patterns, max_matches, ignore_case=False)

    # if no results, try ignoring case
    if ARGS.complete or not results:
        results = find_matches(db, patterns, max_matches, ignore_case=True)

    # if no results, try approximate matching
    if not results:
        results = find_matches(db, patterns, max_matches, ignore_case=True,
                fuzzy=True)

    # Bash needs the path quoted to survive word splitting.
    quotes = ""
    if ARGS.complete and ARGS.bash: quotes = "'"

    if tab_choice != -1:
        # User picked entry N from a previous completion menu (1-based).
        if len(results) > tab_choice-1:
            output(unico("%s%s%s") % (quotes,results[tab_choice-1],quotes))
    elif len(results) > 1 and ARGS.complete:
        # Emit up to 8 numbered completion entries for the shell to show.
        output("\n".join(("%s%s%d%s%s" % (patterns[-1],
                COMPLETION_SEPARATOR, n+1, COMPLETION_SEPARATOR, r)
                for n, r in enumerate(results[:8]))))
    elif results:
        output(unico("%s%s%s")%(quotes,results[0],quotes))
    else:
        return False

    if not KEEP_ALL_ENTRIES:
        db.maintenance()

    return True
|
2012-04-07 14:14:19 +00:00
|
|
|
|
2011-01-04 20:00:59 +00:00
|
|
|
if __name__ == "__main__":
    # Exit with a non-zero status so the shell wrapper can tell that no
    # jump target was found.
    success = shell_utility()
    if not success:
        sys.exit(1)
|