#!/usr/bin/python
#Copyright Joel Schaerer 2008, 2009
#This file is part of autojump

#autojump is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#autojump is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with autojump. If not, see <http://www.gnu.org/licenses/>.
2008-04-21 09:43:07 +00:00
|
|
|
from __future__ import division
|
|
|
|
import cPickle
|
|
|
|
import getopt
|
2009-10-03 18:36:18 +00:00
|
|
|
from sys import argv,exit,stderr
|
2008-04-21 09:43:07 +00:00
|
|
|
import os
|
2008-06-02 15:43:38 +00:00
|
|
|
import signal
|
2009-02-18 13:12:13 +00:00
|
|
|
# Total weight cap: once the sum of all entries exceeds this, forget()
# rescales every entry down (gradual decay of old directories).
max_keyweight=1000

dead_dirs=False #global variable (evil ;-) to know if we should save the dict at the end
|
2008-06-02 15:43:38 +00:00
|
|
|
|
|
|
|
def signal_handler(arg1,arg2):
    # Swallow Ctrl-C: an interrupted jump should not kill the process
    # mid-save and corrupt the database.
    print "Received SIGINT, trying to continue"
signal.signal(signal.SIGINT,signal_handler) #Don't break on sigint
|
2008-04-21 09:43:07 +00:00
|
|
|
|
2009-02-13 22:22:32 +00:00
|
|
|
def uniqadd(lst,key):
    """Append key to lst only if it is not already present (order-preserving).

    Renamed the parameter from ``list`` to ``lst`` so it no longer shadows
    the builtin; every call site in this script passes it positionally.
    """
    if key not in lst:
        lst.append(key)
|
|
|
|
|
2008-04-21 09:43:07 +00:00
|
|
|
def dicadd(dic,key,increment=1):
    """Add increment to the (float) counter stored under key.

    Missing keys start from 0.0, so the stored weights are always floats.
    """
    current = dic.get(key, 0.)
    dic[key] = current + increment
|
|
|
|
|
2010-04-25 23:00:57 +00:00
|
|
|
def match(path,pattern,path_dict,ignore_case=False,only_end=False):
    """Decide whether a stored path matches pattern.

    Returns True only for an existing directory other than the current one.
    Side effect: a matching path that no longer exists is deleted from
    path_dict and the global dead_dirs flag is raised so the caller knows
    the database should be re-saved.
    """
    # Never propose the directory we are already in.
    try:
        if os.path.realpath(os.curdir)==path : return False
    except OSError: #sometimes the current path doesn't exist anymore. In that case, jump if possible.
        pass
    # For the last pattern only the tail of the path is considered, so
    # "foo" matches .../foo but not foo/bar/... (one extra component per
    # '/' present in the pattern itself).
    if only_end:
        candidate = "/".join(path.split('/')[-1-pattern.count('/'):])
    else:
        candidate = path
    if ignore_case:
        found = pattern.lower() in candidate.lower()
    else:
        found = pattern in candidate
    if not found:
        return False
    if os.path.exists(path):
        return True
    # Matching entry points at a dead directory: prune it and remember
    # that the database needs saving.
    del path_dict[path]
    global dead_dirs
    dead_dirs=True
    return False
|
|
|
|
|
2009-02-18 14:13:29 +00:00
|
|
|
def save(path_dict,dic_file):
    """Persist path_dict to dic_file as safely as this script can manage.

    Writes to a temporary file, flushes and fsyncs it, then atomically
    renames it over the real database, and refreshes a .bak copy at most
    once a day.
    """
    # Fix: cPickle protocol -1 is a binary format, so the temp file must be
    # opened in binary mode ('wb'); the original 'w' only worked by luck on
    # platforms where text and binary mode coincide.
    f=open(dic_file+".tmp",'wb')
    cPickle.dump(path_dict,f,-1)
    f.flush()
    os.fsync(f) # os.fsync accepts the file object via its fileno()
    f.close()
    try:
        # Atomic replace, cf. http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
        os.rename(dic_file+".tmp",dic_file)
        import time #backup file
        # Keep a daily backup so open_dic() can recover from corruption.
        if not os.path.exists(dic_file+".bak") or time.time()-os.path.getmtime(dic_file+".bak")>86400:
            import shutil
            shutil.copy(dic_file,dic_file+".bak")
    except OSError:
        pass #Fail quietly, this usually means a concurrent autojump process already did the job
|
2009-02-18 14:13:29 +00:00
|
|
|
|
2009-02-18 13:12:13 +00:00
|
|
|
def forget(path_dict,dic_file):
    """Gradually forget about directories. Only call from the actual jump since it can take time"""
    total = sum(path_dict.values()) #Gradually forget about old directories
    if total > max_keyweight:
        # Shrink the whole database to 90% of the cap; the factor is the
        # same for every key, so hoist it out of the loop.
        factor = 0.9*max_keyweight/total
        for k in path_dict.keys():
            path_dict[k] *= factor
        save(path_dict,dic_file)
|
2009-02-18 13:12:13 +00:00
|
|
|
|
2010-04-25 23:00:57 +00:00
|
|
|
def find_matches(dirs,patterns,path_dict,result_list,ignore_case,max_matches):
    """Find max_matches paths that match the pattern, and add them to the result_list"""
    last = len(patterns)-1
    for path,count in dirs:
        if len(result_list) >= max_matches:
            break
        #For the last pattern, only match the end of the pattern
        all_match = True
        for n,p in enumerate(patterns):
            if not match(path,p,path_dict,ignore_case, only_end = (n==last)):
                all_match = False
                break # same short-circuit as all(...): stop at first failure
        if all_match:
            uniqadd(result_list,path)
|
|
|
|
|
2009-09-16 14:06:20 +00:00
|
|
|
def open_dic(dic_file,error_recovery=False):
    """Load the pickled path dictionary from dic_file.

    If the database is missing or corrupt, try once to restore it from the
    daily .bak copy (error_recovery guards against recursing more than
    once); if everything fails, return an empty dict.
    """
    try:
        aj_file=open(dic_file)
        path_dict=cPickle.load(aj_file)
        aj_file.close()
        return path_dict
    except (IOError,EOFError,cPickle.UnpicklingError):
        if not error_recovery and os.path.exists(dic_file+".bak"):
            print >> stderr, 'Problem with autojump database, trying to recover from backup...'
            import shutil
            shutil.copy(dic_file+".bak",dic_file)
            # Retry exactly once with the restored file.
            return open_dic(dic_file,True)
        else: return {} #if everything fails, return an empty file
|
|
|
|
|
2009-02-18 13:12:13 +00:00
|
|
|
#Main code
try:
    # -a: record a visited directory; --stat: dump the database;
    # --import: bulk-import paths; --completion: called from shell
    # tab-completion; --bash: quote the results for bash.
    optlist, args = getopt.getopt(argv[1:], 'a',['stat','import','completion', 'bash'])
except getopt.GetoptError, e:
    print "Unknown command line argument: %s" % e
    exit(1)

dic_file=os.path.expanduser("~/.autojump_py")
path_dict=open_dic(dic_file)
if ('-a','') in optlist:
    # Record a visit (called from the shell prompt hook).
    if(args[-1] != os.path.expanduser("~")): # home dir can be reached quickly by "cd" and may interfere with other directories
        dicadd(path_dict,args[-1])
        save(path_dict,dic_file)
elif ('--stat','') in optlist:
    # Show the 100 heaviest entries, lightest first, plus the total weight.
    a=path_dict.items()
    a.sort(key=lambda e:e[1])
    for path,count in a[-100:]:
        print "%.1f:\t%s" % (count,path)
    print "Total key weight: %d" % sum(path_dict.values())
elif ('--import','') in optlist:
    # Import paths, one per line, from the text file given as last argument.
    for i in open(args[-1]).readlines():
        dicadd(path_dict,i[:-1])
    cPickle.dump(path_dict,open(dic_file,'w'),-1)
else:
    # Jump (or complete): find the best matching stored directory.
    import re
    completion=False
    userchoice=-1 #i if the pattern is of the form __pattern__i, otherwise -1
    results=[]
    if ('--completion','') in optlist:
        completion=True
    else:
        forget(path_dict,dic_file) #gradually forget about old directories
    if not args: patterns=[""]
    else: patterns=args

    #if pattern is a full path, jump there
    if len(patterns) == 1 and\
       len(patterns[0])>0 and\
       patterns[0][0]=="/" and\
       os.path.exists(patterns[0]):
        if not completion : print patterns[0]
    else:
        # Strip the completion suffix from the last pattern:
        # "pattern__3" selects match number 3; a bare trailing "__" is dropped.
        endmatch=re.search("__([0-9]+)",patterns[-1])
        if endmatch:
            userchoice=int(endmatch.group(1))
            patterns[-1]=re.sub("__[0-9]+.*","",patterns[-1])
        else:
            endmatch=re.match("(.*)__",patterns[-1])
            if endmatch: patterns[-1]=endmatch.group(1)

        # Try the heaviest directories first; case-sensitive pass first.
        dirs=path_dict.items()
        dirs.sort(key=lambda e:e[1],reverse=True)
        max_matches = 9 if completion else 1
        find_matches(dirs,patterns,path_dict,results,False,max_matches)
        dirs=path_dict.items() #we need to recreate the list since the first iteration potentially deletes paths
        dirs.sort(key=lambda e:e[1],reverse=True)
        if completion or not results: #if not found, try ignoring case. On completion always show all results
            find_matches(dirs,patterns,path_dict,results,ignore_case=True,max_matches=max_matches)
        if dead_dirs and not completion: #save the dict if there were some non-existent directories in the database
            save(path_dict,dic_file)

        if completion and ('--bash', '') in optlist: quotes='"'
        else: quotes=""

        if userchoice!=-1:
            # The user picked match number `userchoice` (1-based) from a
            # previous completion round.
            if len(results) > userchoice-1 : print quotes+results[userchoice-1]+quotes
        elif len(results) > 1 and completion:
            # Emit numbered alternatives so shell completion can disambiguate.
            print "\n".join(("%s__%d__%s" % (" ".join(patterns),n+1,r) for n,r in enumerate(results[:8])))
        elif results: print quotes+results[0]+quotes