1
0
mirror of https://github.com/wting/autojump synced 2024-10-27 20:34:07 +00:00
wting_autojump/autojump
Joël Schaerer 52f3c84b29 try to make autojump more robust to concurrent uses over network filesystems
When your home is on a network filesystem, multiple shells can try
to write to the autojump store concurrently. The previous system was
a bit dangerous since the name of the temp file was always the same.
This should be better, since the different autojump instances should
all write to different temporaries, and only the atomic rename() should
be concurrent. Time will tell...
2010-09-29 15:22:13 +02:00

196 lines
7.7 KiB
Python
Executable File

#!/usr/bin/python
#Copyright Joel Schaerer 2008, 2009
#This file is part of autojump
#autojump is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#autojump is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with autojump. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import cPickle
import getopt
from sys import argv,exit,stderr
from tempfile import NamedTemporaryFile
import os
import signal
#Tunables and configuration
max_keyweight=1000 #when total weight exceeds this, forget() decays all entries
max_stored_paths=600 #maximum number of directories kept in the database (see clean_dict)
completion_separator='__' #separator used in shell completion entries: pattern__N__path
#database directory: $AUTOJUMP_DATA_DIR if set, otherwise the user's home
config_dir=os.environ.get("AUTOJUMP_DATA_DIR",os.path.expanduser("~"))
def signal_handler(arg1,arg2):
    """SIGINT handler: report the interrupt but keep running, so a database
    update in progress is not left half-written."""
    print "Received SIGINT, trying to continue"
signal.signal(signal.SIGINT,signal_handler) #Don't break on sigint
def uniqadd(list,key):
    """Append key to list unless it is already present (keeps order, no duplicates)."""
    if key in list:
        return
    list.append(key)
def dicadd(dic,key,increment=1):
    """Add increment to dic[key], treating a missing key as 0.0."""
    current = dic.get(key, 0.)
    dic[key] = current + increment
def save(path_dict,dic_file):
    """Atomically write path_dict (path -> weight) to dic_file.

    The pickle is written to a uniquely-named temporary file in the same
    directory and then rename()d over the database, so concurrent autojump
    instances (e.g. several shells on a network filesystem) never observe a
    half-written file.  Afterwards a .bak copy is refreshed at most once a
    day, best-effort."""
    f=NamedTemporaryFile(dir=config_dir,delete=False)
    cPickle.dump(path_dict,f,-1)
    f.flush()
    #push the data to disk before the rename makes it visible
    os.fsync(f)
    f.close()
    os.rename(f.name,dic_file) #cf. http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
    try: #backup file, refreshed at most once every 86400s (one day)
        import time
        if not os.path.exists(dic_file+".bak") or time.time()-os.path.getmtime(dic_file+".bak")>86400:
            import shutil
            shutil.copy(dic_file,dic_file+".bak")
    except OSError, e:
        #the backup is best-effort only; never let it break the jump itself
        print >> stderr, "Error while creating backup autojump file. (%s)" % e
def forget(path_dict,dic_file):
    """Gradually forget about directories. Only call from the actual jump since it can take time"""
    total = sum(path_dict.values())
    if total <= max_keyweight:
        return
    #rescale every weight so the total drops to 90% of the cap
    factor = 0.9 * max_keyweight / total
    for directory in path_dict.keys():
        path_dict[directory] *= factor
    save(path_dict, dic_file)
def clean_dict(sorted_dirs,path_dict):
    """Limit path_dict to max_stored_paths entries. Returns True if keys were deleted.

    sorted_dirs must hold the (path, weight) items sorted best-first; the
    lowest-weighted entries are dropped."""
    if len(sorted_dirs) <= max_stored_paths:
        return False
    #remove 25 more than strictly needed, so the trim does not run every time
    for path, _weight in sorted_dirs[max_stored_paths-25:]:
        del path_dict[path]
    return True
# all() is a builtin from python 2.5 on; define a fallback for older interpreters.
# Catch only NameError: that is the sole exception the bare name lookup can raise,
# and a bare except would hide unrelated problems (e.g. KeyboardInterrupt).
try:
    all
except NameError:
    def all(iterable):
        """Return True if every element of iterable is true (python < 2.5 fallback)."""
        for element in iterable:
            if not element:
                return False
        return True
def match(path,pattern,ignore_case=False,only_end=False):
    """Return True when pattern occurs in path and path is a sensible jump target.

    The current directory never matches (jumping there is pointless) and
    paths that no longer exist are rejected (useful for external drives,
    for example).  With only_end, only the trailing path components are
    searched; with ignore_case the comparison is case-insensitive."""
    #never propose the directory we are already in
    try:
        if path == os.path.realpath(os.curdir):
            return False
    except OSError:
        #the current path may have been deleted; in that case, jump if possible
        pass
    if only_end:
        #keep just enough trailing components to cover the slashes in pattern
        tail = 1 + pattern.count('/')
        haystack = "/".join(path.split('/')[-tail:])
    else:
        haystack = path
    if ignore_case:
        found = pattern.lower() in haystack.lower()
    else:
        found = pattern in haystack
    return found and os.path.exists(path)
def find_matches(dirs,patterns,result_list,ignore_case,max_matches):
    """Append up to max_matches matching paths from dirs to result_list.

    dirs holds (path, weight) pairs, best first.  A path qualifies when
    every pattern matches it; only the final pattern is anchored to the
    end of the path."""
    last_index = len(patterns) - 1
    for path, _weight in dirs:
        if len(result_list) >= max_matches:
            break
        hits = all(match(path, p, ignore_case, only_end=(n == last_index))
                   for n, p in enumerate(patterns))
        if hits:
            uniqadd(result_list, path)
def open_dic(dic_file,error_recovery=False):
    """Load and return the pickled path->weight dictionary from dic_file.

    On a read/unpickle failure, try once to restore the .bak copy and
    re-load (error_recovery guards against recursing more than once);
    if that also fails, return an empty dictionary."""
    try:
        aj_file=open(dic_file)
        path_dict=cPickle.load(aj_file)
        aj_file.close()
        return path_dict
    except (IOError,EOFError,cPickle.UnpicklingError):
        if not error_recovery and os.path.exists(dic_file+".bak"):
            print >> stderr, 'Problem with autojump database, trying to recover from backup...'
            import shutil
            shutil.copy(dic_file+".bak",dic_file)
            return open_dic(dic_file,True)
        else: return {} #if everything fails, return an empty file
#Main code: parse the command line and load the database
try:
    optlist, args = getopt.getopt(argv[1:], 'a',['stat','import','completion', 'bash'])
except getopt.GetoptError, e:
    print "Unknown command line argument: %s" % e
    exit(1)
#hidden file in $HOME, plain file in a dedicated data directory
if config_dir == os.path.expanduser("~"):
    dic_file=config_dir+"/.autojump_py"
else:
    dic_file=config_dir+"/autojump_py"
path_dict=open_dic(dic_file)
#Dispatch on the requested mode: -a (record a visit), --stat, --import,
#or the default jump/completion lookup.
if ('-a','') in optlist:
    #record a directory visit (called from the shell prompt hook)
    if(args[-1] != os.path.expanduser("~")): # home dir can be reached quickly by "cd" and may interfere with other directories
        dicadd(path_dict,args[-1])
        save(path_dict,dic_file)
elif ('--stat','') in optlist:
    #print the 100 highest-weighted entries plus totals
    a=path_dict.items()
    a.sort(key=lambda e:e[1])
    for path,count in a[-100:]:
        print "%.1f:\t%s" % (count,path)
    print "Total key weight: %d. Number of stored paths: %d" % (sum(path_dict.values()),len(a))
elif ('--import','') in optlist:
    #import a newline-separated list of paths into the database
    for i in open(args[-1]).readlines():
        dicadd(path_dict,i[:-1])
    cPickle.dump(path_dict,open(dic_file,'w'),-1)
else:
    #default mode: find the best matching directory (or completion candidates)
    import re
    completion=False
    userchoice=-1 #i if the pattern is of the form __pattern__i, otherwise -1
    results=[]
    if ('--completion','') in optlist:
        completion=True
    else:
        forget(path_dict,dic_file) #gradually forget about old directories
    if not args: patterns=[""]
    else: patterns=args
    #if the last pattern contains a full path, jump there
    #the regexp is because we need to support stuff like "j wo jo__3__/home/joel/workspace/joel" for zsh
    last_pattern_path = re.sub("(.*)"+completion_separator,"",patterns[-1])
    #print >> stderr, last_pattern_path
    if len(last_pattern_path)>0 and last_pattern_path[0]=="/" and os.path.exists(last_pattern_path):
        if not completion : print last_pattern_path
    else:
        #check for ongoing completion, and act accordingly
        endmatch=re.search(completion_separator+"([0-9]+)",patterns[-1]) #user has selected a completion
        if endmatch:
            userchoice=int(endmatch.group(1))
            patterns[-1]=re.sub(completion_separator+"[0-9]+.*","",patterns[-1])
        else: #user hasn't selected a completion, display the same choices again
            endmatch=re.match("(.*)"+completion_separator,patterns[-1])
            if endmatch: patterns[-1]=endmatch.group(1)
        #search best-weighted directories first
        dirs=path_dict.items()
        dirs.sort(key=lambda e:e[1],reverse=True)
        if completion or userchoice != -1:
            max_matches = 9
        else:
            max_matches = 1
        find_matches(dirs,patterns,results,False,max_matches)
        if completion or not results: #if not found, try ignoring case. On completion always show all results
            find_matches(dirs,patterns,results,ignore_case=True,max_matches=max_matches)
        if not completion and clean_dict(dirs,path_dict): #keep the database to a reasonable size
            save(path_dict,dic_file)
        #bash completion needs the result quoted
        if completion and ('--bash', '') in optlist: quotes='"'
        else: quotes=""
        if userchoice!=-1:
            if len(results) > userchoice-1 : print quotes+results[userchoice-1]+quotes
        elif len(results) > 1 and completion:
            #several candidates: emit pattern__N__path lines for the completer
            print "\n".join(("%s%s%d%s%s" % (patterns[-1],completion_separator,n+1,completion_separator,r)\
                for n,r in enumerate(results[:8])))
        elif results: print quotes+results[0]+quotes