1
0
mirror of https://github.com/wting/autojump synced 2024-10-27 20:34:07 +00:00

don't remove non-existent paths, keep the db small

Change the way we keep the database under a reasonable size, to better support removable drives
Instead of removing non-existent paths, keep the 300 most recent directories
This commit is contained in:
Joël Schaerer 2010-07-21 16:44:43 +02:00
parent 5b0b8f866e
commit 1be62c7ad2

View File

@ -22,8 +22,8 @@ from sys import argv,exit,stderr
import os import os
import signal import signal
max_keyweight=1000 max_keyweight=1000
max_stored_paths=300
completion_separator='__' completion_separator='__'
dead_dirs=False #global variable (evil ;-) to know if we should save the dict at the end
def signal_handler(arg1,arg2): def signal_handler(arg1,arg2):
print "Received SIGINT, trying to continue" print "Received SIGINT, trying to continue"
@ -36,31 +36,6 @@ def uniqadd(list,key):
def dicadd(dic,key,increment=1): def dicadd(dic,key,increment=1):
dic[key]=dic.get(key,0.)+increment dic[key]=dic.get(key,0.)+increment
def match(path,pattern,path_dict,ignore_case=False,only_end=False):
    """Return True when *pattern* occurs in *path* and the path still exists.

    The current working directory never matches (jumping there is useless).
    A matching path that no longer exists is deleted from path_dict and the
    global dead_dirs flag is raised so the caller knows to re-save the db.
    """
    # Never jump to the directory we are already in.
    try:
        if os.path.realpath(os.curdir) == path:
            return False
    except OSError:
        # The cwd itself may have been deleted; still allow jumping away.
        pass
    # When only_end is set, match only against the tail of the path
    # (as many trailing components as the pattern has slashes, plus one).
    if only_end:
        tail = path.split('/')[-1 - pattern.count('/'):]
        haystack = "/".join(tail)
    else:
        haystack = path
    if ignore_case:
        found = pattern.lower() in haystack.lower()
    else:
        found = pattern in haystack
    if not found:
        return False
    if os.path.exists(path):
        return True
    # Matched a dead directory: purge it and mark the database dirty.
    del path_dict[path]
    global dead_dirs
    dead_dirs = True
    return False
def save(path_dict,dic_file): def save(path_dict,dic_file):
f=open(dic_file+".tmp",'w') f=open(dic_file+".tmp",'w')
cPickle.dump(path_dict,f,-1) cPickle.dump(path_dict,f,-1)
@ -84,6 +59,14 @@ def forget(path_dict,dic_file):
path_dict[k]*=0.9*max_keyweight/keyweight path_dict[k]*=0.9*max_keyweight/keyweight
save(path_dict,dic_file) save(path_dict,dic_file)
def clean_dict(sorted_dirs, path_dict, limit=None):
    """Limit the size of path_dict to *limit* entries.

    sorted_dirs must be path_dict.items() sorted by decreasing weight;
    the lowest-weight entries beyond the limit are deleted from path_dict
    in place. Returns True if any keys were deleted, False otherwise.

    limit defaults to the module-level max_stored_paths so existing
    callers keep their behavior; passing it explicitly allows reuse
    with a different cap.
    """
    if limit is None:
        limit = max_stored_paths  # module-wide cap on stored directories
    if len(sorted_dirs) <= limit:
        return False
    # Drop everything past the cut-off, i.e. the least-used paths.
    for path, _weight in sorted_dirs[limit:]:
        del path_dict[path]
    return True
# catch all unavailable (python < 2.5) # catch all unavailable (python < 2.5)
try: try:
all all
@ -94,12 +77,28 @@ except:
return False return False
return True return True
def match(path,pattern,ignore_case=False,only_end=False):
    """Return True when *pattern* occurs in *path* and the path exists.

    The current working directory never matches (jumping there is useless).
    Existence is checked so that entries on unplugged removable drives are
    skipped instead of being jumped to.
    """
    # Never jump to the directory we are already in.
    try:
        if os.path.realpath(os.curdir) == path:
            return False
    except OSError:
        # The cwd itself may be gone; still allow jumping somewhere else.
        pass
    # When only_end is set, match only against the tail of the path
    # (as many trailing components as the pattern has slashes, plus one).
    if only_end:
        haystack = "/".join(path.split('/')[-1 - pattern.count('/'):])
    else:
        haystack = path
    if ignore_case:
        found = pattern.lower() in haystack.lower()
    else:
        found = pattern in haystack
    # Only report a hit if the directory is still there (external drives...).
    return found and os.path.exists(path)
def find_matches(dirs,patterns,result_list,ignore_case,max_matches):
    """Find up to max_matches paths matching every pattern and append them to result_list."""
    last = len(patterns) - 1
    for path, _count in dirs:
        if len(result_list) >= max_matches:
            break
        # Only the last pattern is anchored to the end of the path.
        if all(match(path, p, ignore_case, only_end=(n == last)) for n, p in enumerate(patterns)):
            uniqadd(result_list, path)
def open_dic(dic_file,error_recovery=False): def open_dic(dic_file,error_recovery=False):
@ -173,12 +172,10 @@ else:
max_matches = 9 max_matches = 9
else: else:
max_matches = 1 max_matches = 1
find_matches(dirs,patterns,path_dict,results,False,max_matches) find_matches(dirs,patterns,results,False,max_matches)
dirs=path_dict.items() #we need to recreate the list since the first iteration potentially deletes paths
dirs.sort(key=lambda e:e[1],reverse=True)
if completion or not results: #if not found, try ignoring case. On completion always show all results if completion or not results: #if not found, try ignoring case. On completion always show all results
find_matches(dirs,patterns,path_dict,results,ignore_case=True,max_matches=max_matches) find_matches(dirs,patterns,results,ignore_case=True,max_matches=max_matches)
if dead_dirs and not completion: #save the dict if there were some non-existent directories in the database if not completion and clean_dict(dirs,path_dict): #keep the database to a reasonable size
save(path_dict,dic_file) save(path_dict,dic_file)
if completion and ('--bash', '') in optlist: quotes='"' if completion and ('--bash', '') in optlist: quotes='"'