mirror of https://github.com/wting/autojump (synced 2025-06-13 12:54:07 +00:00)
Python, executable file, 307 lines, 12 KiB

#!/usr/bin/env python
"""Copyright Joel Schaerer 2008-2010
This file is part of autojump

autojump is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

autojump is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with autojump.  If not, see <http://www.gnu.org/licenses/>.

Autojump is a small tool that maintains a database of your most
used directories, and finds the best match to help you jump to
frequently used places."""

from __future__ import division, print_function

import getopt
from sys import argv, stderr, version_info, exit, getfilesystemencoding
from tempfile import NamedTemporaryFile
from operator import itemgetter
import os

MAX_KEYWEIGHT = 1000
MAX_STORED_PATHS = 600
COMPLETION_SEPARATOR = '__'

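# These constants bound the database: once the summed weight of all entries
# exceeds MAX_KEYWEIGHT the weights are decayed (see forget() below),
# clean_dict() trims the stored paths to roughly MAX_STORED_PATHS entries,
# and COMPLETION_SEPARATOR is the "__" marker used by the shell
# tab-completion protocol.
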
if "AUTOJUMP_DATA_DIR" in os.environ:
    CONFIG_DIR = os.environ.get("AUTOJUMP_DATA_DIR")
else:
    xdg_data_dir = os.environ.get('XDG_DATA_HOME') or os.path.join(os.environ['HOME'], '.local', 'share')
    CONFIG_DIR = os.path.join(xdg_data_dir, 'autojump')

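# Illustration (hypothetical values): with AUTOJUMP_DATA_DIR unset and
# XDG_DATA_HOME unset, HOME=/home/joel resolves CONFIG_DIR to
# /home/joel/.local/share/autojump; exporting AUTOJUMP_DATA_DIR=/tmp/aj
# would override it to /tmp/aj.
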
def uniqadd(collection, key):
    """Adds a key to a list only if it is not already present"""
    if key not in collection:
        collection.append(key)

def dicadd(dic, key, increment=1):
    """Increment a value in a dict, creating it (counting from 0)
    if it is not already present"""
    dic[key] = dic.get(key, 0.) + increment

def output(unicode_text, encoding=None):
    """Wrapper for the print function, using the filesystem encoding by default
    to minimize encoding mismatch problems in directory names"""
    if version_info[0] > 2:
        print(unicode_text)
    else:
        if encoding is None:
            encoding = getfilesystemencoding()
        print(unicode_text.encode(encoding))

def decode(text, encoding=None, errors="strict"):
    """Decoding step for python2.x which does not default to unicode"""
    if version_info[0] > 2:
        return text
    else:
        if encoding is None:
            encoding = getfilesystemencoding()
        return text.decode(encoding, errors)

def unico(text):
    """if python2, convert to a unicode object"""
    if version_info[0] > 2:
        return text
    else:
        return unicode(text)

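# The three helpers above (output, decode, unico) only smooth over the
# Python 2/3 text model: on Python 3 they are pass-throughs, while on
# Python 2 they encode/decode using the filesystem encoding so that
# non-ASCII directory names survive the round trip to the shell.
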
def save(path_dict, dic_file):
    """Save the database in an atomic way, and preserve
       a backup file."""
    # If the dic_file exists, check that it belongs to us
    # Otherwise, fail quietly
    if (not os.path.exists(dic_file)) or os.getuid() == os.stat(dic_file)[4]:
        temp = NamedTemporaryFile(dir=CONFIG_DIR, delete=False)
        for path in path_dict:
            # the db is stored in utf-8
            temp.write((unico("%s\t%s\n") % (path_dict[path], path)).encode("utf-8"))
        temp.flush()
        os.fsync(temp)
        temp.close()
        # cf. http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
        os.rename(temp.name, dic_file)
        try:  # backup file
            import time
            if (not os.path.exists(dic_file+".bak") or
                    time.time()-os.path.getmtime(dic_file+".bak") > 86400):
                import shutil
                shutil.copy(dic_file, dic_file+".bak")
        except OSError as ex:
            print("Error while creating backup autojump file. (%s)" %
                    ex, file=stderr)

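# On disk the database is a plain utf-8 text file written by save() above,
# one entry per line in the form "<weight>\t<path>".  A hypothetical file
# could contain, for example:
#
#     42.0    /home/joel/workspace
#     3.5     /etc/nginx
#
# (the separator is a single tab).  open_dic() below parses exactly this
# format back into path_dict.
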
def open_dic(dic_file, error_recovery=False):
    """Try hard to open the database file, recovering
       from backup if needed. """
    try:
        path_dict = {}
        with open(dic_file, 'r') as aj_file:
            for l in aj_file.readlines():
                weight, path = l[:-1].split("\t", 1)
                # the db is stored in utf-8
                path = decode(path, "utf-8")
                path_dict[path] = float(weight)
            return path_dict
    except (IOError, EOFError):
        if not error_recovery and os.path.exists(dic_file+".bak"):
            print('Problem with autojump database, '
                  'trying to recover from backup...', file=stderr)
            import shutil
            shutil.copy(dic_file+".bak", dic_file)
            return open_dic(dic_file, True)
        else:
            # Temporary migration code
            old_dic_file = get_dic_file("autojump_py")
            if os.path.exists(old_dic_file):
                try:  # fix to get optimised pickle in python < 3
                    import cPickle as pickle
                except ImportError:
                    import pickle
                try:
                    with open(old_dic_file, 'rb') as aj_file:
                        if version_info[0] > 2:
                            # encoding is only specified for python2.x compatibility
                            path_dict = pickle.load(aj_file, encoding="utf-8")
                        else:
                            path_dict = pickle.load(aj_file)
                    unicode_dict = {}  # we now use unicode internally
                    for k, v in path_dict.items():
                        unicode_dict[decode(k, errors="replace")] = v
                    return unicode_dict
                except (IOError, EOFError, pickle.UnpicklingError):
                    pass
            return {}  # if everything fails, return an empty dictionary


def forget(path_dict, dic_file):
    """Gradually forget about directories. Only call
    from the actual jump since it can take time"""
    keyweight = sum(path_dict.values())
    if keyweight > MAX_KEYWEIGHT:
        for k in path_dict.keys():
            path_dict[k] *= 0.9 * MAX_KEYWEIGHT / keyweight
        save(path_dict, dic_file)

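# Worked example (hypothetical numbers): if the summed weight is 1250, every
# entry is multiplied by 0.9 * 1000 / 1250 = 0.72, which pulls the total back
# down to about 900 while preserving the relative ordering of directories.
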
def clean_dict(sorted_dirs, path_dict):
    """Limits the size of the path_dict to MAX_STORED_PATHS.
    Returns True if keys were deleted"""
    if len(sorted_dirs) > MAX_STORED_PATHS:
        # remove 25 more than needed, to avoid doing it every time
        for path, dummy in sorted_dirs[MAX_STORED_PATHS-25:]:
            del path_dict[path]
        return True
    else:
        return False

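# Worked example (hypothetical count): with 650 stored paths, the slice
# sorted_dirs[575:] (i.e. MAX_STORED_PATHS - 25 onwards) drops the 75
# least-weighted entries in one pass, so the trim does not need to run on
# every invocation.
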
def match(path, pattern, ignore_case=False, only_end=False):
    """Check whether a path matches a particular pattern, and return
       the remaining part of the string"""
    if only_end:
        match_string = "/".join(path.split('/')[-1-pattern.count('/'):])
    else:
        match_string = path
    if ignore_case:
        find_idx = match_string.lower().find(pattern.lower())
    else:
        find_idx = match_string.find(pattern)
    does_match = (find_idx != -1)
    # Eat the path to avoid two patterns matching the same part of the string
    if does_match:
        eaten_path = path[find_idx+len(pattern):]
    else:
        eaten_path = path
    return (does_match, eaten_path)

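# Illustration (hypothetical path, not executed):
#
#     match("/home/joel/workspace", "wo")   -> (True,  "rkspace")
#     match("/home/joel/workspace", "foo")  -> (False, "/home/joel/workspace")
#
# The "eaten" remainder is what the next pattern is matched against, which
# forces successive patterns to match successive parts of the path.
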
def find_matches(dirs, patterns, result_list, ignore_case, max_matches):
    """Find max_matches paths that match the pattern,
    and add them to the result_list"""
    for path, count in dirs:
        # Don't jump to where we already are
        try:
            if decode(os.path.realpath(os.curdir)) == path:
                continue
        # Sometimes the current path doesn't exist anymore.
        # In that case, jump if possible.
        except OSError:
            pass
        # If a path doesn't exist, don't jump there
        # We still keep it in the db in case it's from a removable drive
        if not os.path.exists(path):
            continue
        does_match, eaten_path = True, path
        for n, p in enumerate(patterns):
            # For the last pattern, only match against the end of the path
            does_match, eaten_path = match(eaten_path, p, ignore_case, only_end=(n == len(patterns)-1))
            if not does_match: break
        if does_match:
            uniqadd(result_list, path)
            if len(result_list) >= max_matches:
                break

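# Illustration (hypothetical database entry): with patterns ["wo", "jo"], the
# path /home/joel/workspace/joel is accepted because "wo" matches inside the
# path and the final pattern "jo" still matches within the last component of
# what remains; this is what makes a query such as "j wo jo" land there.
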
def get_dic_file(filename="autojump.txt"):
    if CONFIG_DIR == os.path.expanduser("~"):
        dic_file = CONFIG_DIR + "/." + filename
    else:
        dic_file = CONFIG_DIR + "/" + filename
    return dic_file

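# Illustration (hypothetical home directory): with the default CONFIG_DIR the
# database ends up at /home/joel/.local/share/autojump/autojump.txt; only
# when CONFIG_DIR is the home directory itself does the leading dot hide it,
# as /home/joel/.autojump.txt.
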
def shell_utility():
    """Run this when autojump is called as a shell utility"""
    try:
        optlist, args = getopt.getopt(argv[1:], 'a',
                ['stat', 'import', 'completion', 'bash'])
    except getopt.GetoptError as ex:
        print("Unknown command line argument: %s" % ex, file=stderr)
        exit(1)

    dic_file = get_dic_file()
    path_dict = open_dic(dic_file)
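    # The branches below cover the ways the shell wrappers invoke this script:
    # "autojump -a <dir>" records a directory visit, "autojump --stat" prints
    # the weighted database, "autojump --completion [--bash] <patterns>"
    # produces the tab-completion menu, and a plain "autojump <patterns>"
    # prints the single best match, which the shell wrapper then jumps to.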
    if ('-a', '') in optlist:
        # The home dir can be reached quickly by "cd"
        # and may interfere with other directories
        if args[-1] != os.path.expanduser("~"):
            dicadd(path_dict, decode(args[-1]))
            save(path_dict, dic_file)
    elif ('--stat', '') in optlist:
        paths = list(path_dict.items())
        paths.sort(key=itemgetter(1))
        for path, count in paths[-100:]:
            output(unico("%.1f:\t%s") % (count, path))
        print("Total key weight: %d. Number of stored paths: %d" %
                (sum(path_dict.values()), len(paths)))
    else:
        import re
        completion = False
        # userchoice is i if the last pattern looks like "pattern__i__...", otherwise -1
        userchoice = -1
        results = []
        if ('--completion', '') in optlist:
            completion = True
        else:
            forget(path_dict, dic_file)  # gradually forget about old directories
        if not args: patterns = [unico("")]
        else: patterns = [decode(a) for a in args]

        # If the last pattern contains a full path, jump there
        # The regexp is because we need to support stuff like
        # "j wo jo__3__/home/joel/workspace/joel" for zsh
        last_pattern_path = re.sub("(.*)"+COMPLETION_SEPARATOR, "", patterns[-1])
        if (len(last_pattern_path) > 0 and
              last_pattern_path[0] == "/" and
              os.path.exists(last_pattern_path)):
            if not completion: output(last_pattern_path)
        else:
            # check for ongoing completion, and act accordingly
            endmatch = re.search(COMPLETION_SEPARATOR+"([0-9]+)", patterns[-1])
            if endmatch:  # user has selected a completion
                userchoice = int(endmatch.group(1))
                patterns[-1] = re.sub(COMPLETION_SEPARATOR+"[0-9]+.*",
                        "", patterns[-1])
            else:  # user hasn't selected a completion, display the same choices again
                endmatch = re.match("(.*)"+COMPLETION_SEPARATOR, patterns[-1])
                if endmatch: patterns[-1] = endmatch.group(1)

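            # Illustration (hypothetical input): if the last pattern arrives
            # as "wo__2__", the search above sets userchoice = 2 and strips
            # patterns[-1] back to "wo"; a bare "wo__" has no digits, so it is
            # only stripped back to "wo" and the same menu is shown again.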
            dirs = list(path_dict.items())
            dirs.sort(key=itemgetter(1), reverse=True)
            if completion or userchoice != -1:
                max_matches = 9
            else:
                max_matches = 1
            find_matches(dirs, patterns, results, False, max_matches)
            # If not found, try ignoring case.
            # On completion always show all results
            if completion or not results:
                find_matches(dirs, patterns, results,
                        ignore_case=True, max_matches=max_matches)
            # Keep the database to a reasonable size
            if not completion and clean_dict(dirs, path_dict):
                save(path_dict, dic_file)

            if completion and ('--bash', '') in optlist: quotes = "'"
            else: quotes = ""

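            # Illustration (hypothetical matches): when completing the pattern
            # "wo" with more than one result, the branch below prints a menu
            # such as
            #
            #     wo__1__/home/joel/workspace
            #     wo__2__/home/joel/own/words
            #
            # and whichever entry the user picks comes back as the next
            # pattern, where its __N__ suffix is parsed into userchoice above.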
            if userchoice != -1:
                if len(results) > userchoice-1:
                    output(unico("%s%s%s") % (quotes, results[userchoice-1], quotes))
            elif len(results) > 1 and completion:
                output("\n".join(("%s%s%d%s%s" % (patterns[-1],
                    COMPLETION_SEPARATOR, n+1, COMPLETION_SEPARATOR, r)
                    for n, r in enumerate(results[:8]))))
            elif results: output(unico("%s%s%s") % (quotes, results[0], quotes))
            else:
                return False
            return True


if __name__ == "__main__":
    success = shell_utility()
    if not success: exit(1)