mirror of https://github.com/wting/autojump

Merge remote branch 'joelthelion/master'

Conflicts:
	autojump

commit f403a4b529
autojump | 157 changed lines

@@ -21,19 +21,16 @@ frequently used places."""
 from __future__ import division, print_function
-try: # fix to get optimised pickle in python < 3
-    import cPickle as pickle
-except ImportError:
-    import pickle
-
 import getopt
-from sys import argv, stderr, version_info, exit
+from sys import argv, stderr, version_info, exit, getfilesystemencoding
 from tempfile import NamedTemporaryFile
 from operator import itemgetter
 import os
 
 MAX_KEYWEIGHT = 1000
 MAX_STORED_PATHS = 600
 COMPLETION_SEPARATOR = '__'
 
 if "AUTOJUMP_DATA_DIR" in os.environ:
     CONFIG_DIR = os.environ.get("AUTOJUMP_DATA_DIR")
 else:
@@ -50,6 +47,32 @@ def dicadd(dic, key, increment=1):
     if is is not already present"""
     dic[key] = dic.get(key, 0.)+increment
 
+def output(unicode_text,encoding=None):
+    """Wrapper for the print function, using the filesystem encoding by default
+    to minimize encoding mismatch problems in directory names"""
+    if version_info[0] > 2:
+        print(unicode_text)
+    else:
+        if encoding is None:
+            encoding = getfilesystemencoding()
+        print(unicode_text.encode(encoding))
+
+def decode(text,encoding=None,errors="strict"):
+    """Decoding step for python2.x which does not default to unicode"""
+    if version_info[0] > 2:
+        return text
+    else:
+        if encoding is None:
+            encoding = getfilesystemencoding()
+        return text.decode(encoding,errors)
+
+def unico(text):
+    """if python2, convert to a unicode object"""
+    if version_info[0] > 2:
+        return text
+    else:
+        return unicode(text)
+
 def save(path_dict, dic_file):
     """Save the database in an atomic way, and preserve
        a backup file."""
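
The three helpers added above exist only to paper over the Python 2/3 unicode split: decode() turns shell-supplied byte strings into unicode, unico() builds unicode format strings, and output() encodes back to the filesystem encoding before printing. The following standalone sketch mirrors that pattern for illustration only; it is not the shipped module, and the sample path is made up.

    # Illustrative sketch of the decode()/output() pattern introduced above.
    from __future__ import print_function
    from sys import version_info, getfilesystemencoding

    def decode(text, encoding=None, errors="strict"):
        # Python 3 strings are already unicode; Python 2 byte strings need an explicit decode.
        if version_info[0] > 2:
            return text
        return text.decode(encoding or getfilesystemencoding(), errors)

    def output(unicode_text, encoding=None):
        # Print unicode directly on Python 3; re-encode to the filesystem encoding on Python 2.
        if version_info[0] > 2:
            print(unicode_text)
        else:
            print(unicode_text.encode(encoding or getfilesystemencoding()))

    # Made-up path; non-ASCII directory names are the case these helpers exist for.
    output(decode("/home/user/projects"))
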
@@ -57,12 +80,13 @@ def save(path_dict, dic_file):
     # Otherwise, fail quietly
     if (not os.path.exists(dic_file)) or os.name == 'nt' or os.getuid() == os.stat(dic_file)[4]:
         temp = NamedTemporaryFile(dir=CONFIG_DIR, delete=False)
-        pickle.dump(path_dict, temp, 2)
+        for path,weight in sorted(path_dict.items(),key=itemgetter(1),reverse=True):
+            # the db is stored in utf-8
+            temp.write((unico("%s\t%s\n")%(weight,path)).encode("utf-8"))
         temp.flush()
         os.fsync(temp)
         temp.close()
         #cf. http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
-        #os.rename(temp.name, dic_file)
         import shutil
         shutil.copy(temp.name, dic_file)
         try: #backup file
@@ -75,6 +99,50 @@ def save(path_dict, dic_file):
             print("Error while creating backup autojump file. (%s)" %
                     ex, file=stderr)
 
+def open_dic(dic_file, error_recovery=False):
+    """Try hard to open the database file, recovering
+       from backup if needed. """
+    try:
+        path_dict = {}
+        with open(dic_file, 'r') as aj_file:
+            for l in aj_file.readlines():
+                weight,path = l[:-1].split("\t",1)
+                # the db is stored in utf-8
+                path = decode(path,"utf-8")
+                path_dict[path] = float(weight)
+            return path_dict
+    except (IOError, EOFError):
+        if not error_recovery and os.path.exists(dic_file+".bak"):
+            print('Problem with autojump database,\
+                    trying to recover from backup...', file=stderr)
+            import shutil
+            shutil.copy(dic_file+".bak", dic_file)
+            return open_dic(dic_file, True)
+        else:
+            # Temporary migration code
+            old_dic_file = get_dic_file("autojump_py")
+            if os.path.exists(old_dic_file):
+                try: # fix to get optimised pickle in python < 3
+                    import cPickle as pickle
+                except ImportError:
+                    import pickle
+                try:
+                    with open(old_dic_file, 'rb') as aj_file:
+                        if version_info[0] > 2:
+                            #encoding is only specified for python2.x compatibility
+                            path_dict = pickle.load(aj_file, encoding="utf-8")
+                        else:
+                            path_dict = pickle.load(aj_file)
+                    unicode_dict = {} #we now use unicode internally
+                    for k,v in path_dict.items():
+                        print(k)
+                        unicode_dict[decode(k,errors="replace")] = v
+                    return unicode_dict
+                except (IOError, EOFError, pickle.UnpicklingError):
+                    pass
+            return {} #if everything fails, return an empty file
+
+
 def forget(path_dict, dic_file):
     """Gradually forget about directories. Only call
     from the actual jump since it can take time"""
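
Taken together with the new save() above, the database is no longer a pickle but a plain UTF-8 text file with one weight<TAB>path record per line, and open_dic() falls back to the old pickle file (autojump_py) once during migration. A rough, standalone illustration of the new on-disk format with made-up contents:

    # Sketch only: made-up records in the new text format, parsed the same way open_dic() does.
    sample = u"20.0\t/home/user/projects/autojump\n10.5\t/tmp/music\n"

    path_dict = {}
    for line in sample.splitlines():
        # Split on the first tab only, so paths that themselves contain tabs stay intact.
        weight, path = line.split("\t", 1)
        path_dict[path] = float(weight)

    print(path_dict)  # {'/home/user/projects/autojump': 20.0, '/tmp/music': 10.5}
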
@@ -113,58 +181,30 @@ def match(path, pattern, ignore_case=False, only_end=False):
         eaten_path = path
     return (does_match, eaten_path)
 
-def find_matches(dirs, patterns, result_list, ignore_case, max_matches):
+def find_matches(dirs, patterns, result_list, ignore_case, max_matches, current_dir):
     """Find max_matches paths that match the pattern, 
     and add them to the result_list"""
     for path, count in dirs:
         # Don't jump to where we alread are
-        try:
-            if os.path.realpath(os.curdir) == path :
-                continue
-        #Sometimes the current path doesn't exist anymore.
-        #In that case, jump if possible.
-        except OSError:
-            pass
-        #If a path doesn't exist, don't jump there
-        #We still keep it in db in case it's from a removable drive
-        if not os.path.exists(path):
+        if current_dir == path :
             continue
         does_match, eaten_path = True, path
         for n,p in enumerate(patterns):
             #For the last pattern, only match the end of the pattern
             does_match, eaten_path = match(eaten_path, p, ignore_case, only_end=(n == len(patterns)-1))
             if not does_match: break
-        if does_match:
+        #If a path doesn't exist, don't jump there
+        #We still keep it in db in case it's from a removable drive
+        if does_match and os.path.exists(path):
             uniqadd(result_list, path)
             if len(result_list) >= max_matches :
                 break
 
-def open_dic(dic_file, error_recovery=False):
-    """Try hard to open the database file, recovering
-       from backup if needed. """
-    try:
-        aj_file = open(dic_file, 'rb')
-        if version_info[0] > 2:
-            #encoding is only specified for python2.x compatibility
-            path_dict = pickle.load(aj_file, encoding="utf-8")
-        else:
-            path_dict = pickle.load(aj_file)
-        aj_file.close()
-        return path_dict
-    except (IOError, EOFError, pickle.UnpicklingError):
-        if not error_recovery and os.path.exists(dic_file+".bak"):
-            print('Problem with autojump database,\
-                    trying to recover from backup...', file=stderr)
-            import shutil
-            shutil.copy(dic_file+".bak", dic_file)
-            return open_dic(dic_file, True)
-        else: return {} #if everything fails, return an empty file
-
-def get_dic_file():
+def get_dic_file(filename="autojump.txt"):
     if CONFIG_DIR == os.path.expanduser("~"):
-        dic_file = CONFIG_DIR+"/.autojump_py"
+        dic_file = CONFIG_DIR+"/." + filename
     else:
-        dic_file = CONFIG_DIR+"/autojump_py"
+        dic_file = CONFIG_DIR+"/" + filename
     return dic_file
 
 def shell_utility():
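
get_dic_file() now takes the file name as a parameter, so the same helper can locate either the new text database or the legacy pickle file that the migration path in open_dic() reads. A standalone sketch of that lookup, with an illustrative CONFIG_DIR:

    import os

    CONFIG_DIR = os.path.expanduser("~")  # illustrative; the real value may come from AUTOJUMP_DATA_DIR

    def get_dic_file(filename="autojump.txt"):
        # Hidden dot-file when the data dir is $HOME, plain name inside a dedicated data dir.
        if CONFIG_DIR == os.path.expanduser("~"):
            return CONFIG_DIR + "/." + filename
        return CONFIG_DIR + "/" + filename

    print(get_dic_file())               # e.g. ~/.autojump.txt
    print(get_dic_file("autojump_py"))  # legacy pickle location used by the migration code
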
@@ -182,13 +222,13 @@ def shell_utility():
         # The home dir can be reached quickly by "cd"
         # and may interfere with other directories
         if(args[-1] != os.path.expanduser("~")): 
-            dicadd(path_dict, args[-1])
+            dicadd(path_dict, decode(args[-1]))
             save(path_dict, dic_file)
     elif ('--stat', '') in optlist:
         paths = list(path_dict.items())
         paths.sort(key=itemgetter(1))
         for path, count in paths[-100:]:
-            print("%.1f:\t%s" % (count, path))
+            output(unico("%.1f:\t%s") % (count, path))
         print("Total key weight: %d. Number of stored paths: %d" %
                 (sum(path_dict.values()), len(paths)))
     else:
@@ -201,8 +241,8 @@ def shell_utility():
             completion = True
         else:
             forget(path_dict, dic_file) #gradually forget about old directories
-        if not args: patterns = [""]
-        else: patterns = args
+        if not args: patterns = [unico("")]
+        else: patterns = [decode(a) for a in args]
 
         # If the last pattern contains a full path, jump there
         # The regexp is because we need to support stuff like
@@ -211,7 +251,7 @@ def shell_utility():
         if (len(last_pattern_path)>0 and
               last_pattern_path[0] == "/" and
               os.path.exists(last_pattern_path)):
-            if not completion: print(last_pattern_path)
+            if not completion: output(last_pattern_path)
         else:
             #check for ongoing completion, and act accordingly
             endmatch = re.search(COMPLETION_SEPARATOR+"([0-9]+)", patterns[-1])
@@ -229,12 +269,21 @@ def shell_utility():
                 max_matches = 9
             else:
                 max_matches = 1
-            find_matches(dirs, patterns, results, False, max_matches)
+
+            # Don't jump to the current directory
+            try:
+                current_dir = decode(os.path.realpath(os.curdir))
+            #Sometimes the current path doesn't exist anymore.
+            #In that case, jump if possible.
+            except OSError:
+                current_dir = None
+            find_matches(dirs, patterns, results, False, max_matches, current_dir)
             # If not found, try ignoring case.
             # On completion always show all results
             if completion or not results: 
                 find_matches(dirs, patterns, results,
-                        ignore_case=True, max_matches=max_matches) 
+                        ignore_case=True,
+                        max_matches=max_matches, current_dir=current_dir) 
             # Keep the database to a reasonable size
             if not completion and clean_dict(dirs, path_dict):
                 save(path_dict, dic_file)
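
The current-directory check moved out of find_matches(): the caller now resolves it once, tolerating the case where the working directory no longer exists, and passes it down. A standalone sketch of that resolution step (the helper name is made up):

    import os

    def resolve_current_dir():
        # os.path.realpath(os.curdir) can raise OSError when the shell's working
        # directory has been deleted; in that case jumping should still be possible.
        try:
            return os.path.realpath(os.curdir)
        except OSError:
            return None

    print(resolve_current_dir())
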
@@ -244,12 +293,12 @@ def shell_utility():
 
             if userchoice != -1:
                 if len(results) > userchoice-1 : 
-                    print(quotes+results[userchoice-1]+quotes)
+                    output(unico("%s%s%s") % (quotes,results[userchoice-1],quotes))
             elif len(results) > 1 and completion:
-                print("\n".join(("%s%s%d%s%s" % (patterns[-1],
+                output("\n".join(("%s%s%d%s%s" % (patterns[-1],
                     COMPLETION_SEPARATOR, n+1, COMPLETION_SEPARATOR, r)
                     for n, r in enumerate(results[:8]))))
-            elif results: print(quotes+results[0]+quotes)
+            elif results: output(unico("%s%s%s")%(quotes,results[0],quotes))
             else:
                 return False
             return True