#!/usr/bin/env python
# -*- coding: utf-8 -*-
|
2013-12-16 20:08:28 +00:00
|
|
|
from __future__ import print_function
|
2013-12-16 17:20:40 +00:00
|
|
|
|
2013-12-16 21:19:04 +00:00
|
|
|
from codecs import open
|
2013-12-17 20:48:12 +00:00
|
|
|
from collections import namedtuple
|
2013-12-16 17:20:40 +00:00
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import sys
|
2015-10-04 17:21:28 +00:00
|
|
|
import fcntl
|
2013-12-16 20:08:28 +00:00
|
|
|
from time import time
|
2013-12-16 17:20:40 +00:00
|
|
|
|
2013-12-18 22:51:26 +00:00
|
|
|
# Python 2/3 compatibility: on Python 3 the builtins `filter` and `map` are
# already lazy, so alias them to the itertools names used below; on Python 2
# import the lazy variants from itertools.
if sys.version_info[0] == 3:
    ifilter = filter
    imap = map
else:
    from itertools import ifilter
    from itertools import imap
|
|
|
|
|
2013-12-30 20:49:34 +00:00
|
|
|
from autojump_utils import create_dir
|
2014-01-07 17:44:44 +00:00
|
|
|
from autojump_utils import unico
|
2013-12-30 20:49:34 +00:00
|
|
|
from autojump_utils import is_osx
|
|
|
|
from autojump_utils import is_python3
|
|
|
|
from autojump_utils import move_file
|
2013-12-16 17:20:40 +00:00
|
|
|
|
|
|
|
|
2013-12-16 20:08:28 +00:00
|
|
|
# Minimum age (in seconds) of the existing backup file before save()
# refreshes it: one day.
BACKUP_THRESHOLD = 24 * 60 * 60
|
2013-12-17 20:48:12 +00:00
|
|
|
# One data-file record: a directory path and its accumulated weight (float).
Entry = namedtuple('Entry', ['path', 'weight'])
|
|
|
|
|
|
|
|
|
|
|
|
def dictify(entries):
    """
    Build a {path: weight} dictionary from an iterable of entries.

    Each entry must expose `.path` and `.weight` attributes (see Entry);
    a later duplicate path overwrites an earlier one.
    """
    return dict((entry.path, entry.weight) for entry in entries)
|
|
|
|
|
|
|
|
|
|
|
|
def entriefy(data):
    """Lazily convert a {path: weight} dictionary into an iterator of Entry tuples."""
    def to_entry(pair):
        return Entry(*pair)

    if is_python3():
        return map(to_entry, data.items())
    return imap(to_entry, data.iteritems())
|
2013-12-16 20:08:28 +00:00
|
|
|
|
|
|
|
|
2013-12-16 17:20:40 +00:00
|
|
|
def load(config):
    """
    Return a {path: weight} dictionary loaded from the data file.

    Falls back to the backup file when the data file is missing, unreadable,
    or corrupt.  On OS X, first migrates any data an older version left in
    the Linux-style XDG location.
    """
    xdg_aj_home = os.path.join(
        os.path.expanduser('~'),
        '.local',
        'share',
        'autojump')

    if is_osx() and os.path.exists(xdg_aj_home):
        migrate_osx_xdg_data(config)

    # no data file yet: first run, nothing to load
    if not os.path.exists(config['data_path']):
        return {}

    # example: u'10.0\t/home/user\n' -> ['10.0', u'/home/user']
    parse = lambda line: line.strip().split('\t')

    # drop malformed lines that don't split into exactly (weight, path)
    correct_length = lambda x: len(x) == 2

    # example: ['10.0', u'/home/user'] -> (u'/home/user', 10.0)
    tupleize = lambda x: (x[1], float(x[0]))

    try:
        with open(
                config['data_path'],
                'r', encoding='utf-8',
                errors='replace') as f:
            # shared lock: concurrent readers are fine, but block a
            # simultaneous save() from truncating the file under us
            fcntl.flock(f.fileno(), fcntl.LOCK_SH)
            d = dict(
                imap(
                    tupleize,
                    ifilter(correct_length, imap(parse, f))))
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            return d
    except (IOError, EOFError, ValueError):
        # ValueError: a corrupt line had a non-numeric weight field, e.g.
        # "abc\t/home" -- treat the file as damaged and restore the backup
        return load_backup(config)
|
2013-12-16 17:20:40 +00:00
|
|
|
|
|
|
|
|
2013-12-16 20:08:28 +00:00
|
|
|
def load_backup(config):
    """Promote the backup file to data file and load it; {} if no backup exists."""
    backup = config['backup_path']
    if not os.path.exists(backup):
        return {}
    move_file(backup, config['data_path'])
    return load(config)
|
2013-12-16 17:20:40 +00:00
|
|
|
|
|
|
|
|
2013-12-16 20:08:28 +00:00
|
|
|
def migrate_osx_xdg_data(config):
    """
    Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X. This
    migrates autojump files from ~/.local/share/autojump to ~/Library/autojump
    """
    assert is_osx(), "This function should only be run on OS X."

    xdg_data_home = os.path.join(os.path.expanduser('~'), '.local', 'share')
    xdg_aj_home = os.path.join(xdg_data_home, 'autojump')
    old_data = os.path.join(xdg_aj_home, 'autojump.txt')
    old_backup = os.path.join(xdg_aj_home, 'autojump.txt.bak')

    # move whichever of the two files exist into the configured locations
    if os.path.exists(old_data):
        move_file(old_data, config['data_path'])
    if os.path.exists(old_backup):
        move_file(old_backup, config['backup_path'])

    # cleanup: remove the stale XDG directory, and its parent too if that
    # leaves it empty
    shutil.rmtree(xdg_aj_home)
    if not os.listdir(xdg_data_home):
        shutil.rmtree(xdg_data_home)
|
2013-12-16 17:20:40 +00:00
|
|
|
|
|
|
|
|
2013-12-16 20:08:28 +00:00
|
|
|
def save(config, data):
    """Save data and create backup, creating a new data file if necessary."""
    data_path = config['data_path']
    backup_path = config['backup_path']
    create_dir(os.path.dirname(data_path))

    with open(data_path, 'w', encoding='utf-8') as f:
        # exclusive lock: keep a concurrent load() from reading a partial file
        fcntl.flock(f.fileno(), fcntl.LOCK_EX)
        f.writelines(
            unico("%s\t%s\n" % (weight, path))
            for path, weight in data.items())
        fcntl.flock(f.fileno(), fcntl.LOCK_UN)

    # refresh the backup when it is missing or older than BACKUP_THRESHOLD
    backup_missing = not os.path.exists(backup_path)
    if backup_missing or \
            time() - os.path.getmtime(backup_path) > BACKUP_THRESHOLD:
        shutil.copy(data_path, backup_path)
|