1
0
mirror of https://github.com/wting/autojump synced 2024-10-27 20:34:07 +00:00

implement load, backup, and save data functionality

This commit is contained in:
William Ting 2013-12-16 14:08:28 -06:00
parent 5fd2859f19
commit 39b5e3030d
2 changed files with 103 additions and 100 deletions

View File

@ -1,138 +1,114 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import print_function
from itertools import imap from itertools import imap
from operator import itemgetter
import os import os
import pickle
import platform
import shutil import shutil
import sys import sys
from time import time
from utils import create_dir
from utils import decode from utils import decode
from utils import is_osx from utils import is_osx
from utils import is_python3 from utils import is_python3
from utils import move_file
from utils import unico as unicode from utils import unico as unicode
BACKUP_THRESHOLD = 24 * 60 * 60
def load(config):
    """Load the weight data from disk into a dict of {path: weight}.

    Migrates legacy OS X data stored under Linux XDG paths first, then
    parses the tab-separated data file. Falls back to the backup file on
    read errors; returns an empty dict when no data file exists.
    """
    xdg_aj_home = os.path.join(
        os.path.expanduser('~'),
        '.local',
        'share',
        'autojump')

    # Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X
    if is_osx() and os.path.exists(xdg_aj_home):
        migrate_osx_xdg_data(config)

    if os.path.exists(config['data_path']):
        try:
            if is_python3():
                # BUG fix: the original opened bare `data_path`, which is
                # undefined here (NameError); the path lives in config.
                with open(config['data_path'], 'r', encoding='utf-8') as f:
                    lines = f.readlines()
            else:
                with open(config['data_path'], 'r') as f:
                    lines = f.readlines()
        except (IOError, EOFError):
            return load_backup(config)

        # example: '10.0\t/home/user\n' -> ['10.0', '/home/user']
        parse = lambda x: x.strip().split('\t')
        # example: ['10.0', '/home/user'] -> (u'/home/user', 10.0)
        convert = lambda x: (decode(x[1], 'utf-8'), float(x[0]))
        return dict(imap(convert, imap(parse, lines)))
    return {}
def load_backup(config):
    """Restore the backup file over the data file and load it.

    Returns an empty dict when no backup exists.
    """
    if os.path.exists(config['data_backup_path']):
        move_file(config['data_backup_path'], config['data_path'])
        return load(config)
    return {}
def migrate_osx_xdg_data(config):
    """
    Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X. This
    migrates autojump files from ~/.local/share/autojump to ~/Library/autojump
    """
    assert is_osx(), "Expecting OSX."
    xdg_data_home = os.path.join(os.path.expanduser('~'), '.local', 'share')
    xdg_aj_home = os.path.join(xdg_data_home, 'autojump')
    # BUG fix: the original lines ended with trailing commas, turning both
    # paths into one-element tuples and breaking every os.path call below.
    data_path = os.path.join(xdg_aj_home, 'autojump.txt')
    data_backup_path = os.path.join(xdg_aj_home, 'autojump.txt.bak')

    if os.path.exists(data_path):
        move_file(data_path, config['data_path'])
    if os.path.exists(data_backup_path):
        move_file(data_backup_path, config['data_backup_path'])

    # cleanup: remove the legacy tree, and its parent if now empty
    shutil.rmtree(xdg_aj_home)
    if len(os.listdir(xdg_data_home)) == 0:
        shutil.rmtree(xdg_data_home)
def save(config, data):
    """Save data and create backup, creating a new data file if necessary.

    Writes atomically: serialize to a temporary file in the data directory,
    fsync, then rename over the real data file. Rotates the data file into
    a backup when the existing backup is missing or older than
    BACKUP_THRESHOLD seconds.
    """
    # NOTE(review): tempfile is not visible in this chunk's import block;
    # imported locally so the function is self-contained — confirm against
    # the file header.
    import tempfile

    create_dir(os.path.dirname(config['data_path']))

    # atomically save by writing to temporary file and moving to destination
    temp_file = tempfile.NamedTemporaryFile(
        dir=os.path.dirname(config['data_path']),
        delete=False)
    try:
        # .items() works on both Python 2 and 3; the original .iteritems()
        # would raise AttributeError on the py3 path this module supports
        for path, weight in sorted(
                data.items(),
                key=itemgetter(1),
                reverse=True):
            temp_file.write((unicode("%s\t%s\n" % (weight, path)).encode("utf-8")))
        temp_file.flush()
        os.fsync(temp_file)
        temp_file.close()
    except IOError as ex:
        # BUG fix: the original format string had no conversion specifier,
        # so "..." % ex raised TypeError instead of printing the error
        print("Error saving autojump data (disk full?): %s" % ex,
              file=sys.stderr)
        sys.exit(1)

    # if no backup file or backup file is older than 24 hours,
    # move autojump.txt -> autojump.txt.bak
    if not os.path.exists(config['data_backup_path']) or \
            (time() - os.path.getmtime(config['data_backup_path']) > BACKUP_THRESHOLD):
        move_file(config['data_path'], config['data_backup_path'])

    # move temp_file -> autojump.txt
    move_file(temp_file.name, config['data_path'])

View File

@ -1,12 +1,22 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import division, print_function from __future__ import print_function
import os
import platform import platform
import sys import sys
def create_dir(path):
    """Create directory `path` (with parents), ignoring 'already exists'.

    Re-raises any other OSError (e.g. permission denied).
    """
    # NOTE(review): errno is not visible in this chunk's import block;
    # imported locally so the function is self-contained — confirm against
    # the file header.
    import errno
    try:
        os.makedirs(path)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
def is_python2():
    """Return True when running under a Python 2 interpreter."""
    # BUG fix: the displayed body compared major version with `> 2`, which
    # returns True on Python 3+ for a function named is_python2.
    return sys.version_info[0] == 2
@ -23,6 +33,10 @@ def is_osx():
return platform.system() == 'Darwin' return platform.system() == 'Darwin'
def is_windows():
    """Return True when the current platform is Windows."""
    system_name = platform.system()
    return system_name == 'Windows'
def decode(string, encoding=None, errors="strict"): def decode(string, encoding=None, errors="strict"):
""" """
Decoding step for Python 2 which does not default to unicode. Decoding step for Python 2 which does not default to unicode.
@ -44,3 +58,16 @@ def unico(string):
return string return string
else: else:
return unicode(string) return unicode(string)
def move_file(src, dst):
    """
    Move *src* to *dst* as atomically as the platform allows.

    Windows cannot atomically replace an existing file via os.rename /
    shutil.move, so on Windows an existing destination is removed first.
    """
    destination_exists = os.path.exists(dst)
    if destination_exists and is_windows():
        # will raise if the destination file is currently in use on Windows
        os.remove(dst)
    shutil.move(src, dst)