mirror of https://github.com/wting/autojump synced 2024-10-27 20:34:07 +00:00

Use flock instead of temporary files to avoid races/purge on ENOSPC/fsync time

- races: two parallel `autojump --add` runs can purge the database
  (one temp file's rename overwrites the other's changes)
- ENOSPC: leaves autojump.txt empty when no free space is available
- fsync: can take a while

All of this can be fixed with flock(2).
Azat Khuzhin 2015-10-04 20:21:28 +03:00
parent 113a84f9f0
commit 97233e90c8
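
For readers unfamiliar with flock(2), here is a minimal, self-contained sketch of the locking pattern this commit adopts, using Python's fcntl module: readers take a shared lock (LOCK_SH), writers an exclusive one (LOCK_EX). The file path and entry format below are placeholders for illustration, not autojump's actual configuration.

    import fcntl

    DATA_PATH = '/tmp/autojump_demo.txt'  # placeholder path, for illustration only

    def write_entries(entries):
        # Writer side: hold an exclusive lock for the whole rewrite so that
        # concurrent writers serialize instead of clobbering each other.
        with open(DATA_PATH, 'w', encoding='utf-8') as f:
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)
            for path, weight in entries.items():
                f.write('%s\t%s\n' % (weight, path))
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)

    def read_entries():
        # Reader side: a shared lock keeps writers out while reading, so a
        # reader never observes a half-written file.
        with open(DATA_PATH, 'r', encoding='utf-8') as f:
            fcntl.flock(f.fileno(), fcntl.LOCK_SH)
            entries = {}
            for line in f:
                weight, path = line.rstrip('\n').split('\t')
                entries[path] = float(weight)
            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            return entries

    if __name__ == '__main__':
        write_entries({'/home/user': 10.0})
        print(read_entries())

Note that flock(2) locks are advisory: they only coordinate processes that also take the lock.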


@@ -7,7 +7,7 @@ from collections import namedtuple
 import os
 import shutil
 import sys
-from tempfile import NamedTemporaryFile
+import fcntl
 from time import time
 
 if sys.version_info[0] == 3:
@@ -75,10 +75,13 @@ def load(config):
                 config['data_path'],
                 'r', encoding='utf-8',
                 errors='replace') as f:
-            return dict(
+            fcntl.flock(f.fileno(), fcntl.LOCK_SH)
+            d = dict(
                 imap(
                     tupleize,
                     ifilter(correct_length, imap(parse, f))))
+            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+            return d
     except (IOError, EOFError):
         return load_backup(config)
 
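
Why LOCK_SH is enough on the read side: shared locks coexist with each other but conflict with LOCK_EX, so many readers can proceed in parallel while a writer is kept out. The probe below is a sketch with a placeholder path; it works from a single process because flock(2) treats separately opened descriptors as independent lock holders, and LOCK_NB turns the would-be blocking request into an immediate error.

    import fcntl

    PATH = '/tmp/autojump_demo.txt'   # placeholder path, for illustration only
    open(PATH, 'a').close()           # make sure the file exists

    reader_a = open(PATH, 'r')
    reader_b = open(PATH, 'r')
    writer = open(PATH, 'r+')

    fcntl.flock(reader_a.fileno(), fcntl.LOCK_SH)   # first reader
    fcntl.flock(reader_b.fileno(), fcntl.LOCK_SH)   # second reader coexists fine

    try:
        # An exclusive lock conflicts with the shared locks held above; with
        # LOCK_NB the request fails immediately instead of blocking.
        fcntl.flock(writer.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print('writer would have to wait until the readers unlock')

    for handle in (reader_a, reader_b, writer):
        fcntl.flock(handle.fileno(), fcntl.LOCK_UN)
        handle.close()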
@@ -117,24 +120,11 @@ def save(config, data):
     """Save data and create backup, creating a new data file if necessary."""
     create_dir(os.path.dirname(config['data_path']))
 
-    # atomically save by writing to temporary file and moving to destination
-    try:
-        temp = NamedTemporaryFile(delete=False)
-        # Windows cannot reuse the same open file name
-        temp.close()
-
-        with open(temp.name, 'w', encoding='utf-8', errors='replace') as f:
-            for path, weight in data.items():
-                f.write(unico("%s\t%s\n" % (weight, path)))
-
-            f.flush()
-            os.fsync(f)
-    except IOError as ex:
-        print("Error saving autojump data (disk full?)" % ex, file=sys.stderr)
-        sys.exit(1)
-
-    # move temp_file -> autojump.txt
-    move_file(temp.name, config['data_path'])
+    with open(config['data_path'], 'w', encoding='utf-8') as f:
+        fcntl.flock(f.fileno(), fcntl.LOCK_EX)
+        for path, weight in data.items():
+            f.write(unico("%s\t%s\n" % (weight, path)))
+        fcntl.flock(f.fileno(), fcntl.LOCK_UN)
 
     # create backup file if it doesn't exist or is older than BACKUP_THRESHOLD
     if not os.path.exists(config['backup_path']) or \
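
For contrast, the removed code path wrote a private temp file and then moved it over the data file. A sketch of that shape (placeholder path, os.rename standing in for autojump's move_file() helper) makes the race from the commit message concrete: each saver renames its own full snapshot, so whichever rename lands last silently discards the other saver's additions.

    import os
    from tempfile import NamedTemporaryFile

    DATA_PATH = '/tmp/autojump_demo.txt'  # placeholder path, for illustration only

    def save_via_tempfile(entries):
        # Shape of the pre-commit save(): dump a full snapshot to a temp file,
        # then rename it over the data file. The rename itself is atomic, but
        # two concurrent savers each rename their own snapshot, and the last
        # one wins, dropping whatever the other had added.
        temp = NamedTemporaryFile(delete=False)
        temp.close()
        with open(temp.name, 'w', encoding='utf-8') as f:
            for path, weight in entries.items():
                f.write('%s\t%s\n' % (weight, path))
        os.rename(temp.name, DATA_PATH)

    save_via_tempfile({'/home/user': 10.0})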