fix linux file stat after normalize bug; add facilities for utf-8 output

rolux 2013-05-30 20:08:58 +02:00
parent 1b10a09618
commit 8b8dafae34


@@ -2,6 +2,7 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 # GPL 2012
 from __future__ import division, with_statement
 import datetime
 import json
@@ -91,16 +92,15 @@ def organize():
     print 'Scanning %s' % volume_path
     directories = sorted(os.listdir(volume_path))
     for absolute_path, dirnames, filenames in os.walk(volume_path, followlinks=True):
-        absolute_path = unicodedata.normalize('NFD', absolute_path)
-        relative_path = absolute_path[len(volume_path):]
+        relative_path = unicodedata.normalize('NFD', absolute_path)[len(volume_path):]
         parts = relative_path.split('/')
         length = len(parts)
         for filename in ox.sorted_strings(filenames):
+            full_path = os.path.join(absolute_path, filename)
             filename = unicodedata.normalize('NFD', filename)
             if relative_path != previous_path and length == 3:
                 print relative_path
                 previous_path = relative_path
-            full_path = os.path.join(absolute_path, filename)
             path = os.path.join(relative_path, filename)
             if is_system_file(filename):
                 issues['system files'].append(path)
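
The hunk above is the "linux file stat after normalize" fix: previously both absolute_path and filename were NFD-normalized before full_path was built, so any later os.stat(full_path) could fail on Linux, where filenames are stored as the exact bytes given at creation (usually NFC) instead of being normalized by the filesystem as on HFS+. A minimal illustration of the failure mode (not part of the commit; Python 2 like the rest of the script, file name made up):

import os
import unicodedata

name = u'caf\xe9.txt'                       # NFC: single precomposed code point
open(name, 'w').close()                     # stored with NFC bytes on ext4 & co.

nfd = unicodedata.normalize('NFD', name)    # NFD: 'e' + combining acute accent
print os.path.exists(name)                  # True
print os.path.exists(nfd)                   # False on Linux; os.stat(nfd) raises OSError
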
@@ -215,7 +215,7 @@ def organize():
             issues[key].remove(paths)
             if not paths in issues[exists_key]:
                 issues[exists_key].append(paths)
-        elif not filenames[1] in existing_paths:
+        elif not paths[1] in existing_paths:
             # target path does not exist, remove original path from existing paths
             existing_paths.remove(paths[0])
     for paths in [paths for paths in issues[key] if paths[0].lower() != paths[1].lower()]:
@@ -239,6 +239,7 @@ def organize():
             if path_file in new_path:
                 new_paths.append(new_path[path_file])
         if len(new_paths) == len(files_by_path[path]) and len(set(new_paths)) == 1 and new_paths[0] != path:
+            # per path, if all files get moved to the same path, move directories too
             for directory in directories_by_path[path]:
                 new_directory = os.path.join(new_paths[0], directory.split('/')[-1])
                 key = rename_key[new_paths[0]]
@@ -285,23 +286,22 @@ def organize():
     print 'Writing %s' % FILES['files']
     data = ['{']
-    data.append(4 * ' ' + '"directories": ' + json.dumps(directories) + ',')
+    data.append(4 * ' ' + '"directories": ' + get_json(directories) + ',')
     data.append(4 * ' ' + '"files": [')
     for f, file in enumerate(files):
-        data.append(8 * ' ' + json.dumps({
+        data.append(8 * ' ' + get_json({
             'hash': file['hash'],
             'path': file['path'],
             'size': file['size'],
             'time': file['time']
         }, sort_keys=True) + (',' if f < len(files) - 1 else ''))
     data.append(4 * ' ' + '],')
-    data.append(4 * ' ' + '"path": ' + json.dumps(volume_path) + ',')
+    data.append(4 * ' ' + '"path": ' + get_json(volume_path) + ',')
     data.append(4 * ' ' + '"totals": {"files": %d, "size": %d' % (
         len(files), sum([file['size'] for file in files])
     ) + '}')
     data.append('}')
-    with open(FILES['files'], 'w') as f:
-        f.write('\n'.join(data))
+    write_file(FILES['files'], u'\n'.join(data))
 
     print 'Writing %s' % FILES['organize']
     data = ['{']
@@ -318,7 +318,7 @@ def organize():
             data.append('')
             data.append(12 * ' ' + '/* %s */' % issue)
             if issues[issue]:
-                for line in json.dumps(issues[issue], indent=4).split('\n')[1:-1]:
+                for line in get_json(issues[issue], indent=4).split('\n')[1:-1]:
                     data.append(8 * ' ' + line)
             if i < len(issue_keys) - 1:
                 data[-1] += ','
@@ -327,8 +327,7 @@ def organize():
             data.append('')
         data.append(4 * ' ' + '}' + (',' if s < len(sections) - 1 else ''))
     data.append('}')
-    with open(FILES['organize'], 'w') as f:
-        f.write('\n'.join(data))
+    write_file(FILES['organize'], u'\n'.join(data))
 
     print 'Next, edit %s and run pandoraclient organize -x' % FILES['organize']
@@ -548,34 +547,33 @@ def sync():
         data.append('')
         if key == 'same files, different filenames':
             data.append(4 * ' ' + '/* rename in b */')
-            data.append(',\n'.join([4 * ' ' + json.dumps(['b', 'rename'] + x) for x in paths]) + ',')
+            data.append(',\n'.join([4 * ' ' + get_json(['b', 'rename'] + x) for x in paths]) + ',')
             data.append(4 * ' ' + '/* rename in a */')
-            data.append(',\n'.join([4 * ' ' + '// ' + json.dumps(['a', 'rename'] + list(reversed(x))) for x in paths]) + ',')
+            data.append(',\n'.join([4 * ' ' + '// ' + get_json(['a', 'rename'] + list(reversed(x))) for x in paths]) + ',')
         elif key == 'same directory, different files':
             data.append(4 * ' ' + '/* remove in b, copy from a to b */')
-            data.append(',\n'.join([4 * ' ' + json.dumps(['b', 'remove', x]) for x in paths[1]]) + ',')
-            data.append(',\n'.join([4 * ' ' + json.dumps(['a', 'copy', x]) for x in paths[0]]) + ',')
+            data.append(',\n'.join([4 * ' ' + get_json(['b', 'remove', x]) for x in paths[1]]) + ',')
+            data.append(',\n'.join([4 * ' ' + get_json(['a', 'copy', x]) for x in paths[0]]) + ',')
             data.append(4 * ' ' + '/* remove in a, copy from b to a */')
-            data.append(',\n'.join([4 * ' ' + '// ' + json.dumps(['a', 'remove', x]) for x in paths[0]]) + ',')
-            data.append(',\n'.join([4 * ' ' + '// ' + json.dumps(['b', 'copy', x]) for x in paths[1]]) + ',')
+            data.append(',\n'.join([4 * ' ' + '// ' + get_json(['a', 'remove', x]) for x in paths[0]]) + ',')
+            data.append(',\n'.join([4 * ' ' + '// ' + get_json(['b', 'copy', x]) for x in paths[1]]) + ',')
         elif key == 'same file, different directory':
             data.append(4 * ' ' + '/* rename in b */')
-            print json.dumps(paths, indent=4)
-            data.append(4 * ' ' + json.dumps(['b', 'rename'] + paths) + ',')
+            #print get_json(paths, indent=4).encode('utf-8')
+            data.append(4 * ' ' + get_json(['b', 'rename'] + paths) + ',')
             data.append(4 * ' ' + '/* rename in a */')
-            data.append(4 * ' ' + '// ' + json.dumps(['a', 'rename'] + list(reversed(paths))) + ',')
+            data.append(4 * ' ' + '// ' + get_json(['a', 'rename'] + list(reversed(paths))) + ',')
         else:
             copy = ['a', 'b'] if paths[0] else ['b', 'a']
             i = 0 if paths[0] else 1
             data.append(4 * ' ' + '/* copy from %s to %s */' % (copy[0], copy[1]))
-            data.append('\n'.join([4 * ' ' + json.dumps([copy[0], 'copy', x]) for x in paths[i]]) + ',')
+            data.append('\n'.join([4 * ' ' + get_json([copy[0], 'copy', x]) for x in paths[i]]) + ',')
             data.append(4 * ' ' + '/* remove in %s */' % copy[0])
-            data.append('\n'.join([4 * ' ' + '// ' + json.dumps([copy[0], 'remove', x]) for x in paths[i]]) + ',')
+            data.append('\n'.join([4 * ' ' + '// ' + get_json([copy[0], 'remove', x]) for x in paths[i]]) + ',')
     data[-1] = data[-1][:-1]
     data.append('')
     data.append(']')
-    with open(FILES['sync'], 'w') as f:
-        f.write('\n'.join(data))
+    write_file(FILES['sync'], u'\n'.join(data))
 
 
 def execute_sync():
@@ -601,6 +599,16 @@ def get_config():
     return config
 
 
+def get_json(data, indent=None, sort_keys=False):
+    # return json.dumps(data, ensure_ascii=False, indent=indent, sort_keys=sort_keys)
+    return json.dumps(data, indent=indent, sort_keys=sort_keys)
+
+def write_file(filename, data):
+    with open(filename, 'w') as f:
+        # f.write(data.encode('utf-8'))
+        f.write(data)
+
+
 if __name__ == '__main__':
     actions = ['copy', 'organize', 'sync', 'upload']
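
The new get_json() and write_file() helpers centralize JSON serialization and file output; the commented-out lines are the intended UTF-8 path (ensure_ascii=False plus an explicit .encode('utf-8') before writing). A standalone sketch of what enabling them would change (illustrative only; the sample data and output path are made up):

import json

data = {'path': u'Fran\xe7ois/Film.avi'}

escaped = json.dumps(data)                      # '{"path": "Fran\\u00e7ois/Film.avi"}'
utf8 = json.dumps(data, ensure_ascii=False)     # u'{"path": "Fran\xe7ois/Film.avi"}'

with open('/tmp/files.json', 'w') as f:         # hypothetical output path
    # a unicode result must be encoded before writing to a byte-oriented file
    f.write(utf8.encode('utf-8'))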