migrate more data

This commit is contained in:
j 2011-12-04 18:05:48 +01:00
parent 272c228326
commit f435f2b14b
2 changed files with 103 additions and 7 deletions

View file

@ -29,9 +29,11 @@ from datetime import datetime
from ox.utils import json
import ox
from item.models import Item
from annotation.models import Annotation, Layer
from annotation.models import Annotation
from archive.models import File
from urlalias.models import IDAlias, LayerAlias
from urlalias.models import IDAlias, LayerAlias, ListAlias
from place.models import Place
from itemlist.models import List
from django.db import connection, transaction
@ -41,6 +43,10 @@ with open('users.json') as f: users = json.load(f)
with open('padma_files.json') as f: padma = json.load(f)
with open('padma_locations.json') as f: locations = json.load(f)
with open('padma_lists.json') as f: lists = json.load(f)
with open('padma_data.json') as f: padma_data = json.load(f)
longest_username = max([len(u['username'].strip()) for u in users]) + 1
@ -65,7 +71,7 @@ for u in users:
profile.set_level('member')
profile.save()
for g in u['groups']:
if g and g.strip() and g not 'admin':
if g and g.strip() and g != 'admin':
group, created = Group.objects.get_or_create(name=g)
user.groups.add(group)
@ -153,3 +159,36 @@ for oldId in sorted(padma, key=lambda x: padma[x]['created']):
f.save()
i += 1
print item, item.available
#lists
# Recreate each exported padma list as a local List and record the id alias.
for l in lists:
    l['user'] = User.objects.get(username=l['user'])
    # NOTE(review): the exporter stores the list name under 'title' and the
    # static members under 'items' (see the padma_lists.json dump); reading
    # 'name'/'videos' here raised KeyError for every list.
    p = List(name=l['title'], user=l['user'])
    # 'static' lists keep an explicit member set; everything else becomes a
    # query-backed 'smart' list.
    p.type = 'static' if l['type'] == 'static' else 'smart'
    p.status = 'featured' if l['public'] else 'private'
    p.description = l['description']
    p.save()
    if l['type'] == 'static':
        # attach every exported member item to the new list
        for v in l['items']:
            i = Item.objects.get(data__contains=v)
            p.add(i)
    else:
        # rebuild the single-condition query; an empty key means "any field"
        key = l['query']['key']
        value = l['query']['value']
        if key == '':
            key = '*'
        p.query = {'conditions': [{'key': key, 'value': value, 'operator': '='}], 'operator': '&'}
        p.save()
    # map the old padma list id to the new id so old URLs keep resolving
    alias, created = ListAlias.objects.get_or_create(old=l['id'])
    alias.new = p.get_id()
    alias.save()
#Places
# Convert each exported padma location record directly into a Place row.
for loc in locations:
    old_id = loc.pop('id')  # old padma id; no alias is recorded for places yet
    loc['user'] = User.objects.get(username=loc['user'])
    for stamp in ('created', 'modified'):
        # exported timestamps are unix seconds (possibly as strings)
        loc[stamp] = datetime.fromtimestamp(int(loc[stamp]))
    place = Place(**loc)
    place.save()
#FIXME matches

View file

@ -39,7 +39,7 @@ for v in Video.select():
data[v.hid] = v.jsondump()
with open(os.path.join(prefix, 'padma_data.json'), 'w') as f:
json.dump(data, f)
json.dump(data, f, indent=2)
users = []
for u in User.select().orderBy('id'):
@ -56,19 +56,76 @@ with open(os.path.join(prefix, 'users.json'), 'w') as f:
json.dump(users, f, indent=2)
# NOTE(review): diff rendering — 'files = []' is the removed line, 'files = {}'
# its replacement; likewise 'files.append(f)' vs 'files[v.hid] = f' below.
files = []
files = {}
# Collect per-video file metadata, keyed by the padma hash id.
for v in Video.select().orderBy('id'):
    f = {
        'sha1sum': v.source_hash,
        'ogg': v.filename,
        'id': v.hid,
        'created': int(v.created.strftime('%s'))
    }
    info = ox.avinfo(v.filename)
    # SOURCE_OSHASH (if tagged) is the hash of the original source file;
    # 'oshash' is the hash of the transcoded ogg itself.
    f['oshash'] = info.get('metadata', {}).get('SOURCE_OSHASH', '')
    f['ogg_oshash'] = info['oshash']
    files.append(f)
    files[v.hid] = f
with open(os.path.join(prefix, 'padma_files.json'), 'w') as f:
    json.dump(files, f, indent=2)
# Dump every padma list — static member sets and dynamic queries — to JSON.
lists = []
for lst in List.select().orderBy('id'):
    record = {}
    record['id'] = lst.hid
    record['user'] = lst.creator.user_name.strip()
    record['title'] = lst.title.strip()
    record['created'] = int(lst.created.strftime('%s'))
    record['modified'] = int(lst.modified.strftime('%s'))
    record['public'] = lst.public
    record['description'] = lst.description.strip()
    record['type'] = lst.type
    if record['type'] == 'dynamic':
        # dynamic lists are defined by a single field/value query
        record['query'] = {'value': lst.query, 'key': lst.field}
    else:
        # static lists enumerate their member videos by hash id
        record['items'] = [v.hid for v in lst.videos]
    lists.append(record)
with open(os.path.join(prefix, 'padma_lists.json'), 'w') as f:
    json.dump(lists, f, indent=2)
# Dump every padma location (bounding box, center, names, annotations).
locations = []
for l in Location.select().orderBy('id'):
    data = {}
    data['id'] = l.hid
    data['name'] = l.name
    data['south'] = l.lat_sw
    data['west'] = l.lng_sw
    data['north'] = l.lat_ne
    data['east'] = l.lng_ne
    data['lat'] = l.lat_center
    data['lng'] = l.lng_center
    data['area'] = l.area
    data['created'] = int(l.created.strftime('%s'))
    data['modified'] = int(l.modified.strftime('%s'))
    # BUG FIX: the comprehension previously reused 'l' as its variable; in
    # Python 2 comprehension variables leak, rebinding the outer 'l' to the
    # last alt_name, so l.creator / l.layers below hit the wrong object.
    data['alternativeNames'] = [n.name for n in l.alt_names]
    try:
        if l.creator:
            data['user'] = l.creator.user_name
    except SQLObjectNotFound:
        # creator row was deleted; export the location without a user
        pass
    data['annotations'] = []
    # reference the annotations attached to this location by hash id
    for a in l.layers:
        data['annotations'].append(a.hid)
    locations.append(data)
with open(os.path.join(prefix, 'padma_locations.json'), 'w') as f:
    json.dump(locations, f, indent=2)
# Dump per-user video notes, skipping records with empty note text.
notes = [{
    'user': rec.user.user_name,
    'item': rec.video.hid,
    'note': rec.notes,
} for rec in Notes.select(Notes.q.notes != '')]
with open(os.path.join(prefix, 'padma_notes.json'), 'w') as f:
    json.dump(notes, f, indent=2)