forked from 0x2620/pandora
rename Item.itemId to Item.public_id
parent 3ab0e4ba1e
commit 3232ce6989
44 changed files with 382 additions and 213 deletions
@@ -19,7 +19,7 @@ class Command(BaseCommand):
     import annotations
     """
     help = 'import annotations from srt'
-    args = 'username itemId layername filename.srt'
+    args = 'username item layername filename.srt'
     option_list = BaseCommand.option_list + (
     )

@@ -27,13 +27,13 @@ class Command(BaseCommand):
         if len(args) != 4:
             print self.usage('import_srt')
             return
-        username, itemId, layer_id, filename = args
+        username, public_id, layer_id, filename = args
         user = User.objects.get(username=username)
-        item = Item.objects.get(itemId=itemId)
+        item = Item.objects.get(public_id=public_id)
         layer = filter(lambda l: l['id'] == layer_id, settings.CONFIG['layers'])[0]

         annotations = ox.srt.load(filename)
-        print 'importing %d annotations into %s/%s' % (len(annotations), itemId, layer_id)
+        print 'importing %d annotations into %s/%s' % (len(annotations), public_id, layer_id)
         for i in range(len(annotations)-1):
             if annotations[i]['out'] == annotations[i+1]['in']:
                 annotations[i]['out'] = annotations[i]['out'] - 0.001

@@ -119,7 +119,7 @@ class Annotation(models.Model):
             if not previous.public_id:
                 previous.set_public_id()
             public_id = ox.fromAZ(previous.public_id.split('/')[-1]) + 1
-        self.public_id = "%s/%s" % (self.item.itemId, ox.toAZ(public_id))
+        self.public_id = "%s/%s" % (self.item.public_id, ox.toAZ(public_id))
         Annotation.objects.filter(id=self.id).update(public_id=self.public_id)

     @classmethod

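For context on the hunk above: an annotation's public_id is the item's public_id plus a slash plus a base-26 (A-Z) counter, encoded with python-ox's toAZ/fromAZ helpers (the same calls that appear in the diff). A minimal round-trip sketch with made-up values, assuming only that toAZ and fromAZ are inverse encode/decode functions:

import ox  # python-ox, already a pandora dependency

item_public_id = 'ABC'   # made-up item public_id, for illustration only
counter = 42             # made-up per-item annotation counter

# compose an annotation public_id the way Annotation.set_public_id does
annotation_public_id = "%s/%s" % (item_public_id, ox.toAZ(counter))

# recover the counter again, mirroring the ox.fromAZ(...) call in the model
assert ox.fromAZ(annotation_public_id.split('/')[-1]) == counter
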
@@ -68,7 +68,7 @@ def update_matches(id, type):
 def add_annotations(data):
     from item.models import Item
     from user.models import User
-    item = Item.objects.get(itemId=data['item'])
+    item = Item.objects.get(public_id=data['item'])
     layer_id = data['layer']
     layer = filter(lambda l: l['id'] == layer_id, settings.CONFIG['layers'])
     if not layer:

@@ -100,7 +100,7 @@ def findAnnotations(request):
         query = parse_query(data, request.user)
         qs = order_query(query['qs'], query['sort'])
         if qs.count() > 0:
-            response['data']['position'] = utils.get_positions(ids, [qs[0].itemId])[0]
+            response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
         ids = [i.public_id for i in qs]
         response['data']['positions'] = utils.get_positions(ids, data['positions'])

@@ -114,7 +114,7 @@ actions.register(findAnnotations)
 def addAnnotation(request):
     '''
         takes {
-            item: itemId,
+            item: public_id,
             layer: layerId,
             in: float,
             out: float,

@@ -131,7 +131,7 @@ def addAnnotation(request):
         return render_to_json_response(json_response(status=400,
             text='invalid data'))

-    item = get_object_or_404_json(Item, itemId=data['item'])
+    item = get_object_or_404_json(Item, public_id=data['item'])

     layer_id = data['layer']
     layer = filter(lambda l: l['id'] == layer_id, settings.CONFIG['layers'])[0]

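Not part of the commit, but as a usage illustration: after this change the addAnnotation API still takes the item under the key 'item', only its value is now the item's public_id. A hedged client-side sketch, assuming pandora's usual POST to /api/ with action/data form fields, a session that is already logged in, and made-up item and layer ids:

import json
import requests  # assumption: calling the JSON API over plain HTTP

session = requests.Session()  # assumed to already carry an authenticated session cookie

data = {
    'item': 'ABC',          # the item's public_id (previously called itemId)
    'layer': 'subtitles',   # a layer id from settings.CONFIG['layers'] (made up)
    'in': 1.0,
    'out': 2.5,
    'value': 'Hello world',
}
r = session.post('https://pandora.example.com/api/',
                 data={'action': 'addAnnotation', 'data': json.dumps(data)})
print(r.json())
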
@@ -155,7 +155,7 @@ actions.register(addAnnotation, cache=False)
 def addAnnotations(request):
     '''
         takes {
-            item: itemId,
+            item: public_id,
             layer: layerId,
             annotations: [{
                 in: float,

@@ -173,7 +173,7 @@ def addAnnotations(request):
         return render_to_json_response(json_response(status=400,
             text='invalid data'))

-    item = get_object_or_404_json(Item, itemId=data['item'])
+    item = get_object_or_404_json(Item, public_id=data['item'])

     layer_id = data['layer']
     layer = filter(lambda l: l['id'] == layer_id, settings.CONFIG['layers'])[0]
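The hunks shown here cover only the Python-level rename; the Item database column needs a matching schema migration somewhere in the 44 changed files, which this excerpt does not show. A minimal sketch of what that could look like, assuming the project used South (pre-Django-1.7 migrations) and the default item_item table with an itemId column; both names are assumptions, not taken from the commit:

# Hypothetical South migration sketch; table and column names are assumed.
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_column('item_item', 'itemId', 'public_id')

    def backwards(self, orm):
        db.rename_column('item_item', 'public_id', 'itemId')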