- rework file/upload handling, no more extras
- cleanup imdb data, get from external data service
- rename filesview
parent 3bff9d1fb9
commit 45798810a9
18 changed files with 295 additions and 631 deletions
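In place of the old per-instance extras flagging, file registration now keys off the canonical folder depth: update_files() (in the archive tasks hunk further down) only registers instances whose path sits at that depth, so Extras/, Versions/ and DVDs/ subfolders are simply skipped. A rough sketch of the rule, with a hypothetical 0xdb-style path (USE_IMDB layout):

```python
# Sketch of the depth rule added in update_files(); not the full task, just the check.
USE_IMDB = True  # assumption: IMDb-style layout "L/Last, First/Title (Year)/Title..."

def should_register(path):
    # 4 path components with USE_IMDB, 3 otherwise (see the comment in the diff below)
    folder_depth = 4 if USE_IMDB else 3
    return len(path.split('/')) == folder_depth

should_register('M/McCarthy, Thomas/The Visitor (2007)/The Visitor.avi')       # True
should_register('M/McCarthy, Thomas/The Visitor (2007)/Extras/Making Of.avi')  # False, no longer tracked as an "extra"
```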
@@ -8,8 +8,8 @@ import models

class FileAdmin(admin.ModelAdmin):
-    search_fields = ['name', 'folder','oshash', 'video_codec']
-    list_display = ['available', 'wanted', 'active', '__unicode__', 'itemId']
+    search_fields = ['path','oshash', 'video_codec']
+    list_display = ['available', 'wanted', 'selected', '__unicode__', 'itemId']
    list_display_links = ('__unicode__', )

    def itemId(self, obj):

@@ -21,7 +21,7 @@ admin.site.register(models.File, FileAdmin)

class InstanceAdmin(admin.ModelAdmin):
-    search_fields = ['name', 'folder', 'volume__name', 'file__oshash']
+    search_fields = ['path', 'volume__name', 'file__oshash']
    form = InstanceAdminForm

admin.site.register(models.Instance, InstanceAdmin)

@@ -25,7 +25,7 @@ class FileAdminForm(forms.ModelForm):

class InstanceAdminForm(forms.ModelForm):
-    file = ForeignKeyByLetter(models.File, field_name='name')
+    file = ForeignKeyByLetter(models.File, field_name='path')

    class Meta:
        model = models.Instance

@@ -6,10 +6,10 @@ import os.path
import re
import time

+from django.conf import settings
+from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q
-from django.contrib.auth.models import User
-from django.conf import settings
from django.db.models.signals import pre_delete

from ox.django import fields

@ -17,7 +17,6 @@ import ox
|
|||
import chardet
|
||||
|
||||
from item import utils
|
||||
from person.models import get_name_sort
|
||||
|
||||
import extract
|
||||
|
||||
|
@ -26,19 +25,17 @@ class File(models.Model):
|
|||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
|
||||
active = models.BooleanField(default=False)
|
||||
auto = models.BooleanField(default=True)
|
||||
|
||||
oshash = models.CharField(max_length=16, unique=True)
|
||||
item = models.ForeignKey("item.Item", related_name='files')
|
||||
|
||||
name = models.CharField(max_length=2048, default="") # canoncial path/file
|
||||
folder = models.CharField(max_length=2048, default="") # canoncial path/file
|
||||
sort_name = models.CharField(max_length=2048, default="") # sort name
|
||||
path = models.CharField(max_length=2048, default="") # canoncial path/file
|
||||
sort_path = models.CharField(max_length=2048, default="") # sort name
|
||||
|
||||
type = models.CharField(default="", max_length=255)
|
||||
part = models.IntegerField(null=True)
|
||||
version = models.CharField(default="", max_length=255) # sort path/file name
|
||||
version = models.CharField(default="", max_length=255)
|
||||
language = models.CharField(default="", max_length=8)
|
||||
|
||||
season = models.IntegerField(default=-1)
|
||||
|
@ -65,22 +62,22 @@ class File(models.Model):
|
|||
|
||||
#This is true if derivative is available or subtitles where uploaded
|
||||
available = models.BooleanField(default = False)
|
||||
wanted = models.BooleanField(default = False)
|
||||
selected = models.BooleanField(default = False)
|
||||
uploading = models.BooleanField(default = False)
|
||||
wanted = models.BooleanField(default = False)
|
||||
|
||||
is_audio = models.BooleanField(default=False)
|
||||
is_video = models.BooleanField(default=False)
|
||||
is_subtitle = models.BooleanField(default=False)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
return self.path
|
||||
|
||||
def set_state(self):
|
||||
self.name = self.get_name()
|
||||
self.folder = self.get_folder()
|
||||
self.sort_name = utils.sort_string(ox.get_sort_title(self.name))
|
||||
self.path = self.create_path()
|
||||
self.sort_path= utils.sort_string(self.path)
|
||||
|
||||
if not os.path.splitext(self.name)[-1] in (
|
||||
if not os.path.splitext(self.path)[-1] in (
|
||||
'.srt', '.rar', '.sub', '.idx', '.txt', '.jpg', '.png', '.nfo') \
|
||||
and self.info:
|
||||
for key in ('duration', 'size'):
|
||||
|
@ -99,8 +96,8 @@ class File(models.Model):
|
|||
self.display_aspect_ratio = "%s:%s" % (self.width, self.height)
|
||||
self.is_video = True
|
||||
self.is_audio = False
|
||||
if self.name.endswith('.jpg') or \
|
||||
self.name.endswith('.png') or \
|
||||
if self.path.endswith('.jpg') or \
|
||||
self.path.endswith('.png') or \
|
||||
self.duration == 0.04:
|
||||
self.is_video = False
|
||||
else:
|
||||
|
@ -126,11 +123,11 @@ class File(models.Model):
|
|||
self.pixels = int(self.width * self.height * float(utils.parse_decimal(self.framerate)) * self.duration)
|
||||
|
||||
else:
|
||||
self.is_video = os.path.splitext(self.name)[-1] in ('.avi', '.mkv', '.dv', '.ogv', '.mpeg', '.mov', '.webm')
|
||||
self.is_audio = os.path.splitext(self.name)[-1] in ('.mp3', '.wav', '.ogg', '.flac', '.oga')
|
||||
self.is_subtitle = os.path.splitext(self.name)[-1] in ('.srt', )
|
||||
self.is_video = os.path.splitext(self.path)[-1] in ('.avi', '.mkv', '.dv', '.ogv', '.mpeg', '.mov', '.webm')
|
||||
self.is_audio = os.path.splitext(self.path)[-1] in ('.mp3', '.wav', '.ogg', '.flac', '.oga')
|
||||
self.is_subtitle = os.path.splitext(self.path)[-1] in ('.srt', )
|
||||
|
||||
if self.name.endswith('.srt'):
|
||||
if self.path.endswith('.srt'):
|
||||
self.is_subtitle = True
|
||||
self.is_audio = False
|
||||
self.is_video = False
|
||||
|
@ -138,7 +135,8 @@ class File(models.Model):
|
|||
self.is_subtitle = False
|
||||
|
||||
self.type = self.get_type()
|
||||
self.language = self.get_language()
|
||||
info = ox.parse_movie_path(self.path)
|
||||
self.language = info['language']
|
||||
self.part = self.get_part()
|
||||
|
||||
if self.type not in ('audio', 'video'):
|
||||
|
@@ -156,9 +154,9 @@ class File(models.Model):

    #upload and data handling
    data = models.FileField(null=True, blank=True,
-                            upload_to=lambda f, x: f.path('data.bin'))
+                            upload_to=lambda f, x: f.get_path('data.bin'))

-    def path(self, name):
+    def get_path(self, name):
        h = self.oshash
        return os.path.join('files', h[:2], h[2:4], h[4:6], h[6:], name)

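The renamed get_path() shards everything stored for a file by its OSHash: the 16-character hash is split into three two-character directories plus the remainder, and uploads, stream derivatives (Stream.path) and extracted frames (frame_path) all land in that one leaf directory. A small sketch with a made-up hash:

```python
import os

def get_path(oshash, name):
    # mirrors File.get_path() in the hunk above
    h = oshash
    return os.path.join('files', h[:2], h[2:4], h[4:6], h[6:], name)

get_path('0123456789abcdef', 'data.bin')  # -> 'files/01/23/45/6789abcdef/data.bin'
get_path('0123456789abcdef', '12.png')    # a frame at position 12 ends up in the same directory
```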
@ -277,13 +275,12 @@ class File(models.Model):
|
|||
'samplerate': self.samplerate,
|
||||
'video_codec': self.video_codec,
|
||||
'audio_codec': self.audio_codec,
|
||||
'name': self.name,
|
||||
'path': self.path,
|
||||
'size': self.size,
|
||||
#'info': self.info,
|
||||
'users': list(set([u.username
|
||||
for u in User.objects.filter(volumes__files__in=self.instances.all())])),
|
||||
'instances': [i.json() for i in self.instances.all()],
|
||||
'folder': self.get_folder(),
|
||||
'type': self.get_type(),
|
||||
'part': self.get_part()
|
||||
}
|
||||
|
@ -295,17 +292,17 @@ class File(models.Model):
|
|||
|
||||
def get_part(self):
|
||||
#FIXME: this breaks for sub/idx/srt
|
||||
if os.path.splitext(self.name)[-1] in ('.sub', '.idx', '.srt'):
|
||||
name = os.path.splitext(self.name)[0]
|
||||
if os.path.splitext(self.path)[-1] in ('.sub', '.idx', '.srt'):
|
||||
name = os.path.splitext(self.path)[0]
|
||||
if self.language:
|
||||
name = name[-(len(self.language)+1)]
|
||||
qs = self.item.files.filter(Q(is_video=True)|Q(is_audio=True),
|
||||
active=True, name__startswith=name)
|
||||
selected=True, path__startswith=name)
|
||||
if qs.count()>0:
|
||||
return qs[0].part
|
||||
if self.active:
|
||||
if self.selected:
|
||||
files = list(self.item.files.filter(type=self.type, language=self.language,
|
||||
active=self.active).order_by('sort_name'))
|
||||
selected=self.selected).order_by('sort_path'))
|
||||
if self in files:
|
||||
return files.index(self) + 1
|
||||
return None
|
||||
|
@ -315,7 +312,7 @@ class File(models.Model):
|
|||
return 'video'
|
||||
if self.is_audio:
|
||||
return 'audio'
|
||||
if self.is_subtitle or os.path.splitext(self.name)[-1] in ('.sub', '.idx'):
|
||||
if self.is_subtitle or os.path.splitext(self.path)[-1] in ('.sub', '.idx'):
|
||||
return 'subtitle'
|
||||
return 'unknown'
|
||||
|
||||
|
@ -325,30 +322,10 @@ class File(models.Model):
|
|||
return self.instances.all()[0]
|
||||
return None
|
||||
|
||||
def get_folder(self):
|
||||
def create_path(self):
|
||||
instance = self.get_instance()
|
||||
if instance:
|
||||
return instance.folder
|
||||
name = os.path.splitext(self.get_name())[0]
|
||||
name = name.replace('. ', '||').split('.')[0].replace('||', '. ')
|
||||
if self.item:
|
||||
if settings.USE_IMDB:
|
||||
director = self.item.get('director', ['Unknown Director'])
|
||||
director = map(get_name_sort, director)
|
||||
director = u'; '.join(director)
|
||||
director = re.sub(r'[:\\/]', '_', director)
|
||||
name = os.path.join(director, name)
|
||||
year = self.item.get('year')
|
||||
if year:
|
||||
name += u' (%s)' % year
|
||||
name = os.path.join(name[0].upper(), name)
|
||||
return name
|
||||
return u''
|
||||
|
||||
def get_name(self):
|
||||
instance = self.get_instance()
|
||||
if instance:
|
||||
return instance.name
|
||||
return instance.path
|
||||
if self.item:
|
||||
name = self.item.get('title', 'Untitled')
|
||||
name = re.sub(r'[:\\/]', '_', name)
|
||||
|
@ -357,12 +334,6 @@ class File(models.Model):
|
|||
ext = '.unknown'
|
||||
return name + ext
|
||||
|
||||
def get_language(self):
|
||||
language = self.name.split('.')
|
||||
if len(language) >= 3 and len(language[-2]) == 2:
|
||||
return language[-2]
|
||||
return ''
|
||||
|
||||
def delete_file(sender, **kwargs):
|
||||
f = kwargs['instance']
|
||||
#FIXME: delete streams here
|
||||
|
@ -394,7 +365,7 @@ class Volume(models.Model):
|
|||
class Instance(models.Model):
|
||||
|
||||
class Meta:
|
||||
unique_together = ("name", "folder", "volume")
|
||||
unique_together = ("path", "volume")
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
|
@ -403,15 +374,14 @@ class Instance(models.Model):
|
|||
ctime = models.IntegerField(default=lambda: int(time.time()), editable=False)
|
||||
mtime = models.IntegerField(default=lambda: int(time.time()), editable=False)
|
||||
|
||||
name = models.CharField(max_length=2048)
|
||||
folder = models.CharField(max_length=2048)
|
||||
extra = models.BooleanField(default=False)
|
||||
path = models.CharField(max_length=2048)
|
||||
ignore = models.BooleanField(default=False)
|
||||
|
||||
file = models.ForeignKey(File, related_name='instances')
|
||||
volume = models.ForeignKey(Volume, related_name='files')
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s's %s <%s>"% (self.volume.user, self.name, self.file.oshash)
|
||||
return u"%s's %s <%s>"% (self.volume.user, self.path, self.file.oshash)
|
||||
|
||||
@property
|
||||
def itemId(self):
|
||||
|
@ -421,14 +391,13 @@ class Instance(models.Model):
|
|||
return {
|
||||
'user': self.volume.user.username,
|
||||
'volume': self.volume.name,
|
||||
'folder': self.folder,
|
||||
'name': self.name
|
||||
'path': self.path
|
||||
}
|
||||
|
||||
def frame_path(frame, name):
|
||||
ext = os.path.splitext(name)[-1]
|
||||
name = "%s%s" % (frame.position, ext)
|
||||
return frame.file.path(name)
|
||||
return frame.file.get_path(name)
|
||||
|
||||
|
||||
class Frame(models.Model):
|
||||
|
@ -486,7 +455,7 @@ class Stream(models.Model):
|
|||
return u"%s/%s" % (self.file, self.name())
|
||||
|
||||
def path(self, name=''):
|
||||
return self.file.path(name)
|
||||
return self.file.get_path(name)
|
||||
|
||||
def extract_derivatives(self):
|
||||
config = settings.CONFIG['video']
|
||||
|
|
|
@ -1,29 +1,19 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
import os
|
||||
|
||||
from celery.decorators import task
|
||||
import ox
|
||||
|
||||
from item.utils import parse_path
|
||||
from item.models import get_item
|
||||
from django.conf import settings
|
||||
|
||||
import models
|
||||
|
||||
_INSTANCE_KEYS = ('mtime', 'name', 'folder')
|
||||
|
||||
_INSTANCE_KEYS = ('mtime', 'path')
|
||||
|
||||
def get_or_create_item(volume, f, user):
|
||||
in_same_folder = models.Instance.objects.filter(folder=f['folder'], volume=volume)
|
||||
if in_same_folder.count() > 0:
|
||||
i = in_same_folder[0].file.item
|
||||
else:
|
||||
if settings.USE_FOLDER:
|
||||
item_info = parse_path(f['folder'])
|
||||
else:
|
||||
item_info = parse_path(f['path'])
|
||||
i = get_item(item_info, user)
|
||||
return i
|
||||
def get_or_create_item(volume, info, user):
|
||||
item_info = ox.parse_movie_info(info['path'])
|
||||
return get_item(item_info, user)
|
||||
|
||||
def get_or_create_file(volume, f, user, item=None):
|
||||
try:
|
||||
|
@ -31,7 +21,7 @@ def get_or_create_file(volume, f, user, item=None):
|
|||
except models.File.DoesNotExist:
|
||||
file = models.File()
|
||||
file.oshash = f['oshash']
|
||||
file.name = f['name']
|
||||
file.path = f['path']
|
||||
if item:
|
||||
file.item = item
|
||||
else:
|
||||
|
@ -50,13 +40,11 @@ def update_or_create_instance(volume, f):
|
|||
setattr(instance, key, f[key])
|
||||
updated=True
|
||||
if updated:
|
||||
if instance.name.lower().startswith('extras/') or \
|
||||
instance.name.lower().startswith('versions/'):
|
||||
instance.extra = True
|
||||
instance.ignore = False
|
||||
instance.save()
|
||||
instance.file.save()
|
||||
else:
|
||||
instance = models.Instance.objects.filter(name=f['name'], folder=f['folder'], volume=volume)
|
||||
instance = models.Instance.objects.filter(path=f['path'], volume=volume)
|
||||
if instance.count()>0:
|
||||
#same path, other oshash, keep path/item mapping, remove instance
|
||||
item = instance[0].file.item
|
||||
|
@ -69,9 +57,6 @@ def update_or_create_instance(volume, f):
|
|||
instance.file = get_or_create_file(volume, f, volume.user, item)
|
||||
for key in _INSTANCE_KEYS:
|
||||
setattr(instance, key, f[key])
|
||||
if instance.name.lower().startswith('extras/') or \
|
||||
instance.name.lower().startswith('versions/'):
|
||||
instance.extra = True
|
||||
instance.save()
|
||||
instance.file.save()
|
||||
instance.file.item.update_wanted()
|
||||
|
@ -83,17 +68,15 @@ def update_files(user, volume, files):
|
|||
volume, created = models.Volume.objects.get_or_create(user=user, name=volume)
|
||||
all_files = []
|
||||
for f in files:
|
||||
folder = f['path'].split('/')
|
||||
name = folder.pop()
|
||||
if folder and folder[-1].lower() in ('extras', 'versions', 'dvds'):
|
||||
name = '/'.join([folder.pop(), name])
|
||||
f['folder'] = '/'.join(folder)
|
||||
f['name'] = name
|
||||
all_files.append(f['oshash'])
|
||||
update_or_create_instance(volume, f)
|
||||
|
||||
#ignore extras etc,
|
||||
#imdb stlye is L/Last, First/Title (Year)/Title.. 4
|
||||
#otherwise T/Title (Year)/Title... 3
|
||||
folder_depth = settings.USE_IMDB and 4 or 3
|
||||
if len(f['path'].split('/')) == folder_depth:
|
||||
all_files.append(f['oshash'])
|
||||
update_or_create_instance(volume, f)
|
||||
|
||||
#remove deleted files
|
||||
#FIXME: can this have any bad consequences? i.e. on the selction of used item files.
|
||||
models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files).delete()
|
||||
|
||||
@task(queue="encoding")
|
||||
|
|
|
@@ -9,6 +9,7 @@ from django.shortcuts import get_object_or_404, redirect
from django.conf import settings
from django.db.models import Count, Sum

+import ox
from ox.utils import json
from ox.django.decorators import login_required_json
from ox.django.shortcuts import render_to_json_response, get_object_or_404_json, json_response

@@ -105,7 +106,7 @@ def update(request):
                                file__wanted=True)]
        response['data']['file'] = [f.file.oshash for f in files.filter(file__is_subtitle=True,
                                                                        file__available=False,
-                                                                        name__endswith='.srt')]
+                                                                        path__endswith='.srt')]

    return render_to_json_response(response)
actions.register(update, cache=False)

@ -319,24 +320,8 @@ def lookup_file(request, oshash):
|
|||
def _order_query(qs, sort, prefix=''):
|
||||
order_by = []
|
||||
if len(sort) == 1:
|
||||
sort.append({'operator': '+', 'key': 'sort_name'})
|
||||
sort.append({'operator': '+', 'key': 'path'})
|
||||
sort.append({'operator': '-', 'key': 'created'})
|
||||
'''
|
||||
if sort[0]['key'] == 'title':
|
||||
sort.append({'operator': '-', 'key': 'year'})
|
||||
sort.append({'operator': '+', 'key': 'director'})
|
||||
elif sort[0]['key'] == 'director':
|
||||
sort.append({'operator': '-', 'key': 'year'})
|
||||
sort.append({'operator': '+', 'key': 'title'})
|
||||
elif sort[0]['key'] == 'year':
|
||||
sort.append({'operator': '+', 'key': 'director'})
|
||||
sort.append({'operator': '+', 'key': 'title'})
|
||||
elif not sort[0]['key'] in ('value', 'value_sort'):
|
||||
sort.append({'operator': '+', 'key': 'director'})
|
||||
sort.append({'operator': '-', 'key': 'year'})
|
||||
sort.append({'operator': '+', 'key': 'title'})
|
||||
|
||||
'''
|
||||
|
||||
for e in sort:
|
||||
operator = e['operator']
|
||||
|
@ -346,7 +331,7 @@ def _order_query(qs, sort, prefix=''):
|
|||
'id': 'item__itemId',
|
||||
'users': 'instances__volume__user__username',
|
||||
'resolution': 'width',
|
||||
'name': 'sort_name'
|
||||
'path': 'sort_path'
|
||||
}.get(e['key'], e['key'])
|
||||
#if operator=='-' and '%s_desc'%key in models.ItemSort.descending_fields:
|
||||
# key = '%s_desc' % key
|
||||
|
@ -400,10 +385,10 @@ Groups
|
|||
keys: array of keys to return
|
||||
group: group elements by, country, genre, director...
|
||||
|
||||
possible values for keys: name, items
|
||||
possible values for keys: path, items
|
||||
|
||||
with keys
|
||||
items contains list of {'name': string, 'items': int}:
|
||||
items contains list of {'path': string, 'items': int}:
|
||||
return {'status': {'code': int, 'text': string},
|
||||
'data': {items: array}}
|
||||
|
||||
|
@ -465,7 +450,7 @@ Positions
|
|||
|
||||
elif 'range' in data:
|
||||
qs = qs[query['range'][0]:query['range'][1]]
|
||||
response['data']['items'] = [{'name': i['value'], 'items': i[items]} for i in qs]
|
||||
response['data']['items'] = [{'path': i['value'], 'items': i[items]} for i in qs]
|
||||
else:
|
||||
response['data']['items'] = qs.count()
|
||||
elif 'positions' in query:
|
||||
|
@ -481,6 +466,7 @@ Positions
|
|||
response['data']['items'] = []
|
||||
qs = models.File.objects.filter(item__in=query['qs'])
|
||||
qs = _order_query(qs, query['sort'])
|
||||
qs = qs.select_related()
|
||||
keys = query['keys']
|
||||
qs = qs[query['range'][0]:query['range'][1]]
|
||||
response['data']['items'] = [f.json(keys) for f in qs]
|
||||
|
@@ -492,3 +478,19 @@ Positions

actions.register(findFiles)

+def parsePath(request): #parse path and return info
+    '''
+        param data {
+            path: string
+        }
+        return {
+            status: {'code': int, 'text': string},
+            data: {
+                imdb: string
+            }
+        }
+    '''
+    path = json.loads(request.POST['data'])['path']
+    response = json_response(ox.parse_movie_path(path))
+    return render_to_json_response(response)
+actions.register(parsePath)

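The new parsePath action is a thin wrapper around ox.parse_movie_path, the same helper now used in File.set_state() and get_or_create_item(). A hedged sketch of calling it directly; the exact key set depends on the installed python-ox version, but this commit relies on title, director, year and language:

```python
import ox

# hypothetical path in the expected "L/Last, First/Title (Year)/Title.<lang>.<ext>" layout
info = ox.parse_movie_path('M/McCarthy, Thomas/The Visitor (2007)/The Visitor.en.avi')

info.get('title')     # e.g. 'The Visitor'
info.get('director')  # parsed from the "Last, First" folder
info.get('year')      # e.g. '2007'
info.get('language')  # e.g. 'en', taken from the filename suffix (used by File.set_state)
```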
pandora/item/data_api.py (new file, 17 lines)

@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# vi:si:et:sw=4:sts=4:ts=4
+from __future__ import division, with_statement
+
+import ox
+from django.conf import settings
+
+_DATA_SERVICE=None
+def external_data(action, data):
+    global _DATA_SERVICE
+    try:
+        if not _DATA_SERVICE and settings.DATA_SERVICE:
+            _DATA_SERVICE = ox.API(settings.DATA_SERVICE)
+        return getattr(_DATA_SERVICE, action)(data)
+    except:
+        pass
+    return {'status': {'code': 500, 'text':'not available'}, 'data': {}}

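Item metadata now flows through this helper instead of direct IMDb scraping: get_id() in item/models.py falls back to the service when a local lookup is ambiguous, and Item.update_external() replaces the removed ox.web.imdb code with a getData call. A minimal sketch of the calling convention, assuming settings.DATA_SERVICE points at an ox.API-compatible endpoint:

```python
from item.data_api import external_data

# resolve an ID from parsed metadata, as get_id() does
r = external_data('getId', {'title': 'The Visitor', 'director': ['Thomas McCarthy'], 'year': '2007'})
if r['status']['code'] == 200:
    imdb_id = r['data']['imdbId']

    # fetch the full record the way Item.update_external() does
    data = external_data('getData', {'id': imdb_id})
    if data['status']['code'] == 200:
        item_metadata = data['data']
```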
@ -15,7 +15,6 @@ from urllib import quote
|
|||
from django.db import models
|
||||
from django.db.models import Count, Q, Sum
|
||||
from django.core.files.base import ContentFile
|
||||
from django.utils import simplejson as json
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User, Group
|
||||
from django.db.models.signals import pre_delete
|
||||
|
@ -30,6 +29,7 @@ import managers
|
|||
import utils
|
||||
import tasks
|
||||
from .timelines import join_timelines
|
||||
from data_api import external_data
|
||||
|
||||
from archive import extract
|
||||
from annotation.models import Annotation, Layer
|
||||
|
@ -39,11 +39,45 @@ from person.models import get_name_sort
|
|||
from title.models import get_title_sort
|
||||
|
||||
|
||||
def get_id(info):
|
||||
q = Item.objects.all()
|
||||
for key in ('title', 'director', 'year'):
|
||||
# 'episodeTitle', 'episodeDirector', 'episodeYear', 'season', 'episode'):
|
||||
if key in info and info[key]:
|
||||
k = 'find__key'
|
||||
v = 'find__value'
|
||||
if key in Item.facet_keys + ['title']:
|
||||
k = 'facets__key'
|
||||
v = 'facets__value'
|
||||
if isinstance(info[key], list):
|
||||
for value in info[key]:
|
||||
q = q.filter(**{k: key, v: value})
|
||||
else:
|
||||
q = q.filter(**{k:key, v:info[key]})
|
||||
if q.count() == 1:
|
||||
return q[0].itemId
|
||||
if settings.DATA_SERVICE:
|
||||
r = external_data('getId', info)
|
||||
if r['status']['code'] == 200:
|
||||
imdbId = r['data']['imdbId']
|
||||
return imdbId
|
||||
return None
|
||||
|
||||
def get_item(info, user=None, async=False):
|
||||
'''
|
||||
info dict with:
|
||||
imdbId, title, director, episode_title, season, series
|
||||
imdbId, title, director, year,
|
||||
season, episode, episodeTitle, episodeDirector, episodeYear
|
||||
'''
|
||||
item_data = {
|
||||
'title': info['title'],
|
||||
'director': info['director'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
for key in ('episodeTitle', 'episodeDirector', 'episodeYear',
|
||||
'season', 'episode', 'seriesTitle'):
|
||||
if key in info and info[key]:
|
||||
item_data[key] = info[key]
|
||||
if settings.USE_IMDB:
|
||||
if 'imdbId' in info and info['imdbId']:
|
||||
try:
|
||||
|
@ -51,11 +85,7 @@ def get_item(info, user=None, async=False):
|
|||
except Item.DoesNotExist:
|
||||
item = Item(itemId=info['imdbId'])
|
||||
if 'title' in info and 'director' in info:
|
||||
item.external_data = {
|
||||
'title': info['title'],
|
||||
'director': info['director'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
item.external_data = item_data
|
||||
item.user = user
|
||||
item.oxdbId = item.itemId
|
||||
item.save()
|
||||
|
@ -64,61 +94,39 @@ def get_item(info, user=None, async=False):
|
|||
else:
|
||||
item.update_external()
|
||||
else:
|
||||
q = Item.objects.all()
|
||||
for key in ('title', 'director', 'year'):
|
||||
if key in info and info[key]:
|
||||
if isinstance(info[key], list):
|
||||
q = q.filter(find__key=key, find__value='\n'.join(info[key]))
|
||||
else:
|
||||
q = q.filter(find__key=key, find__value=info[key])
|
||||
if q.count() >= 1:
|
||||
item = q[0]
|
||||
elif not 'oxdbId' in info:
|
||||
item = Item()
|
||||
item.data = {
|
||||
'title': info['title'],
|
||||
'director': info['director'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
for key in ('episode_title', 'series_title', 'season', 'episode'):
|
||||
if key in info and info[key]:
|
||||
item.data[key] = info[key]
|
||||
item.oxdbId = item.oxdb_id()
|
||||
item.save()
|
||||
else:
|
||||
itemId = get_id(info)
|
||||
if itemId:
|
||||
try:
|
||||
item = Item.objects.get(itemId=info['oxdbId'])
|
||||
item = Item.objects.get(itemId=itemId)
|
||||
except Item.DoesNotExist:
|
||||
item = Item()
|
||||
item.data = {
|
||||
'title': info['title'],
|
||||
'director': info['director'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
item.itemId = info['oxdbId']
|
||||
info['imdbId'] = itemId
|
||||
item = get_item(info)
|
||||
return item
|
||||
|
||||
for key in ('episode_title', 'series_title', 'season', 'episode'):
|
||||
if key in info and info[key]:
|
||||
item.data[key] = info[key]
|
||||
try:
|
||||
existing_item = Item.objects.get(oxdbId=item.oxdb_id())
|
||||
item = existing_item
|
||||
except Item.DoesNotExist:
|
||||
item.save()
|
||||
try:
|
||||
item = Item.objects.get(itemId=info.get('oxdbId'))
|
||||
except Item.DoesNotExist:
|
||||
item = Item()
|
||||
item.user = user
|
||||
item.data = item_data
|
||||
item.itemId = info.get('oxdbId', item.oxdb_id())
|
||||
try:
|
||||
existing_item = Item.objects.get(oxdbId=item.oxdb_id())
|
||||
item = existing_item
|
||||
except Item.DoesNotExist:
|
||||
item.oxdbId = item.oxdb_id()
|
||||
item.save()
|
||||
else:
|
||||
qs = Item.objects.filter(find__key='title', find__value=info['title'])
|
||||
if qs.count() == 1:
|
||||
item = qs[0]
|
||||
else:
|
||||
item = Item()
|
||||
item.data = {
|
||||
'title': info['title']
|
||||
}
|
||||
item.data = item_data
|
||||
item.user = user
|
||||
item.save()
|
||||
return item
|
||||
|
||||
|
||||
class Item(models.Model):
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
|
@ -202,59 +210,13 @@ class Item(models.Model):
|
|||
self.data[key] = data[key]
|
||||
self.save()
|
||||
|
||||
def reviews(self):
|
||||
reviews = self.get('reviews', [])
|
||||
_reviews = []
|
||||
for r in reviews:
|
||||
for url in settings.REVIEW_WHITELIST:
|
||||
if url in r[0]:
|
||||
_reviews.append({
|
||||
'source': settings.REVIEW_WHITELIST[url],
|
||||
'url': r[0]
|
||||
})
|
||||
return _reviews
|
||||
|
||||
def update_external(self):
|
||||
if len(self.itemId) == 7:
|
||||
data = ox.web.imdb.Imdb(self.itemId)
|
||||
#FIXME: all this should be in ox.web.imdb.Imdb
|
||||
for key in ('directors', 'writers', 'editors', 'producers',
|
||||
'cinematographers', 'languages', 'genres', 'keywords',
|
||||
'episode_directors'):
|
||||
if key in data:
|
||||
data[key[:-1]] = data.pop(key)
|
||||
if 'countries' in data:
|
||||
data['country'] = data.pop('countries')
|
||||
if 'release date' in data:
|
||||
data['releasedate'] = data.pop('release date')
|
||||
if isinstance(data['releasedate'], list):
|
||||
data['releasedate'] = min(data['releasedate'])
|
||||
if 'plot' in data:
|
||||
data['summary'] = data.pop('plot')
|
||||
if 'cast' in data:
|
||||
if isinstance(data['cast'][0], basestring):
|
||||
data['cast'] = [data['cast']]
|
||||
data['actor'] = [c[0] for c in data['cast']]
|
||||
data['cast'] = map(lambda x: {'actor': x[0], 'character': x[1]}, data['cast'])
|
||||
if 'trivia' in data:
|
||||
def fix_links(t):
|
||||
def fix_names(m):
|
||||
return '<a href="/?find=name:%s">%s</a>' % (
|
||||
quote(m.group(2).encode('utf-8')), m.group(2)
|
||||
)
|
||||
t = re.sub('<a href="(/name/.*?/)">(.*?)</a>', fix_names, t)
|
||||
def fix_titles(m):
|
||||
return '<a href="/?find=title:%s">%s</a>' % (
|
||||
quote(m.group(2).encode('utf-8')), m.group(2)
|
||||
)
|
||||
t = re.sub('<a href="(/title/.*?/)">(.*?)</a>', fix_titles, t)
|
||||
return t
|
||||
data['trivia'] = [fix_links(t) for t in data['trivia']]
|
||||
if 'aspectratio' in data:
|
||||
data['aspectRatio'] = data.pop('aspectratio')
|
||||
#filter reviews
|
||||
self.external_data = data
|
||||
self.save()
|
||||
if settings.DATA_SERVICE and not self.itemId.startswith('0x'):
|
||||
response = external_data('getData', {'id': self.itemId})
|
||||
if response['status']['code'] == 200:
|
||||
self.external_data = response['data']
|
||||
self.save()
|
||||
self.make_poster(True)
|
||||
|
||||
def expand_connections(self):
|
||||
c = self.get('connections')
|
||||
|
@ -292,10 +254,17 @@ class Item(models.Model):
|
|||
super(Item, self).save(*args, **kwargs)
|
||||
if not settings.USE_IMDB:
|
||||
self.itemId = ox.to26(self.id)
|
||||
|
||||
|
||||
#this does not work if another item without imdbid has the same metadata
|
||||
oxdbId = self.oxdb_id()
|
||||
if oxdbId:
|
||||
self.oxdbId = oxdbId
|
||||
if self.oxdbId != oxdbId:
|
||||
q = Item.objects.filter(oxdbId=oxdbId).exclude(id=self.id)
|
||||
if q.count() != 0:
|
||||
self.oxdbId = None
|
||||
q[0].merge_with(self, save=False)
|
||||
else:
|
||||
self.oxdbId = oxdbId
|
||||
|
||||
#id changed, what about existing item with new id?
|
||||
if settings.USE_IMDB and len(self.itemId) != 7 and self.oxdbId != self.itemId:
|
||||
|
@ -333,7 +302,7 @@ class Item(models.Model):
|
|||
self.delete_files()
|
||||
super(Item, self).delete(*args, **kwargs)
|
||||
|
||||
def merge_with(self, other):
|
||||
def merge_with(self, other, save=True):
|
||||
'''
|
||||
move all related tables to other and delete self
|
||||
'''
|
||||
|
@ -350,15 +319,15 @@ class Item(models.Model):
|
|||
f.item = other
|
||||
f.save()
|
||||
self.delete()
|
||||
other.save()
|
||||
#FIXME: update poster, stills and streams after this
|
||||
if save:
|
||||
other.save()
|
||||
#FIXME: update poster, stills and streams after this
|
||||
|
||||
def get_posters(self):
|
||||
url = self.prefered_poster_url()
|
||||
external_posters = self.external_data.get('posters', {})
|
||||
services = external_posters.keys()
|
||||
index = []
|
||||
services = [p['service']
|
||||
for p in self.poster_urls.values("service")
|
||||
.annotate(Count("id")).order_by()]
|
||||
for service in settings.POSTER_PRECEDENCE:
|
||||
if service in services:
|
||||
index.append(service)
|
||||
|
@ -369,7 +338,6 @@ class Item(models.Model):
|
|||
index.append(settings.URL)
|
||||
|
||||
posters = []
|
||||
|
||||
poster = self.path('siteposter.jpg')
|
||||
poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
|
||||
if os.path.exists(poster):
|
||||
|
@ -382,18 +350,12 @@ class Item(models.Model):
|
|||
'index': index.index(settings.URL)
|
||||
})
|
||||
|
||||
got = {}
|
||||
for p in self.poster_urls.all().order_by('-height'):
|
||||
if p.service not in got:
|
||||
got[p.service] = 1
|
||||
posters.append({
|
||||
'url': p.url,
|
||||
'width': p.width,
|
||||
'height': p.height,
|
||||
'source': p.service,
|
||||
'selected': p.url == url,
|
||||
'index': index.index(p.service)
|
||||
})
|
||||
for service in external_posters:
|
||||
p = external_posters[service][0]
|
||||
p['source'] = service
|
||||
p['selected'] = p['url'] == url
|
||||
p['index'] = index.index(service)
|
||||
posters.append(p)
|
||||
posters.sort(key=lambda a: a['index'])
|
||||
return posters
|
||||
|
||||
|
@ -452,10 +414,6 @@ class Item(models.Model):
|
|||
if value:
|
||||
i[key] = value
|
||||
|
||||
if 'reviews' in i:
|
||||
i['reviews'] = self.reviews()
|
||||
if not i['reviews']:
|
||||
del i['reviews']
|
||||
if 'cast' in i and isinstance(i['cast'][0], basestring):
|
||||
i['cast'] = [i['cast']]
|
||||
if 'cast' in i and isinstance(i['cast'][0], list):
|
||||
|
@ -497,15 +455,16 @@ class Item(models.Model):
|
|||
return info
|
||||
return i
|
||||
|
||||
|
||||
def oxdb_id(self):
|
||||
if not settings.USE_IMDB:
|
||||
return self.itemId
|
||||
if not self.get('title') and not self.get('director'):
|
||||
return None
|
||||
return utils.oxdb_id(self.get('title', ''), self.get('director', []), str(self.get('year', '')),
|
||||
self.get('season', ''), self.get('episode', ''),
|
||||
self.get('episode_title', ''), self.get('episode_director', []), self.get('episode_year', ''))
|
||||
return ox.get_oxid(self.get('title', ''), self.get('director', []),
|
||||
str(self.get('year', '')),
|
||||
self.get('season', ''), self.get('episode', ''),
|
||||
self.get('episodeTitle', ''),
|
||||
self.get('episodeDirector', []), self.get('episodeYear', ''))
|
||||
|
||||
'''
|
||||
Search related functions
|
||||
|
@ -528,10 +487,10 @@ class Item(models.Model):
|
|||
i = key['id']
|
||||
if i == 'title':
|
||||
save(i, u'\n'.join([self.get('title', 'Untitled'),
|
||||
self.get('original_title', '')]))
|
||||
self.get('originalTitle', '')]))
|
||||
elif i == 'filename':
|
||||
save(i,
|
||||
'\n'.join([os.path.join(f.folder, f.name) for f in self.files.all()]))
|
||||
'\n'.join([f.path for f in self.files.all()]))
|
||||
elif key['type'] == 'layer':
|
||||
qs = Annotation.objects.filter(layer__name=i, item=self).order_by('start')
|
||||
save(i, '\n'.join([l.value for l in qs]))
|
||||
|
@ -662,7 +621,7 @@ class Item(models.Model):
|
|||
s.words = sum([len(a.value.split()) for a in self.annotations.all()])
|
||||
|
||||
s.clips = 0 #FIXME: get clips from all layers or something
|
||||
videos = self.files.filter(active=True, is_video=True)
|
||||
videos = self.files.filter(selected=True, is_video=True)
|
||||
if videos.count() > 0:
|
||||
s.duration = sum([v.duration for v in videos])
|
||||
v = videos[0]
|
||||
|
@ -715,7 +674,7 @@ class Item(models.Model):
|
|||
current_values = [current_values]
|
||||
else:
|
||||
current_values = []
|
||||
ot = self.get('original_title')
|
||||
ot = self.get('originalTitle')
|
||||
if ot:
|
||||
current_values.append(ot)
|
||||
#FIXME: is there a better way to build name collection?
|
||||
|
@ -777,75 +736,43 @@ class Item(models.Model):
|
|||
return [f.json() for f in self.files.all()]
|
||||
|
||||
def users_with_files(self):
|
||||
return User.objects.filter(volumes__files__file__item=self).distinct()
|
||||
return User.objects.filter(
|
||||
volumes__files__file__item=self
|
||||
).order_by('-profile__level', 'date_joined').distinct()
|
||||
|
||||
def sets(self):
|
||||
sets = []
|
||||
for user in self.users_with_files():
|
||||
files = self.files.filter(instances__volume__user=user, instances__ignore=False)
|
||||
sets.append(files)
|
||||
return sets
|
||||
|
||||
def update_wanted(self):
|
||||
users = self.users_with_files()
|
||||
if users.filter(is_superuser=True).count()>0:
|
||||
files = self.files.filter(instances__volume__user__is_superuser=True)
|
||||
users = User.objects.filter(volumes__files__file__in=files,
|
||||
is_superuser=True).distinct()
|
||||
elif users.filter(is_staff=True).count()>0:
|
||||
files = self.files.filter(instances__volume__user__is_staff=True)
|
||||
users = User.objects.filter(volumes__files__file__in=files,
|
||||
is_staff=True).distinct()
|
||||
else:
|
||||
files = self.files.all()
|
||||
files = files.filter(is_video=True, instances__extra=False, instances__gt=0).order_by('part')
|
||||
folders = list(set([f.folder for f in files]))
|
||||
if len(folders) > 1:
|
||||
files = files.filter(folder=folders[0])
|
||||
files.update(wanted=True)
|
||||
self.files.exclude(id__in=files).update(wanted=False)
|
||||
wanted = []
|
||||
for s in self.sets():
|
||||
if s.filter(selected=False).count() != 0:
|
||||
wanted += [i.id for i in s]
|
||||
else:
|
||||
break
|
||||
self.files.filter(id__in=wanted).update(wanted=True)
|
||||
self.files.exclude(id__in=wanted).update(wanted=False)
|
||||
|
||||
def update_selected(self):
|
||||
files = archive.models.File.objects.filter(item=self,
|
||||
streams__available=True,
|
||||
streams__source=None)
|
||||
if files.count() == 0:
|
||||
return
|
||||
|
||||
def get_level(users):
|
||||
if users.filter(is_superuser=True).count() > 0: level = 0
|
||||
elif users.filter(is_staff=True).count() > 0: level = 1
|
||||
else: level = 2
|
||||
return level
|
||||
|
||||
current_users = User.objects.filter(volumes__files__file__in=self.files.filter(active=True)).distinct()
|
||||
current_level = get_level(current_users)
|
||||
|
||||
users = User.objects.filter(volumes__files__file__in=files).distinct()
|
||||
possible_level = get_level(users)
|
||||
|
||||
if possible_level < current_level:
|
||||
files = self.files.filter(instances__volume__user__in=users).order_by('part')
|
||||
#FIXME: this should be instance folders
|
||||
folders = list(set([f.folder
|
||||
for f in files.filter(is_video=True, instances__extra=False)]))
|
||||
files = files.filter(folder__startswith=folders[0])
|
||||
files.update(active=True)
|
||||
self.rendered = False
|
||||
self.save()
|
||||
self.update_timeline()
|
||||
else:
|
||||
files = self.files.filter(instances__volume__user__in=current_users).order_by('part')
|
||||
#FIXME: this should be instance folders
|
||||
folders = list(set([f.folder
|
||||
for f in files.filter(is_video=True, instances__extra=False)]))
|
||||
files = files.filter(folder__startswith=folders[0])
|
||||
if files.filter(active=False, is_video=True).count() > 0:
|
||||
files.update(active=True)
|
||||
self.rendered = False
|
||||
self.save()
|
||||
self.update_timeline()
|
||||
|
||||
for s in self.sets():
|
||||
if s.filter(Q(is_video=True)|Q(is_audio=True)).filter(available=False).count() == 0:
|
||||
if s.filter(selected=False).count() > 0:
|
||||
s.update(selected=True, wanted=False)
|
||||
self.rendered = False
|
||||
self.save()
|
||||
self.update_timeline()
|
||||
break
|
||||
|
||||
def make_torrent(self):
|
||||
base = self.path('torrent')
|
||||
base = os.path.abspath(os.path.join(settings.MEDIA_ROOT, base))
|
||||
if os.path.exists(base):
|
||||
shutil.rmtree(base)
|
||||
os.makedirs(base)
|
||||
ox.makedirs(base)
|
||||
|
||||
base = self.path('torrent/%s' % self.get('title'))
|
||||
base = os.path.abspath(os.path.join(settings.MEDIA_ROOT, base))
|
||||
|
@ -893,7 +820,7 @@ class Item(models.Model):
|
|||
|
||||
def streams(self):
|
||||
return archive.models.Stream.objects.filter(source=None, available=True,
|
||||
file__item=self, file__is_video=True, file__active=True).order_by('file__part')
|
||||
file__item=self, file__is_video=True, file__selected=True).order_by('file__part')
|
||||
|
||||
def update_timeline(self, force=False):
|
||||
streams = self.streams()
|
||||
|
@ -911,32 +838,6 @@ class Item(models.Model):
|
|||
self.rendered = streams != []
|
||||
self.save()
|
||||
|
||||
'''
|
||||
Poster related functions
|
||||
'''
|
||||
|
||||
def update_poster_urls(self):
|
||||
_current = {}
|
||||
for s in settings.POSTER_SERVICES:
|
||||
url = '%s?id=%s'%(s, self.itemId)
|
||||
try:
|
||||
data = json.loads(ox.net.readUrlUnicode(url))
|
||||
except:
|
||||
continue
|
||||
for service in data:
|
||||
if service not in _current:
|
||||
_current[service] = []
|
||||
for poster in data[service]:
|
||||
_current[service].append(poster)
|
||||
#FIXME: remove urls that are no longer listed
|
||||
for service in _current:
|
||||
for poster in _current[service]:
|
||||
p, created = PosterUrl.objects.get_or_create(item=self, url=poster['url'], service=service)
|
||||
if created:
|
||||
p.width = poster['width']
|
||||
p.height = poster['height']
|
||||
p.save()
|
||||
|
||||
def delete_poster(self):
|
||||
if self.poster:
|
||||
path = self.poster.path
|
||||
|
@ -948,15 +849,14 @@ class Item(models.Model):
|
|||
os.unlink(f)
|
||||
|
||||
def prefered_poster_url(self):
|
||||
self.update_poster_urls()
|
||||
external_posters = self.external_data.get('posters', {})
|
||||
service = self.poster_source
|
||||
if service and service != settings.URL:
|
||||
for u in self.poster_urls.filter(service=service).order_by('-height'):
|
||||
return u.url
|
||||
if service and service != settings.URL and service in external_posters:
|
||||
return external_posters[service][0]['url']
|
||||
if not service:
|
||||
for service in settings.POSTER_PRECEDENCE:
|
||||
for u in self.poster_urls.filter(service=service).order_by('-height'):
|
||||
return u.url
|
||||
if service in external_posters:
|
||||
return external_posters[service][0]['url']
|
||||
return None
|
||||
|
||||
def make_timeline(self):
|
||||
|
@ -984,7 +884,7 @@ class Item(models.Model):
|
|||
poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
|
||||
|
||||
frame = self.get_poster_frame_path()
|
||||
timeline = '%s.64.png' % self.timeline_prefix
|
||||
timeline = '%s64p.png' % self.timeline_prefix
|
||||
|
||||
director = u', '.join(self.get('director', ['Unknown Director']))
|
||||
cmd = [settings.ITEM_POSTER,
|
||||
|
@ -1018,7 +918,7 @@ class Item(models.Model):
|
|||
def poster_frames(self):
|
||||
frames = []
|
||||
offset = 0
|
||||
for f in self.files.filter(active=True, is_video=True):
|
||||
for f in self.files.filter(selected=True, is_video=True):
|
||||
for ff in f.frames.all():
|
||||
frames.append({
|
||||
'position': offset + ff.position,
|
||||
|
@ -1052,7 +952,7 @@ class Item(models.Model):
|
|||
frame = self.get_poster_frame_path()
|
||||
icon = self.path('icon.jpg')
|
||||
self.icon.name = icon
|
||||
timeline = '%s.64.png' % self.timeline_prefix
|
||||
timeline = '%s64p.png' % self.timeline_prefix
|
||||
cmd = [settings.ITEM_ICON,
|
||||
'-i', self.icon.path
|
||||
]
|
||||
|
@ -1074,8 +974,8 @@ class Item(models.Model):
|
|||
Annotation.objects.filter(layer=layer,item=self).delete()
|
||||
offset = 0
|
||||
language = ''
|
||||
languages = [f.language for f in self.files.filter(active=True, is_subtitle=True,
|
||||
available=True)]
|
||||
subtitles = self.files.filter(selected=True, is_subtitle=True, available=True)
|
||||
languages = [f.language for f in subtitles]
|
||||
if languages:
|
||||
if 'en' in languages:
|
||||
language = 'en'
|
||||
|
@ -1083,10 +983,18 @@ class Item(models.Model):
|
|||
language = ''
|
||||
else:
|
||||
language = languages[0]
|
||||
for f in self.files.filter(active=True, is_subtitle=True,
|
||||
available=True, language=language).order_by('part'):
|
||||
user = f.instances.all()[0].volume.user
|
||||
for data in f.srt(offset):
|
||||
|
||||
#loop over all videos
|
||||
for f in self.files.filter(Q(is_audio=True)|Q(is_video=True)) \
|
||||
.filter(selected=True).order_by('part'):
|
||||
prefix = os.path.splitext(f.path)[0]
|
||||
#if there is a subtitle with the same prefix, import
|
||||
q = subtitles.filter(path__startswith=prefix,
|
||||
language=language)
|
||||
if q.count() == 1:
|
||||
s = q[0]
|
||||
user = s.instances.all()[0].volume.user
|
||||
for data in s.srt(offset):
|
||||
annotation = Annotation(
|
||||
item=f.item,
|
||||
layer=layer,
|
||||
|
@ -1096,14 +1004,7 @@ class Item(models.Model):
|
|||
user=user
|
||||
)
|
||||
annotation.save()
|
||||
duration = self.files.filter(Q(is_audio=True)|Q(is_video=True)) \
|
||||
.filter(active=True, available=True, part=f.part)
|
||||
if duration:
|
||||
duration = duration[0].duration
|
||||
else:
|
||||
Annotation.objects.filter(layer=layer,item=self).delete()
|
||||
break
|
||||
offset += duration
|
||||
offset += f.duration
|
||||
self.update_find()
|
||||
|
||||
def delete_item(sender, **kwargs):
|
||||
|
|
|
@ -8,7 +8,7 @@ from glob import glob
|
|||
import Image
|
||||
|
||||
def loadTimeline(timeline_prefix, height=64):
|
||||
files = sorted(glob('%s.%s.*.png' % (timeline_prefix, height)))
|
||||
files = sorted(glob('%s%sp*.png' % (timeline_prefix, height)))
|
||||
f = Image.open(files[0])
|
||||
width = f.size[0]
|
||||
f = Image.open(files[-1])
|
||||
|
@ -22,7 +22,7 @@ def loadTimeline(timeline_prefix, height=64):
|
|||
return timeline
|
||||
|
||||
def makeTiles(timeline_prefix, height=16, width=3600):
|
||||
files = glob('%s.64.*.png' % timeline_prefix)
|
||||
files = glob('%s64p*.png' % timeline_prefix)
|
||||
fps = 25
|
||||
part_step = 60
|
||||
output_width = width
|
||||
|
@ -43,14 +43,14 @@ def makeTiles(timeline_prefix, height=16, width=3600):
|
|||
i = 0
|
||||
while pos < timeline.size[0]:
|
||||
end = min(pos+output_width, timeline.size[0])
|
||||
timeline.crop((pos, 0, end, timeline.size[1])).save('%s.%s.%04d.png' % (timeline_prefix, timeline.size[1], i))
|
||||
timeline.crop((pos, 0, end, timeline.size[1])).save('%s%sp%04d.png' % (timeline_prefix, timeline.size[1], i))
|
||||
pos += output_width
|
||||
i += 1
|
||||
|
||||
def makeTimelineOverview(timeline_prefix, width, inpoint=0, outpoint=0, duration=-1, height=16):
|
||||
input_scale = 25
|
||||
|
||||
timeline_file = '%s.%s.png' % (timeline_prefix, height)
|
||||
timeline_file = '%s%sp.png' % (timeline_prefix, height)
|
||||
if outpoint > 0:
|
||||
timeline_file = '%s.overview.%s.%d-%d.png' % (timeline_prefix, height, inpoint, outpoint)
|
||||
|
||||
|
@ -76,7 +76,7 @@ def join_timelines(timelines, prefix):
|
|||
|
||||
tiles = []
|
||||
for timeline in timelines:
|
||||
tiles += sorted(glob('%s.%s.*.png'%(timeline, height)))
|
||||
tiles += sorted(glob('%s%sp*.png'%(timeline, height)))
|
||||
|
||||
timeline = Image.new("RGB", (2 * width, height))
|
||||
|
||||
|
@ -87,7 +87,7 @@ def join_timelines(timelines, prefix):
|
|||
timeline.paste(tile, (pos, 0, pos+tile.size[0], height))
|
||||
pos += tile.size[0]
|
||||
if pos >= width:
|
||||
timeline_name = '%s.%s.%04d.png' % (prefix, height, i)
|
||||
timeline_name = '%s%sp%04d.png' % (prefix, height, i)
|
||||
timeline.crop((0, 0, width, height)).save(timeline_name)
|
||||
i += 1
|
||||
if pos > width:
|
||||
|
@ -95,7 +95,7 @@ def join_timelines(timelines, prefix):
|
|||
timeline.paste(t, (0, 0, t.size[0], height))
|
||||
pos -= width
|
||||
if pos:
|
||||
timeline_name = '%s.%s.%04d.png' % (prefix, height, i)
|
||||
timeline_name = '%s%sp%04d.png' % (prefix, height, i)
|
||||
timeline.crop((0, 0, pos, height)).save(timeline_name)
|
||||
|
||||
makeTiles(prefix, 16, 3600)
|
||||
|
|
|
@ -1,19 +1,10 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
#
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import re
|
||||
import hashlib
|
||||
import unicodedata
|
||||
|
||||
from django.conf import settings
|
||||
import ox
|
||||
import ox.iso
|
||||
from ox.normalize import normalizeName, normalizeTitle
|
||||
import ox.web.imdb
|
||||
|
||||
|
||||
def parse_decimal(string):
|
||||
string = string.replace(':', '/')
|
||||
|
@ -29,170 +20,6 @@ def plural_key(term):
|
|||
}.get(term, term + 's')
|
||||
|
||||
|
||||
def oxid(title, director, year='', seriesTitle='', episodeTitle='', season=0, episode=0):
|
||||
director = ', '.join(director)
|
||||
oxid_value = u"\n".join([title, director, year])
|
||||
oxid = hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()
|
||||
if seriesTitle:
|
||||
oxid_value = u"\n".join([seriesTitle, "%02d" % season])
|
||||
oxid = hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()[:20]
|
||||
oxid_value = u"\n".join(["%02d" % episode, episodeTitle, director, year])
|
||||
oxid += hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()[:20]
|
||||
return u"0x" + oxid
|
||||
|
||||
|
||||
def oxdb_id(title, director=[], year='', season='', episode='', episode_title='', episode_director=[], episode_year=''):
|
||||
# new id function, will replace oxid()
|
||||
def get_hash(string):
|
||||
return hashlib.sha1(string.encode('utf-8')).hexdigest().upper()
|
||||
director = ', '.join(director)
|
||||
episode_director = ', '.join(episode_director)
|
||||
if not episode:
|
||||
oxdb_id = get_hash(director)[:8] + get_hash('\n'.join([title, str(year)]))[:8]
|
||||
else:
|
||||
oxdb_id = get_hash('\n'.join([director, title, str(year), str(season)]))[:8] + \
|
||||
get_hash('\n'.join([str(episode), episode_director, episode_title, str(episode_year)]))[:8]
|
||||
return u'0x' + oxdb_id
|
||||
|
||||
|
||||
def parse_director(director):
|
||||
director = os.path.basename(os.path.dirname(director))
|
||||
if director.endswith('_'):
|
||||
director = "%s." % director[:-1]
|
||||
director = [normalizeName(d) for d in director.split('; ')]
|
||||
|
||||
def cleanup(director):
|
||||
director = director.strip()
|
||||
director = director.replace('Series', '')
|
||||
director = director.replace('Unknown Director', '')
|
||||
director = director.replace('Various Directors', '')
|
||||
return director
|
||||
director = filter(None, [cleanup(d) for d in director])
|
||||
return director
|
||||
|
||||
|
||||
def parse_title(_title, searchTitle = False):
|
||||
'''
|
||||
normalize filename to get item title
|
||||
'''
|
||||
_title = os.path.basename(_title)
|
||||
_title = _title.replace('... ', '_dot_dot_dot_')
|
||||
_title = _title.replace('. ', '_dot__space_')
|
||||
_title = _title.replace(' .', '_space__dot_')
|
||||
title = _title.split('.')[0]
|
||||
title = re.sub('([A-Za-z0-9])_ ', '\\1: ', title)
|
||||
se = re.compile('Season (\d+).Episode (\d+)').findall(_title)
|
||||
if se:
|
||||
se = "S%02dE%02d" % (int(se[0][0]), int(se[0][1]))
|
||||
if 'Part' in _title.split('.')[-2] and 'Episode' not in _title.split('.')[-3]:
|
||||
stitle = _title.split('.')[-3]
|
||||
else:
|
||||
stitle = _title.split('.')[-2]
|
||||
if stitle.startswith('Episode '):
|
||||
stitle = ''
|
||||
if searchTitle:
|
||||
title = '"%s" %s' % (title, stitle)
|
||||
else:
|
||||
title = '%s (%s) %s' % (title, se, stitle)
|
||||
title = title.strip()
|
||||
title = title.replace('_dot_dot_dot_', '... ')
|
||||
title = title.replace('_dot__space_', '. ')
|
||||
title = title.replace('_space__dot_', ' .')
|
||||
year = ox.findRe(title, '(\(\d{4}\))')
|
||||
if year and title.endswith(year):
|
||||
title = title[:-len(year)].strip()
|
||||
title = normalizeTitle(title)
|
||||
if searchTitle and year:
|
||||
title = u"%s %s" % (title, year)
|
||||
return title
|
||||
|
||||
|
||||
def parse_series_title(path):
|
||||
seriesTitle = u''
|
||||
if path.startswith('Series'):
|
||||
seriesTitle = os.path.basename(path)
|
||||
else:
|
||||
t = parse_title(path)
|
||||
if " (S" in t:
|
||||
seriesTitle = t.split(" (S")[0]
|
||||
return seriesTitle
|
||||
|
||||
|
||||
def parse_episode_title(path):
|
||||
episodeTitle = u''
|
||||
ep = re.compile('.Episode \d+?\.(.*?)\.[a-zA-Z]').findall(path)
|
||||
if ep:
|
||||
episodeTitle = ep[0]
|
||||
return episodeTitle
|
||||
|
||||
|
||||
def parse_season_episode(path):
|
||||
season = 0
|
||||
episode = 0
|
||||
path = os.path.basename(path)
|
||||
se = re.compile('Season (\d+).Episode (\d+)').findall(path)
|
||||
if se:
|
||||
season = int(se[0][0])
|
||||
episode = int(se[0][1])
|
||||
else:
|
||||
ep = re.compile('.Episode (\d+?)').findall(path)
|
||||
if ep:
|
||||
episode = int(ep[0][0])
|
||||
if season == 0 and episode == 0:
|
||||
se = re.compile('S(\d\d)E(\d\d)').findall(path)
|
||||
if se:
|
||||
season = int(se[0][0])
|
||||
episode = int(se[0][1])
|
||||
return (season, episode)
|
||||
|
||||
|
||||
def oxdb_part(path):
|
||||
part = 1
|
||||
path = path.lower()
|
||||
p = re.compile('part\s*?(\d+)\.').findall(path)
|
||||
if p:
|
||||
part = p[0]
|
||||
else:
|
||||
p = re.compile('cd\s*?(\d+)\.').findall(path)
|
||||
if p:
|
||||
part = p[0]
|
||||
return part
|
||||
|
||||
|
||||
def parse_path(path):
|
||||
'''
|
||||
expects path in the form
|
||||
L/Last, First/Title (YYYY)
|
||||
M/McCarthy, Thomas/The Visitor (2007)
|
||||
G/Godard, Jean-Luc/Histoire(s) du cinema_ Toutes les histoires (1988)
|
||||
'''
|
||||
r = {}
|
||||
r['title'] = parse_title(path)
|
||||
year = ox.findRe(path, '\((\d{4})\)')
|
||||
if year:
|
||||
r['year'] = year
|
||||
if not settings.USE_IMDB:
|
||||
return r
|
||||
|
||||
search_title = parse_title(path, True)
|
||||
r['director'] = parse_director(path)
|
||||
|
||||
#FIXME: only include it its actually a series
|
||||
r['episode_title'] = parse_episode_title(path)
|
||||
r['season'], r['episode'] = parse_season_episode(path)
|
||||
r['series_title'] = parse_series_title(path)
|
||||
|
||||
#FIXME: use oxdata/id/?title=title&director=director&year=year
|
||||
#r['imdbId'] = ox.web.imdb.guess(search_title, ', '.join(r['director']), timeout=-1)
|
||||
r['imdbId'] = ox.web.imdb.guess(search_title, timeout=-1)
|
||||
r['oxdbId'] = oxdb_id(r['title'], r['director'], r.get('year', ''),
|
||||
r.get('season', ''), r.get('episode', ''),
|
||||
episode_title=r['episode_title'],
|
||||
episode_director=[],
|
||||
episode_year='')
|
||||
return r
|
||||
|
||||
|
||||
def sort_string(string):
|
||||
string = string.replace(u'Þ', 'Th')
|
||||
#pad numbered titles
|
||||
|
|
|
@ -419,22 +419,6 @@ actions.register(remove, cache=False)
|
|||
'''
|
||||
Poster API
|
||||
'''
|
||||
def parse(request): #parse path and return info
|
||||
'''
|
||||
param data {
|
||||
path: string
|
||||
}
|
||||
return {
|
||||
status: {'code': int, 'text': string},
|
||||
data: {
|
||||
imdb: string
|
||||
}
|
||||
}
|
||||
'''
|
||||
path = json.loads(request.POST['data'])['path']
|
||||
response = json_response(utils.parse_path(path))
|
||||
return render_to_json_response(response)
|
||||
actions.register(parse)
|
||||
|
||||
|
||||
def setPosterFrame(request): #parse path and return info
|
||||
|
@ -667,7 +651,7 @@ def timeline(request, id, size, position):
|
|||
item = get_object_or_404(models.Item, itemId=id)
|
||||
if not item.access(request.user):
|
||||
return HttpResponseForbidden()
|
||||
timeline = '%s.%s.%04d.png' %(item.timeline_prefix, size, int(position))
|
||||
timeline = '%s%sp%04d.png' %(item.timeline_prefix, size, int(position))
|
||||
return HttpFileResponse(timeline, content_type='image/png')
|
||||
|
||||
|
||||
|
@ -675,7 +659,7 @@ def timeline_overview(request, id, size):
|
|||
item = get_object_or_404(models.Item, itemId=id)
|
||||
if not item.access(request.user):
|
||||
return HttpResponseForbidden()
|
||||
timeline = '%s.%s.png' %(item.timeline_prefix, size)
|
||||
timeline = '%s%sp.png' %(item.timeline_prefix, size)
|
||||
return HttpFileResponse(timeline, content_type='image/png')
|
||||
|
||||
def torrent(request, id, filename=None):
|
||||
|
@ -708,6 +692,11 @@ def video(request, id, resolution, format, index=None):
|
|||
index = int(index) - 1
|
||||
else:
|
||||
index = 0
|
||||
#streams = Stream.object.filter(file__item__itemId=item.itemId,
|
||||
# file__selected=True, file__part=index,
|
||||
# resolution=resolution, format=format)
|
||||
#if streams.count() != 1:
|
||||
# reise Http404
|
||||
streams = Stream.objects.filter(file__item__itemId=item.itemId,
|
||||
resolution=resolution, format=format).order_by('file__part')
|
||||
if index > streams.count():
|
||||
|
|
|
@@ -18,6 +18,8 @@ def get_name_sort(name):
    name = unicodedata.normalize('NFKD', name).strip()
    if name:
        person, created = Person.objects.get_or_create(name=name)
        if created:
            person.save()
        sortname = unicodedata.normalize('NFKD', person.sortname)
    else:
        sortname = u''

@@ -172,20 +172,10 @@ SITE_CONFIG = join(PROJECT_ROOT, '0xdb.jsonc')
TRACKER_URL="http://url2torrent.net:6970/announce"


-#Movie related settings
-REVIEW_WHITELIST = {
-    u'.filmcritic.com': u'Filmcritic',
-    u'metacritic.com': u'Metacritic',
-    u'nytimes.com': u'New York Times',
-    u'rottentomatoes.com': u'Rotten Tomatoes',
-    u'salon.com': u'Salon.com',
-    u'sensesofcinema.com': u'Senses of Cinema',
-    u'villagevoice.com': u'Village Voice'
-}
-
#list of poster services, https://wiki.0x2620.org/wiki/pandora/posterservice
POSTER_SERVICES = []
+DATA_SERVICE = ''
POSTER_PRECEDENCE = (
    'piratecinema.org',
    'local',
    'criterion.com',
    'wikipedia.org',

@@ -196,19 +186,8 @@ POSTER_PRECEDENCE = (
    'other'
)

-DEFAULT_LISTS = [
-    {"name": "Favorites"},
-    {"name": "1960s", "query": {
-        "conditions": [{"key": "year", "value": "196", "operator": "^"}],
-        "operator": ""}
-    }
-]
#0xdb.org
USE_IMDB = True
-#this should idealy go away, one folder per item
-USE_FOLDER = True

#POSTER_SERVICES=['http://data.0xdb.org/poster/']

#copy scripts and adjust to customize
ITEM_POSTER = join('scripts', 'oxdb_poster')

|
|
|
@@ -13,9 +13,13 @@ from item import utils
import managers

def get_title_sort(title):
    if isinstance(title, str):
        title = unicode(title)
    title = unicodedata.normalize('NFKD', title).strip()
    if title:
        title, created = Title.objects.get_or_create(title=title)
        if created:
            title.save()
        sorttitle = unicodedata.normalize('NFKD', title.sorttitle)
    else:
        sorttitle = u''

|
|
@@ -169,7 +169,7 @@ def signup(request):
        user.is_staff = first_user
        user.save()
        #create default user lists:
-        for l in settings.DEFAULT_LISTS:
+        for l in settings.CONFIG['personalLists']:
            list = models.List(name=l['name'], user=user)
            for key in ('query', 'public', 'featured'):
                if key in l:

@ -1,6 +1,6 @@
|
|||
// vim: et:ts=4:sw=4:sts=4:ft=javascript
|
||||
|
||||
Ox.FilesView = function(options, self) {
|
||||
pandora.ui.filesView = function(options, self) {
|
||||
|
||||
var self = self || {},
|
||||
that = Ox.Element({}, self)
|
||||
|
@ -55,19 +55,11 @@ Ox.FilesView = function(options, self) {
|
|||
},
|
||||
{
|
||||
align: 'left',
|
||||
id: 'folder',
|
||||
id: 'path',
|
||||
operator: '+',
|
||||
title: 'Folder',
|
||||
title: 'Path',
|
||||
visible: true,
|
||||
width: 180
|
||||
},
|
||||
{
|
||||
align: 'left',
|
||||
id: 'name',
|
||||
operator: '+',
|
||||
title: 'Name',
|
||||
visible: true,
|
||||
width: 360
|
||||
width: 560
|
||||
},
|
||||
{
|
||||
align: 'left',
|
||||
|
@ -147,7 +139,7 @@ Ox.FilesView = function(options, self) {
|
|||
}), callback);
|
||||
},
|
||||
scrollbarVisible: true,
|
||||
sort: [{key: 'name', operator: '+'}]
|
||||
sort: [{key: 'path', operator: '+'}]
|
||||
})
|
||||
.bindEvent({
|
||||
open: openFiles,
|
||||
|
@ -174,19 +166,11 @@ Ox.FilesView = function(options, self) {
|
|||
},
|
||||
{
|
||||
align: 'left',
|
||||
id: 'folder',
|
||||
id: 'path',
|
||||
operator: '+',
|
||||
title: 'Folder',
|
||||
title: 'Path',
|
||||
visible: true,
|
||||
width: 180
|
||||
},
|
||||
{
|
||||
align: 'left',
|
||||
id: 'name',
|
||||
operator: '+',
|
||||
title: 'Name',
|
||||
visible: true,
|
||||
width: 360
|
||||
width: 560
|
||||
},
|
||||
],
|
||||
columnsMovable: true,
|
||||
|
@ -352,11 +336,19 @@ Ox.FilesView = function(options, self) {
|
|||
});
|
||||
|
||||
function openFiles(data) {
|
||||
//Ox.print('........', JSON.stringify(self.$filesList.value(data.ids[0], 'instances')))
|
||||
data.ids.length == 1 && pandora.api.parsePath({
|
||||
path: self.$filesList.value(data.ids[0], 'path')
|
||||
}, function(result) {
|
||||
['title', 'director', 'year'].forEach(function(key) {
|
||||
if (result.data[key]) {
|
||||
self['$' + key + 'Input'].options({value: result.data[key]});
|
||||
}
|
||||
});
|
||||
updateForm();
|
||||
});
|
||||
}
|
||||
|
||||
function selectFiles(data) {
|
||||
//Ox.print('........', JSON.stringify(self.$filesList.value(data.ids[0], 'instances')))
|
||||
self.selected = data.ids;
|
||||
self.$instancesList.options({
|
||||
items: data.ids.length == 1
|
|
@ -146,8 +146,8 @@ pandora.ui.infoView = function(data) {
|
|||
})
|
||||
.html(
|
||||
data.title + (
|
||||
data.original_title && data.original_title != data.title
|
||||
? ' ' + formatLight('(' + data.original_title + ')') : ''
|
||||
data.originalTitle && data.originalTitle != data.title
|
||||
? ' ' + formatLight('(' + data.originalTitle + ')') : ''
|
||||
)
|
||||
)
|
||||
.appendTo($text);
|
||||
|
@ -182,11 +182,10 @@ pandora.ui.infoView = function(data) {
|
|||
$div.html(html.join('; '));
|
||||
}
|
||||
|
||||
// fixme: should be camelCase!
|
||||
data.alternative_titles && $('<div>')
|
||||
data.alternativeTitles && $('<div>')
|
||||
.css(css)
|
||||
.html(
|
||||
formatKey('Alternative Titles') + data.alternative_titles.map(function(value) {
|
||||
formatKey('Alternative Titles') + data.alternativeTitles.map(function(value) {
|
||||
return value[0] + (value[1] ? ' '
|
||||
+ formatLight('(' + value[1] + ')') : '');
|
||||
}).join(', ')
|
||||
|
@ -225,9 +224,9 @@ pandora.ui.infoView = function(data) {
|
|||
.css(css)
|
||||
.appendTo($text);
|
||||
html = [];
|
||||
['genre', 'keyword'].forEach(function(key) {
|
||||
['genre', 'keywords'].forEach(function(key) {
|
||||
data[key] && html.push(
|
||||
formatKey(key == 'keyword' ? 'keywords' : key)
|
||||
formatKey(key)
|
||||
+ formatValue(data[key], key)
|
||||
);
|
||||
});
|
||||
|
@ -272,19 +271,19 @@ pandora.ui.infoView = function(data) {
|
|||
.appendTo($text);
|
||||
});
|
||||
|
||||
data.filming_locations && $('<div>')
|
||||
data.filmingLocations && $('<div>')
|
||||
.css(css)
|
||||
.html(
|
||||
formatKey('Filming Locations') + data.filming_locations.map(function(location) {
|
||||
formatKey('Filming Locations') + data.filmingLocations.map(function(location) {
|
||||
return '<a href="/map/@' + location + '">' + location + '</a>'
|
||||
}).join(', ')
|
||||
)
|
||||
.appendTo($text);
|
||||
|
||||
data.releasedate && $('<div>')
|
||||
data.releaseDate && $('<div>')
|
||||
.css(css)
|
||||
.html(
|
||||
formatKey('Release Date') + Ox.formatDate(data.releasedate, '%A, %B %e, %Y')
|
||||
formatKey('Release Date') + Ox.formatDate(data.releaseDate, '%A, %B %e, %Y')
|
||||
)
|
||||
.appendTo($text);
|
||||
|
||||
|
|
|
@@ -293,7 +293,7 @@ pandora.ui.item = function() {

    } else if (pandora.user.ui.itemView == 'files') {
        pandora.$ui.contentPanel.replaceElement(1,
-            pandora.$ui.item = Ox.FilesView({
+            pandora.$ui.item = pandora.ui.filesView({
                id: result.data.id
            })
        );
|
|
|
@@ -4,7 +4,7 @@
    "js/pandora/URL.js",
    "js/pandora/autovalidate.js",
    "js/pandora/utils.js",
-    "js/pandora/ui/Ox.FilesView.js",
+    "js/pandora/ui/filesView.js",
    "js/pandora/ui/account.js",
    "js/pandora/ui/appPanel.js",
    "js/pandora/ui/backButton.js",
|
|