# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
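"""
Models for media files and their per-user copies: File (a media file,
identified by its oshash and linked to an Item), Volume (a named group of a
user's files), Instance (one concrete copy of a File inside a Volume) and
Frame (a still extracted from a File at a given position).
"""
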
from __future__ import division

import os.path
import re
import time

from django.db import models
from django.contrib.auth.models import User
from django.conf import settings

from ox.django import fields
import ox
from ox.normalize import canonicalTitle
import chardet

from item import utils
from item.models import Item
from person.models import get_name_sort


class File(models.Model):

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    verified = models.BooleanField(default=False)

    oshash = models.CharField(max_length=16, unique=True)
    item = models.ForeignKey(Item, related_name='files')

    name = models.CharField(max_length=2048, default="") # canonical path/file name
    folder = models.CharField(max_length=2048, default="") # canonical folder
    sort_name = models.CharField(max_length=2048, default="") # sort name

    type = models.CharField(default="", max_length=255)
    part = models.IntegerField(null=True)
    version = models.CharField(default="", max_length=255) # version label
    language = models.CharField(default="", max_length=8)

    season = models.IntegerField(default=-1)
    episode = models.IntegerField(default=-1)

    size = models.BigIntegerField(default=0)
    duration = models.BigIntegerField(null=True)

    info = fields.DictField(default={})

    video_codec = models.CharField(max_length=255)
    pixel_format = models.CharField(max_length=255)
    display_aspect_ratio = models.CharField(max_length=255)
    width = models.IntegerField(default=0)
    height = models.IntegerField(default=0)
    framerate = models.CharField(max_length=255)

    audio_codec = models.CharField(max_length=255)
    channels = models.IntegerField(default=0)
    samplerate = models.IntegerField(default=0)

    bits_per_pixel = models.FloatField(default=-1)
    pixels = models.BigIntegerField(default=0)

    # True if a derivative is available or subtitles were uploaded
    available = models.BooleanField(default=False)

    is_audio = models.BooleanField(default=False)
    is_video = models.BooleanField(default=False)
    is_extra = models.BooleanField(default=False)
    is_main = models.BooleanField(default=False)
    is_subtitle = models.BooleanField(default=False)
    is_version = models.BooleanField(default=False)

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        self.name = self.get_name()
        self.folder = self.get_folder()
        if self.name and not self.sort_name:
            self.sort_name = utils.sort_string(canonicalTitle(self.name))
        if self.info:
            for key in ('duration', 'size'):
                setattr(self, key, self.info.get(key, 0))

            if 'video' in self.info and self.info['video'] and \
                    'width' in self.info['video'][0]:
                video = self.info['video'][0]
                self.video_codec = video['codec']
                self.width = video['width']
                self.height = video['height']
                self.framerate = video['framerate']
                if 'display_aspect_ratio' in video:
                    self.display_aspect_ratio = video['display_aspect_ratio']
                else:
                    self.display_aspect_ratio = "%s:%s" % (self.width, self.height)
                self.is_video = True
                self.is_audio = False
                # image files and single-frame durations do not count as video
                if self.name.endswith('.jpg') or \
                        self.name.endswith('.png') or \
                        self.duration == 0.04:
                    self.is_video = False
            else:
                self.is_video = False
                self.display_aspect_ratio = "4:3"
                self.width = 320
                self.height = 240
            if 'audio' in self.info and self.info['audio']:
                audio = self.info['audio'][0]
                self.audio_codec = audio['codec']
                self.samplerate = audio.get('samplerate', 0)
                self.channels = audio.get('channels', 0)

                if not self.is_video:
                    self.is_audio = True
            else:
                self.is_audio = False

            if self.framerate:
                self.pixels = int(self.width * self.height * float(utils.parse_decimal(self.framerate)) * self.duration)
        else:
            # no info yet, guess the type from the file extension
            self.is_video = os.path.splitext(self.name)[-1] in ('.avi', '.mkv', '.dv', '.ogv', '.mpeg', '.mov')
            self.is_audio = os.path.splitext(self.name)[-1] in ('.mp3', '.wav', '.ogg', '.flac')
            self.is_subtitle = os.path.splitext(self.name)[-1] in ('.srt', '.sub', '.idx')

        if not self.is_audio and not self.is_video and self.name.endswith('.srt'):
            self.is_subtitle = True

        if self.name and self.name.lower().startswith('extras/'):
            self.is_extra = True
            self.is_main = False
        elif self.name and self.name.lower().startswith('versions/'):
            self.is_version = True
            self.is_main = False
        else:
            self.is_extra = False
            self.is_main = True

        self.part = self.get_part()
        self.type = self.get_type()

        if self.type not in ('audio', 'video'):
            self.duration = None
        super(File, self).save(*args, **kwargs)

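    # save() derives most columns from self.info; the values below are only
    # illustrative, but the keys shown are the ones save() actually reads:
    #
    #     {
    #         'size': 734003200,
    #         'duration': 5400.0,
    #         'video': [{'codec': 'h264', 'width': 1280, 'height': 720,
    #                    'framerate': '25', 'display_aspect_ratio': '16:9'}],
    #         'audio': [{'codec': 'aac', 'samplerate': 48000, 'channels': 2}],
    #     }
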
    # upload and data handling
    video = models.FileField(null=True, blank=True,
                             upload_to=lambda f, x: f.path(settings.VIDEO_PROFILE))
    data = models.FileField(null=True, blank=True,
                            upload_to=lambda f, x: f.path('data.bin'))

    def path(self, name):
        h = self.oshash
        return os.path.join('files', h[:2], h[2:4], h[4:6], h[6:], name)
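
    # e.g. a file with oshash '0123456789abcdef' stores its video under
    # 'files/01/23/45/6789abcdef/<VIDEO_PROFILE>' and its uploaded data
    # under 'files/01/23/45/6789abcdef/data.bin'.
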
    def contents(self):
        if self.data:
            self.data.seek(0)
            return self.data.read()
        return None

    def srt(self):

        def _detectEncoding(fp):
            bomDict = {  # bytepattern: name
                (0x00, 0x00, 0xFE, 0xFF): "utf_32_be",
                (0xFF, 0xFE, 0x00, 0x00): "utf_32_le",
                (0xFE, 0xFF, None, None): "utf_16_be",
                (0xFF, 0xFE, None, None): "utf_16_le",
                (0xEF, 0xBB, 0xBF, None): "utf_8",
            }

            # go to the beginning of the file and get the first 4 bytes
            oldFP = fp.tell()
            fp.seek(0)
            (byte1, byte2, byte3, byte4) = tuple(map(ord, fp.read(4)))

            # try BOM detection using 4 bytes, 3 bytes, or 2 bytes
            bomDetection = bomDict.get((byte1, byte2, byte3, byte4))
            if not bomDetection:
                bomDetection = bomDict.get((byte1, byte2, byte3, None))
                if not bomDetection:
                    bomDetection = bomDict.get((byte1, byte2, None, None))

            # if a BOM was detected, we're done
            fp.seek(oldFP)
            if bomDetection:
                return bomDetection

            # more character detection magic using http://chardet.feedparser.org/
            fp.seek(0)
            rawdata = fp.read()
            encoding = chardet.detect(rawdata)['encoding'] or 'latin-1'
            fp.seek(oldFP)
            return encoding

        def parseTime(t):
            # '01:02:03,400' -> seconds as float
            return ox.time2ms(t.replace(',', '.')) / 1000

        srt = []

        f = open(self.data.path)
        encoding = _detectEncoding(f)
        data = f.read()
        f.close()
        data = data.replace('\r\n', '\n')
        try:
            data = unicode(data, encoding)
        except:
            try:
                data = unicode(data, 'latin-1')
            except:
                print "failed to detect encoding, giving up"
                return srt

        srts = re.compile(r'(\d\d:\d\d:\d\d[,.]\d\d\d)\s*-->\s*(\d\d:\d\d:\d\d[,.]\d\d\d)\s*(.+?)\n\n', re.DOTALL)
        i = 0
        for s in srts.findall(data):
            _s = {'id': str(i),
                  'in': parseTime(s[0]), 'out': parseTime(s[1]), 'value': s[2].strip()}
            srt.append(_s)
            i += 1
        return srt
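
    # srt() returns a list of dicts with in/out points in seconds, e.g.
    #     [{'id': '0', 'in': 1.0, 'out': 3.5, 'value': 'First subtitle'},
    #      {'id': '1', 'in': 4.0, 'out': 6.0, 'value': 'Second subtitle'}]
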
    def editable(self, user):
        #FIXME: check that user has an instance of this file
        return True

    def save_chunk(self, chunk, chunk_id=-1):
        if not self.available:
            if not self.video:
                self.video.save(settings.VIDEO_PROFILE, chunk)
            else:
                f = open(self.video.path, 'a')
                #FIXME: should check that chunk_id/offset is right
                f.write(chunk.read())
                f.close()
            return True
        return False
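
    # save_chunk() is meant for sequential uploads: the first chunk creates the
    # video file, later chunks are appended (the chunk_id/offset check is still
    # a FIXME above). Once the file is marked available, further chunks are
    # rejected and False is returned.
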
    def json(self, keys=None, user=None):
        resolution = (self.width, self.height)
        if resolution == (0, 0):
            resolution = None
        duration = self.duration
        if self.get_type() != 'video':
            duration = None
        data = {
            'available': self.available,
            'duration': duration,
            'framerate': self.framerate,
            'height': self.height,
            'width': self.width,
            'resolution': resolution,
            'oshash': self.oshash,
            'samplerate': self.samplerate,
            'video_codec': self.video_codec,
            'audio_codec': self.audio_codec,
            'name': self.name,
            'size': self.size,
            'info': self.info,
            'users': list(set([i.volume.user.username for i in self.instances.all()])),
            'instances': [i.json() for i in self.instances.all()],
            'folder': self.get_folder(),
            'type': self.get_type(),
            'is_main': self.is_main,
            'part': self.get_part()
        }
        if keys:
            for k in data.keys():
                if k not in keys:
                    del data[k]
        return data
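
    # e.g. f.json(keys=['oshash', 'size', 'instances']) returns only the
    # requested keys; with keys=None the full dict above is returned.
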
    def get_part(self):
        # 1-based position among the item's files with the same type and is_main flag
        if not self.is_extra:
            files = list(self.item.files.filter(type=self.type,
                                                is_main=self.is_main).order_by('sort_name'))
            if self in files:
                return files.index(self) + 1
        return None

    def get_type(self):
        if self.is_video:
            return 'video'
        if self.is_audio:
            return 'audio'
        if self.is_subtitle or os.path.splitext(self.name)[-1] in ('.sub', '.idx'):
            return 'subtitle'
        return 'unknown'

    def get_folder(self):
        # folder is derived from the item's metadata, e.g.
        # director / title (year), grouped by first letter
        name = os.path.splitext(self.get_name())[0]
        if self.item:
            if settings.USE_IMDB:
                director = self.item.get('director', ['Unknown Director'])
                director = map(get_name_sort, director)
                director = '; '.join(director)
                director = re.sub(r'[:\\/]', '_', director)
                name = os.path.join(director, name)
                year = self.item.get('year', None)
                if year:
                    name += u' (%s)' % year
            name = os.path.join(name[0].upper(), name)
            return name
        return ''
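
    # Hypothetical example (assuming USE_IMDB and that get_name_sort() turns
    # 'Jim Jarmusch' into 'Jarmusch, Jim'):
    #     get_folder(): 'J/Jarmusch, Jim/Down by Law (1986)'
    #     get_name():   'Down by Law.avi'
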
    def get_name(self):
        name = ''
        if self.item:
            name = self.item.get('title', 'Untitled')
            name = re.sub(r'[:\\/]', '_', name)
        if not name:
            name = 'Untitled'
        if self.instances.count() > 0:
            ext = os.path.splitext(self.instances.all()[0].name)[-1]
        else:
            ext = '.unknown'
        return name + ext


class Volume(models.Model):

    class Meta:
        unique_together = ("user", "name")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    user = models.ForeignKey(User, related_name='volumes')
    name = models.CharField(max_length=1024)

    def __unicode__(self):
        return u"%s's %s" % (self.user, self.name)


class Instance(models.Model):

    class Meta:
        unique_together = ("name", "folder", "volume")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    atime = models.IntegerField(default=lambda: int(time.time()), editable=False)
    ctime = models.IntegerField(default=lambda: int(time.time()), editable=False)
    mtime = models.IntegerField(default=lambda: int(time.time()), editable=False)

    name = models.CharField(max_length=2048)
    folder = models.CharField(max_length=2048)

    file = models.ForeignKey(File, related_name='instances')
    volume = models.ForeignKey(Volume, related_name='files')

    def __unicode__(self):
        return u"%s's %s <%s>" % (self.volume.user, self.name, self.file.oshash)

    @property
    def itemId(self):
        return self.file.itemId

    def json(self):
        return {
            'user': self.volume.user.username,
            'volume': self.volume.name,
            'folder': self.folder,
            'name': self.name
        }


def frame_path(frame, name):
    ext = os.path.splitext(name)[-1]
    name = "%s%s" % (frame.position, ext)
    return frame.file.path(name)
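

# e.g. a frame of a file with oshash '0123456789abcdef' at position 12.5,
# uploaded as 'shot.png', is stored at 'files/01/23/45/6789abcdef/12.5.png'.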


class Frame(models.Model):

    class Meta:
        unique_together = ("file", "position")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    file = models.ForeignKey(File, related_name="frames")
    position = models.FloatField()
    frame = models.ImageField(default=None, null=True, upload_to=frame_path)

    '''
    def save(self, *args, **kwargs):
        name = "%d.jpg" % self.position
        if self.frame.name != name:
            #FIXME: frame path should be renamed on save to match current position
            pass
        super(Frame, self).save(*args, **kwargs)
    '''

def __unicode__(self):
        return u'%s/%s' % (self.file, self.position)