From d780045306b0bcbe17c06acb3ab1a731ce5bf6b3 Mon Sep 17 00:00:00 2001 From: j Date: Fri, 1 Jul 2016 17:41:53 +0200 Subject: [PATCH] flake8 --- pandora/archive/admin.py | 4 +-- pandora/archive/chunk.py | 2 +- pandora/archive/extract.py | 4 +-- pandora/archive/models.py | 70 ++++++++++++++++++++------------------ pandora/archive/tasks.py | 22 ++++++------ pandora/archive/views.py | 40 +++++++++++----------- 6 files changed, 72 insertions(+), 70 deletions(-) diff --git a/pandora/archive/admin.py b/pandora/archive/admin.py index 89b16b211..88b7679b7 100644 --- a/pandora/archive/admin.py +++ b/pandora/archive/admin.py @@ -7,12 +7,12 @@ import models class FileAdmin(admin.ModelAdmin): - search_fields = ['path','oshash', 'video_codec'] + search_fields = ['path', 'oshash', 'video_codec'] list_display = ['available', 'wanted', 'selected', '__unicode__', 'public_id'] list_display_links = ('__unicode__', ) def public_id(self, obj): - return '%s'%(obj.item.public_id) + return '%s' % (obj.item.public_id) admin.site.register(models.File, FileAdmin) diff --git a/pandora/archive/chunk.py b/pandora/archive/chunk.py index 1b11b6653..045d007f8 100644 --- a/pandora/archive/chunk.py +++ b/pandora/archive/chunk.py @@ -23,7 +23,7 @@ def save_chunk(obj, file, chunk, offset, name, done_cb=None): else: path = file.path size = file.size - if offset == None: + if offset is None: offset = size elif offset > size: return False, size diff --git a/pandora/archive/extract.py b/pandora/archive/extract.py index 38d3af20c..aae2c5906 100644 --- a/pandora/archive/extract.py +++ b/pandora/archive/extract.py @@ -494,9 +494,9 @@ def average_color(prefix, start=0, end=0, mode='antialias'): end_offset = timeline.size[0] - (frames - end) box = (0, 0, end_offset, height) timeline = timeline.crop(box) - + p = np.asarray(timeline.convert('RGB'), dtype=np.float32) - p = np.sum(p, axis=0) / height #average color per frame + p = np.sum(p, axis=0) / height # average color per frame pixels.append(p) if end and frames >= end: break diff --git a/pandora/archive/models.py b/pandora/archive/models.py index dc0d7b139..2bfd14a5f 100644 --- a/pandora/archive/models.py +++ b/pandora/archive/models.py @@ -24,7 +24,8 @@ from person.models import get_name_sort from chunk import save_chunk import extract -def data_path(f, x): return f.get_path('data.bin') +def data_path(f, x): + return f.get_path('data.bin') class File(models.Model): AV_INFO = ( @@ -46,12 +47,12 @@ class File(models.Model): oshash = models.CharField(max_length=16, unique=True) item = models.ForeignKey("item.Item", related_name='files', null=True) - path = models.CharField(max_length=2048, default="") # canoncial path/file - sort_path = models.CharField(max_length=2048, default="") # sort name + path = models.CharField(max_length=2048, default="") # canoncial path/file + sort_path = models.CharField(max_length=2048, default="") # sort name type = models.CharField(default="", max_length=255) - #editable + # editable extension = models.CharField(default="", max_length=255, null=True) language = models.CharField(default="", max_length=255, null=True) part = models.CharField(default="", max_length=255, null=True) @@ -77,20 +78,20 @@ class File(models.Model): bits_per_pixel = models.FloatField(default=-1) pixels = models.BigIntegerField(default=0) - #This is true if derivative is available or subtitles where uploaded - available = models.BooleanField(default = False) - selected = models.BooleanField(default = False) - uploading = models.BooleanField(default = False) - queued = 
models.BooleanField(default = False) - encoding = models.BooleanField(default = False) - wanted = models.BooleanField(default = False) - failed = models.BooleanField(default = False) + # This is true if derivative is available or subtitles where uploaded + available = models.BooleanField(default=False) + selected = models.BooleanField(default=False) + uploading = models.BooleanField(default=False) + queued = models.BooleanField(default=False) + encoding = models.BooleanField(default=False) + wanted = models.BooleanField(default=False) + failed = models.BooleanField(default=False) is_audio = models.BooleanField(default=False) is_video = models.BooleanField(default=False) is_subtitle = models.BooleanField(default=False) - #upload and data handling + # upload and data handling data = models.FileField(null=True, blank=True, upload_to=data_path) @@ -161,11 +162,11 @@ class File(models.Model): data['isEpisode'] = True data['directorSort'] = [get_name_sort(n) for n in self.item.get('director', [])] data['isEpisode'] = 'isEpisode' in data \ - or data.get('season') != None \ - or data.get('episode') != None \ - or data.get('episodes') not in ([], None) \ - or (data.get('seriesTitle') != None and data.get('episodeTitle') != None) - if data['isEpisode'] and data['seriesYear'] == None: + or data.get('season') is not None \ + or data.get('episode') is not None \ + or data.get('episodes') not in ([], None) \ + or (data.get('seriesTitle') is not None and data.get('episodeTitle') is not None) + if data['isEpisode'] and data['seriesYear'] is None: data['seriesYear'] = data['year'] data['type'] = 'unknown' if 'extension' in data and data['extension']: @@ -178,7 +179,7 @@ class File(models.Model): return data def normalize_path(self): - #FIXME: always use format_path + # FIXME: always use format_path if settings.CONFIG['site']['folderdepth'] == 4: return self.normalize_item_path() else: @@ -193,6 +194,7 @@ class File(models.Model): files = [] volume = self.instances.all()[0].volume + def add_file(f): instance = f.instances.all()[0] files.append(f.get_path_info()) @@ -214,11 +216,11 @@ class File(models.Model): def update_info(self, info, user): if not self.info: - #populate name sort with director if unknown + # populate name sort with director if unknown if info.get('director') and info.get('directorSort'): for name, sortname in zip(info['director'], info['directorSort']): get_name_sort(name, sortname) - #add all files in one folder to same item + # add all files in one folder to same item if self.instances.all().count(): if info.get('isEpisode'): prefix = os.path.splitext(self.instances.all()[0].path)[0] @@ -268,7 +270,7 @@ class File(models.Model): self.available = self.data and True or False else: self.available = not self.uploading and \ - self.streams.filter(source=None, available=True).count() > 0 + self.streams.filter(source=None, available=True).count() super(File, self).save(*args, **kwargs) if update_path: self.path = self.normalize_path() @@ -279,7 +281,7 @@ class File(models.Model): return os.path.join('media', h[:2], h[2:4], h[4:6], h[6:], name) def contents(self): - if self.data != None: + if self.data is not None: self.data.seek(0) return self.data.read() return None @@ -293,7 +295,7 @@ class File(models.Model): if key not in subtitles: subtitles.append(key) srt.append(s) - #subtitles should not overlap + # subtitles should not overlap for i in range(1, len(srt)): if srt[i-1]['out'] > srt[i]['in']: srt[i-1]['out'] = srt[i]['in'] @@ -335,8 +337,7 @@ class File(models.Model): def 
save_chunk_stream(self, chunk, offset, resolution, format, done): if not self.available: - stream, created = Stream.objects.get_or_create( - file=self, resolution=resolution, format=format) + stream, created = Stream.objects.get_or_create(file=self, resolution=resolution, format=format) name = stream.path(stream.name()) def done_cb(): @@ -436,7 +437,7 @@ class File(models.Model): extract.frame_direct(filename, fr.frame.path, pos) if os.path.exists(fr.frame.path): fr.save() - os.chmod(fr.frame.path, 0644) + os.chmod(fr.frame.path, 0o644) self.item.select_frame() def extract_stream(self): @@ -558,7 +559,7 @@ class Volume(models.Model): name = models.CharField(max_length=1024) def __unicode__(self): - return u"%s's %s"% (self.user, self.name) + return u"%s's %s" % (self.user, self.name) def json(self): return { @@ -589,7 +590,7 @@ class Instance(models.Model): volume = models.ForeignKey(Volume, related_name='files') def __unicode__(self): - return u"%s's %s <%s>"% (self.volume.user, self.path, self.file.oshash) + return u"%s's %s <%s>" % (self.volume.user, self.path, self.file.oshash) @property def public_id(self): @@ -618,8 +619,8 @@ class Frame(models.Model): file = models.ForeignKey(File, related_name="frames") position = models.FloatField() frame = models.ImageField(default=None, null=True, upload_to=frame_path) - width = models.IntegerField(default = 0) - height = models.IntegerField(default = 0) + width = models.IntegerField(default=0) + height = models.IntegerField(default=0) def save(self, *args, **kwargs): if self.frame: @@ -636,8 +637,8 @@ def delete_frame(sender, **kwargs): f.frame.delete(save=False) pre_delete.connect(delete_frame, sender=Frame) - -def stream_path(f, x): return f.path(x) +def stream_path(f, x): + return f.path(x) class Stream(models.Model): @@ -696,7 +697,8 @@ class Stream(models.Model): if resolution <= self.resolution: for f in config['formats']: derivative, created = Stream.objects.get_or_create(file=self.file, - resolution=resolution, format=f) + resolution=resolution, + format=f) if created: derivative.source = self derivative.save() diff --git a/pandora/archive/tasks.py b/pandora/archive/tasks.py index 75e1fef95..41172cf79 100644 --- a/pandora/archive/tasks.py +++ b/pandora/archive/tasks.py @@ -24,31 +24,31 @@ def get_or_create_file(volume, f, user, item=None): if item: file.item = item else: - file.item = None #gets pupulated later via update_info + file.item = None # gets pupulated later via update_info file.save() return file def update_or_create_instance(volume, f): - #instance with oshash exists + # instance with oshash exists instance = models.Instance.objects.filter(file__oshash=f['oshash'], volume=volume) - if instance.count()>0: + if instance.count(): instance = instance[0] updated = False for key in _INSTANCE_KEYS: if f[key] != getattr(instance, key): setattr(instance, key, f[key]) - updated=True + updated = True if updated: instance.ignore = False instance.save() instance.file.save() else: instance = models.Instance.objects.filter(path=f['path'], volume=volume) - if instance.count()>0: - #same path, other oshash, keep path/item mapping, remove instance + if instance.count(): + # same path, other oshash, keep path/item mapping, remove instance item = instance[0].file.item instance.delete() - else: #new instance + else: # new instance item = None instance = models.Instance() @@ -144,14 +144,14 @@ def extract_stream(fileId): def extract_derivatives(fileId, rebuild=False): file = models.File.objects.get(id=fileId) streams = 
file.streams.filter(source=None) - if streams.count() > 0: + if streams.count(): streams[0].extract_derivatives(rebuild) return True @task(queue="encoding") def update_stream(id): s = models.Stream.objects.get(pk=id) - if not glob("%s*"%s.timeline_prefix): + if not glob("%s*" % s.timeline_prefix): s.make_timeline() if not s.color: s.cuts = tuple(extract.cuts(s.timeline_prefix)) @@ -161,11 +161,11 @@ def update_stream(id): s.file.selected = True s.file.save() s.file.item.update_timeline() - #make sure all derivatives exist + # make sure all derivatives exist s.extract_derivatives() s.file.item.save() - #update clips + # update clips for c in s.file.item.clips.all(): c.update_calculated_values() c.save() diff --git a/pandora/archive/views.py b/pandora/archive/views.py index 834f2fdb8..fcd5552f7 100644 --- a/pandora/archive/views.py +++ b/pandora/archive/views.py @@ -94,7 +94,7 @@ def update(request, data): files = all_files.filter(file__available=False) if volume: files = files.filter(volume=volume) - response['data']['info'] = [f.file.oshash for f in all_files.filter(Q(file__info='{}')|Q(file__size=0))] + response['data']['info'] = [f.file.oshash for f in all_files.filter(Q(file__info='{}') | Q(file__size=0))] response['data']['data'] = [f.file.oshash for f in files.filter(file__is_video=True, file__available=False, file__wanted=True)] @@ -135,13 +135,13 @@ def upload(request, data=None): f.frames.all().delete() for frame in request.FILES.getlist('frame'): name = frame.name - #float required? + # float required? position = float(os.path.splitext(name)[0]) fr, created = models.Frame.objects.get_or_create(file=f, position=position) if fr.frame: fr.frame.delete() fr.frame.save(name, frame) - os.chmod(fr.frame.path, 0644) + os.chmod(fr.frame.path, 0o644) fr.save() f.item.select_frame() f.item.save() @@ -152,7 +152,7 @@ def upload(request, data=None): f.data.delete() f.data.save('data.raw', request.FILES['file']) f.save() - os.chmod(f.data.path, 0644) + os.chmod(f.data.path, 0o644) item.tasks.load_subtitles.delay(f.item.public_id) response = json_response(text='file saved') else: @@ -235,20 +235,20 @@ def firefogg_upload(request): resolution, format = profile.split('p.') resolution = int(resolution) if resolution not in config['resolutions'] \ - or format not in config['formats']: + or format not in config['formats']: response = json_response(status=500, text='invalid profile') return render_to_json_response(response) - #handle video upload + # handle video upload if request.method == 'POST': - #post next chunk + # post next chunk if 'chunk' in request.FILES and oshash: f = get_object_or_404(models.File, oshash=oshash) if f.editable(request.user): def save_chunk(chunk, offset, done): return f.save_chunk_stream(chunk, offset, resolution, format, done) response = process_chunk(request, save_chunk) - response['resultUrl'] = request.build_absolute_uri('/%s'%f.item.public_id) + response['resultUrl'] = request.build_absolute_uri('/%s' % f.item.public_id) if response.get('done'): f.uploading = False if response['result'] == 1: @@ -258,16 +258,16 @@ def firefogg_upload(request): f.queued = False f.wanted = True f.save() - #FIXME: this fails badly if rabbitmq goes down + # FIXME: this fails badly if rabbitmq goes down try: t = f.process_stream() response['resultUrl'] = t.task_id except: pass return render_to_json_response(response) - #init upload + # init upload elif oshash: - #404 if oshash is not know, files must be registered via update api first + # 404 if oshash is not know, files must be registered 
via update api first
             f = get_object_or_404(models.File, oshash=oshash)
             if f.editable(request.user):
                 f.streams.all().delete()
@@ -309,18 +309,18 @@ def direct_upload(request):
                 file.queued = False
                 file.wanted = True
                 file.save()
-            #try/execpt so it does not fail if rabitmq is down
+            # try/except so it does not fail if rabbitmq is down
             try:
                 t = file.extract_stream()
                 response['resultUrl'] = t.task_id
             except:
                 pass
             return render_to_json_response(response)
-    #init upload
+    # init upload
         else:
             file, created = models.File.objects.get_or_create(oshash=oshash)
             if file.editable(request.user):
-            #remove previous uploads
+                # remove previous uploads
                 if not created:
                     file.streams.all().delete()
                     file.delete_frames()
@@ -353,7 +353,7 @@ def getTaskStatus(request, data):
         }
     notes: To be deprecated, will be wrapped in regular API call.
     '''
-    #FIXME: should check if user has permissions to get status
+    # FIXME: should check if user has permissions to get status
     if 'id' in data:
         task_id = data['id']
     elif 'taskId' in data:
@@ -477,7 +477,7 @@ def editMedia(request, data):
         models.Instance.objects.filter(file__oshash__in=dont_ignore).update(ignore=False)
     if ignore or dont_ignore:
         files = models.File.objects.filter(oshash__in=ignore+dont_ignore)
-        #FIXME: is this to slow to run sync?
+        # FIXME: is this too slow to run sync?
         for i in Item.objects.filter(files__in=files).distinct():
             i.update_selected()
             i.update_wanted()
@@ -624,7 +624,7 @@ def findMedia(request, data):
             qs = qs.values('value').annotate(items=Count('id')).order_by(*order_by)

         if 'positions' in query:
-            #FIXME: this does not scale for larger results
+            # FIXME: this does not scale for larger results
             response['data']['positions'] = {}
             ids = [j['value'] for j in qs]
             response['data']['positions'] = utils.get_positions(ids, query['positions'])
@@ -635,7 +635,7 @@ def findMedia(request, data):
         else:
             response['data']['items'] = qs.count()
     elif 'positions' in query:
-        #FIXME: this does not scale for larger results
+        # FIXME: this does not scale for larger results
         qs = models.File.objects.filter(item__in=query['qs'])
         qs = _order_query(qs, query['sort'])

@@ -651,7 +651,7 @@ def findMedia(request, data):
             keys = query['keys']
             qs = qs[query['range'][0]:query['range'][1]]
             response['data']['items'] = [f.json(keys) for f in qs]
-    else: # otherwise stats
+    else:  # otherwise stats
         items = query['qs']
         files = models.File.objects.filter(item__in=query['qs'])
         response['data']['items'] = files.count()
@@ -659,7 +659,7 @@ def findMedia(request, data):
 actions.register(findMedia)


-def parsePath(request, data): #parse path and return info
+def parsePath(request, data):  # parse path and return info
     '''
         Parses a path
         takes {