add decodebin2 caps, stereo audio timeline
parent 38261db238 · commit 716612999c
2 changed files with 44 additions and 19 deletions
|
@@ -13,10 +13,11 @@ import gst
 
 class Audio(gst.Pipeline):
-    _data = []
+    _left = []
+    _right = []
     position = 0
 
-    def __init__(self, uri, samplerate=22050, channels=1):
+    def __init__(self, uri, samplerate=22050, channels=2):
         gst.Pipeline.__init__(self)
         self.duration = -1
         self.framerate = 25
 
@@ -28,6 +29,8 @@ class Audio(gst.Pipeline):
         self.src = gst.element_factory_make("filesrc")
         self.src.props.location = self.uri
         self.sbin = gst.element_factory_make("decodebin2")
+        self.sbin.props.caps = gst.Caps("audio/x-raw-int;audio/x-raw-float")
+        self.sbin.props.expose_all_streams = False
 
         self.queue = gst.element_factory_make("queue")
         self.rate = gst.element_factory_make("audioresample")
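For context, a minimal sketch of the caps-restriction pattern this hunk introduces, written against pygst / GStreamer 0.10 like the code above; the input path and the fakesink are placeholders, not part of the commit:

    import gst

    pipeline = gst.Pipeline()

    src = gst.element_factory_make("filesrc")
    src.props.location = "/tmp/input.ogg"  # placeholder path
    dec = gst.element_factory_make("decodebin2")
    # Only expose pads whose caps intersect this set, i.e. decoded raw audio...
    dec.props.caps = gst.Caps("audio/x-raw-int;audio/x-raw-float")
    # ...and drop the remaining (e.g. video) streams instead of exposing them.
    dec.props.expose_all_streams = False
    sink = gst.element_factory_make("fakesink")

    pipeline.add(src, dec, sink)
    src.link(dec)

    def on_pad_added(element, pad):
        # Fires once per exposed stream; with the settings above that is audio only.
        pad.link(sink.get_pad("sink"))

    dec.connect("pad-added", on_pad_added)
    pipeline.set_state(gst.STATE_PLAYING)

Without the caps and expose-all-streams settings, decodebin2 would also expose any video streams in the file, and the audio-only pipeline would have unlinked pads.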
@@ -60,33 +63,53 @@ class Audio(gst.Pipeline):
 
     def _data_callback(self, sink, buff, pad):
         timestamp = buff.timestamp
-        samples = buff.size // 2
+        samples = int(buff.size // 2 / self.channels)
         fmt = "<" + str(samples) + "h"
-        data = unpack(fmt, buff.data)
-        data = self._data + list(data)
 
-        samples_per_pixel = self.samplerate / self.framerate
-        while len(data) > samples_per_pixel:
-            pixel = data[:samples_per_pixel]
+        left = unpack(fmt, buff.data[:2*samples])
+        left = self._left + list(left)
+
+        right = unpack(fmt, buff.data[2*samples:])
+        right = self._right + list(right)
+
+        samples_per_pixel = int(self.samplerate / self.framerate)
+        while len(left) > samples_per_pixel:
+            pixel = left[:samples_per_pixel]
             pixel = np.asarray(pixel)
-            data = data[samples_per_pixel:]
+            left = left[samples_per_pixel:]
 
-            p = np.sum(np.abs(pixel)) / samples_per_pixel
-            p = p / 256
-            height = int((p * self.tile_height) / 256) * 2
-            if p: p += 20
-            p = (p, p, p, 255)
+            l = np.sum(np.abs(pixel)) / samples_per_pixel
+            l = int(l / 256)
+            lheight = int((l * self.tile_height) / 256) * 2
+            if l: l += 20
+            l = (l, l, l, 255)
+
+            pixel = right[:samples_per_pixel]
+            pixel = np.asarray(pixel)
+            right = right[samples_per_pixel:]
+
+            r = np.sum(np.abs(pixel)) / samples_per_pixel
+            r = int(r / 256)
+            rheight = int((r * self.tile_height) / 256) * 2
+            if r: r += 20
+            r = (r, r, r, 255)
+
             tile = int(math.floor(float(self.position) / self.input_tile_width))
-            tilePos = self.position - (tile * self.input_tile_width)
+            tilePos = int(self.position - (tile * self.input_tile_width))
 
-            crop = (self.tile_height-height) / 2
+            lcrop = int((self.tile_height-lheight) / 2)
+            rcrop = int((self.tile_height-rheight) / 2)
 
-            for i in range(crop, self.tile_height-crop):
-                self.tiles[tile].putpixel((tilePos, i), p)
+            for i in range(lcrop, int(self.tile_height/2)):
+                self.tiles[tile].putpixel((tilePos, i), l)
+
+            for i in range(int(self.tile_height/2), self.tile_height-rcrop):
+                self.tiles[tile].putpixel((tilePos, i), r)
 
             self.position += 1
 
-        self._data = data
+        self._left = left
+        self._right = right
 
         if self.mainloop and timestamp >= self.duration:
             self.done()
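The hunk above splits the buffer into a left and a right half, then repeats the old mono arithmetic once per channel: average the absolute sample values over one video frame's worth of audio, scale that to a grey level and a bar height, and draw the left bar into the top half of the tile and the right bar into the bottom half. A standalone sketch of that per-column arithmetic for one channel, with illustrative values (not code from this commit):

    import numpy as np
    from struct import unpack

    samplerate, framerate, tile_height = 22050, 25, 64
    samples_per_pixel = int(samplerate / framerate)   # audio samples per video frame

    # Pretend this is one channel's worth of signed 16-bit samples from a buffer.
    raw = np.random.randint(-32768, 32767, samples_per_pixel, dtype=np.int16).tobytes()
    channel = list(unpack("<%dh" % samples_per_pixel, raw))

    pixel = np.asarray(channel[:samples_per_pixel])
    amplitude = np.sum(np.abs(pixel)) / samples_per_pixel  # mean absolute amplitude
    grey = int(amplitude / 256)                             # scale 0..32767 down to roughly 0..128
    height = int((grey * tile_height) / 256) * 2            # bar height in pixels, kept even
    if grey:
        grey += 20                                          # lift quiet-but-present audio above black
    colour = (grey, grey, grey, 255)                        # RGBA value written into the tile image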
@@ -24,6 +24,8 @@ class Video(gst.Pipeline):
         self.src = gst.element_factory_make("filesrc")
         self.src.props.location = uri
         self.sbin = gst.element_factory_make("decodebin2")
+        self.sbin.props.caps = gst.Caps("video/x-raw-yuv;video/x-raw-rgb")
+        self.sbin.props.expose_all_streams = False
 
         self.csp = gst.element_factory_make("ffmpegcolorspace")
         self.scale = gst.element_factory_make("videoscale")
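The video pipeline gets the mirror-image change: decodebin2 is limited to raw video caps so audio streams are ignored. A rough sketch of how such a caps-limited decodebin2 feeds the colorspace/scale chain shown above (pygst 0.10; the input path and fakesink are placeholders):

    import gst

    pipeline = gst.Pipeline()

    src = gst.element_factory_make("filesrc")
    src.props.location = "/tmp/input.ogg"  # placeholder path
    dec = gst.element_factory_make("decodebin2")
    dec.props.caps = gst.Caps("video/x-raw-yuv;video/x-raw-rgb")
    dec.props.expose_all_streams = False
    csp = gst.element_factory_make("ffmpegcolorspace")
    scale = gst.element_factory_make("videoscale")
    sink = gst.element_factory_make("fakesink")

    pipeline.add(src, dec, csp, scale, sink)
    src.link(dec)
    gst.element_link_many(csp, scale, sink)

    def on_pad_added(element, pad):
        # Only raw video pads are exposed, so this links the decoded video stream.
        pad.link(csp.get_pad("sink"))

    dec.connect("pad-added", on_pad_added)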