/* GStreamer
 *
 * Copyright (C) 2014 Samsung Electronics. All rights reserved.
 *   Author: Thiago Santos
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <string.h>

#include <gst/gst.h>
#include <gst/check/gstcheck.h>
#include <gst/check/gstharness.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw")
    );

#define RESTRICTED_CAPS_WIDTH 800
#define RESTRICTED_CAPS_HEIGHT 600
#define RESTRICTED_CAPS_FPS_N 30
#define RESTRICTED_CAPS_FPS_D 1
static GstStaticPadTemplate sinktemplate_restricted =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, width=(int)800, height=(int)600,"
        " framerate=(fraction)30/1")
    );

static GstStaticPadTemplate sinktemplate_with_range =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, width=(int)[1,800], height=(int)[1,600],"
        " framerate=(fraction)[1/1, 30/1]")
    );

static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-test-custom")
    );

static GstPad *mysrcpad, *mysinkpad;
static GstElement *dec;
static GList *events = NULL;

#define TEST_VIDEO_WIDTH 640
#define TEST_VIDEO_HEIGHT 480
#define TEST_VIDEO_FPS_N 30
#define TEST_VIDEO_FPS_D 1

#define GST_VIDEO_DECODER_TESTER_TYPE gst_video_decoder_tester_get_type()
static GType gst_video_decoder_tester_get_type (void);

typedef struct _GstVideoDecoderTester GstVideoDecoderTester;
typedef struct _GstVideoDecoderTesterClass GstVideoDecoderTesterClass;

struct _GstVideoDecoderTester
{
  GstVideoDecoder parent;

  guint64 last_buf_num;
  guint64 last_kf_num;
  /* when FALSE, set_format does not configure an output state, so the base
   * class has to fall back to its default output caps (see the
   * default-caps-on-gap test below) */
  gboolean set_output_state;
};

struct _GstVideoDecoderTesterClass
{
  GstVideoDecoderClass parent_class;
};

G_DEFINE_TYPE (GstVideoDecoderTester, gst_video_decoder_tester,
    GST_TYPE_VIDEO_DECODER);

static gboolean
gst_video_decoder_tester_start (GstVideoDecoder * dec)
{
  GstVideoDecoderTester *dectester = (GstVideoDecoderTester *) dec;

  dectester->last_buf_num = -1;
  dectester->last_kf_num = -1;
  dectester->set_output_state = TRUE;

  return TRUE;
}

static gboolean
gst_video_decoder_tester_stop (GstVideoDecoder * dec)
{
  return TRUE;
}

static gboolean
gst_video_decoder_tester_flush (GstVideoDecoder * dec)
{
  GstVideoDecoderTester *dectester = (GstVideoDecoderTester *) dec;

  dectester->last_buf_num = -1;
  dectester->last_kf_num = -1;

  return TRUE;
}

static gboolean
gst_video_decoder_tester_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state)
{
  GstVideoDecoderTester *dectester = (GstVideoDecoderTester *) dec;

  if (dectester->set_output_state) {
    GstVideoCodecState *res = gst_video_decoder_set_output_state (dec,
        GST_VIDEO_FORMAT_GRAY8, TEST_VIDEO_WIDTH, TEST_VIDEO_HEIGHT, NULL);
    gst_video_codec_state_unref (res);
  }

  return TRUE;
}
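
/* The tester "decodes" a frame only when it can, mimicking a decoder with
 * inter-frame dependencies: a frame is produced if it is a keyframe (no
 * DELTA_UNIT flag) or if its number directly follows the last decoded one.
 * The output is a GRAY8 frame of TEST_VIDEO_WIDTH x TEST_VIDEO_HEIGHT with
 * the input's 64-bit counter copied into the first bytes, so the tests can
 * track which input produced which output. Frames that cannot be decoded are
 * simply unreffed and no output is pushed for them. */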

static GstFlowReturn
gst_video_decoder_tester_handle_frame (GstVideoDecoder * dec,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoderTester *dectester = (GstVideoDecoderTester *) dec;
  guint64 input_num;
  guint8 *data;
  gint size;
  GstMapInfo map;

  gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);

  input_num = *((guint64 *) map.data);

  if ((input_num == dectester->last_buf_num + 1
          && dectester->last_buf_num != -1)
      || !GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
          GST_BUFFER_FLAG_DELTA_UNIT)) {

    /* the output is gray8 */
    size = TEST_VIDEO_WIDTH * TEST_VIDEO_HEIGHT;
    data = g_malloc0 (size);

    memcpy (data, map.data, sizeof (guint64));

    frame->output_buffer = gst_buffer_new_wrapped (data, size);
    frame->pts = GST_BUFFER_PTS (frame->input_buffer);
    frame->duration = GST_BUFFER_DURATION (frame->input_buffer);
    dectester->last_buf_num = input_num;
    if (!GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
            GST_BUFFER_FLAG_DELTA_UNIT))
      dectester->last_kf_num = input_num;
  }

  gst_buffer_unmap (frame->input_buffer, &map);

  if (frame->output_buffer)
    return gst_video_decoder_finish_frame (dec, frame);
  gst_video_codec_frame_unref (frame);
  return GST_FLOW_OK;
}

static void
gst_video_decoder_tester_class_init (GstVideoDecoderTesterClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);

  static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS,
      GST_STATIC_CAPS ("video/x-test-custom"));

  static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
      GST_PAD_SRC, GST_PAD_ALWAYS,
      GST_STATIC_CAPS ("video/x-raw"));

  gst_element_class_add_static_pad_template (element_class, &sink_templ);
  gst_element_class_add_static_pad_template (element_class, &src_templ);

  gst_element_class_set_metadata (element_class,
      "VideoDecoderTester", "Decoder/Video", "yep", "me");

  decoder_class->start = gst_video_decoder_tester_start;
  decoder_class->stop = gst_video_decoder_tester_stop;
  decoder_class->flush = gst_video_decoder_tester_flush;
  decoder_class->handle_frame = gst_video_decoder_tester_handle_frame;
  decoder_class->set_format = gst_video_decoder_tester_set_format;
}

static void
gst_video_decoder_tester_init (GstVideoDecoderTester * tester)
{
}

static gboolean
_mysinkpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  events = g_list_append (events, event);
  return TRUE;
}

static void
setup_videodecodertester (GstStaticPadTemplate * sinktmpl,
    GstStaticPadTemplate * srctmpl)
{
  if (sinktmpl == NULL)
    sinktmpl = &sinktemplate;
  if (srctmpl == NULL)
    srctmpl = &srctemplate;

  dec = g_object_new (GST_VIDEO_DECODER_TESTER_TYPE, NULL);
  mysrcpad = gst_check_setup_src_pad (dec, srctmpl);
  mysinkpad = gst_check_setup_sink_pad (dec, sinktmpl);

  gst_pad_set_event_function (mysinkpad, _mysinkpad_event);
}

static void
cleanup_videodecodertest (void)
{
  gst_pad_set_active (mysrcpad, FALSE);
  gst_pad_set_active (mysinkpad, FALSE);
  gst_check_teardown_src_pad (dec);
  gst_check_teardown_sink_pad (dec);
  gst_check_teardown_element (dec);

  g_list_free_full (events, (GDestroyNotify) gst_event_unref);
  events = NULL;
}

static GstBuffer *
create_test_buffer (guint64 num)
{
  GstBuffer *buffer;
  guint64 *data = g_malloc (sizeof (guint64));

  *data = num;

  buffer = gst_buffer_new_wrapped (data, sizeof (guint64));

  GST_BUFFER_PTS (buffer) =
      gst_util_uint64_scale_round (num, GST_SECOND * TEST_VIDEO_FPS_D,
      TEST_VIDEO_FPS_N);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
      TEST_VIDEO_FPS_N);

  return buffer;
}
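
/* Timestamp arithmetic used throughout these tests, spelled out for clarity
 * (a plain consequence of create_test_buffer() above at 30/1 fps):
 *   buffer 0  -> PTS 0
 *   buffer 1  -> PTS 1/30 s (~33.3 ms)
 *   buffer 30 -> PTS 1 s
 * and every buffer has a duration of 1/30 s. Decoded output buffers are
 * collected in the global 'buffers' list provided by gstcheck's sink pad
 * chain function, while events reaching the sink pad are collected in the
 * local 'events' list via _mysinkpad_event(). */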

static void
send_startup_events (void)
{
  GstCaps *caps;

  fail_unless (gst_pad_push_event (mysrcpad,
          gst_event_new_stream_start ("randomvalue")));

  /* push caps */
  caps =
      gst_caps_new_simple ("video/x-test-custom", "width", G_TYPE_INT,
      TEST_VIDEO_WIDTH, "height", G_TYPE_INT, TEST_VIDEO_HEIGHT, "framerate",
      GST_TYPE_FRACTION, TEST_VIDEO_FPS_N, TEST_VIDEO_FPS_D, NULL);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_caps (caps)));
  gst_caps_unref (caps);
}

#define NUM_BUFFERS 1000
GST_START_TEST (videodecoder_playback)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GList *iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers, the data is actually a number so we can track them */
  for (i = 0; i < NUM_BUFFERS; i++) {
    buffer = create_test_buffer (i);

    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  /* check that all buffers were received on our sink pad */
  fail_unless (g_list_length (buffers) == NUM_BUFFERS);
  i = 0;
  for (iter = buffers; iter; iter = g_list_next (iter)) {
    GstMapInfo map;
    guint64 num;

    buffer = iter->data;

    gst_buffer_map (buffer, &map, GST_MAP_READ);

    num = *(guint64 *) map.data;
    fail_unless (i == num);
    fail_unless (GST_BUFFER_PTS (buffer) == gst_util_uint64_scale_round (i,
            GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
    fail_unless (GST_BUFFER_DURATION (buffer) ==
        gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
            TEST_VIDEO_FPS_N));

    gst_buffer_unmap (buffer, &map);
    i++;
  }

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

GST_START_TEST (videodecoder_playback_with_events)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint i;
  GList *iter;
  GList *events_iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers, the data is actually a number so we can track them */
  for (i = 0; i < NUM_BUFFERS; i++) {
    if (i % 10 == 0) {
      GstTagList *tags;

      tags = gst_tag_list_new (GST_TAG_TRACK_NUMBER, i, NULL);
      fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_tag (tags)));
    } else {
      buffer = create_test_buffer (i);

      fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
    }
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  events_iter = events;
  /* make sure the usual events have been received */
  {
    GstEvent *sstart = events_iter->data;
    fail_unless (GST_EVENT_TYPE (sstart) == GST_EVENT_STREAM_START);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *caps_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (caps_event) == GST_EVENT_CAPS);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *segment_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (segment_event) == GST_EVENT_SEGMENT);
    events_iter = g_list_next (events_iter);
  }

  /* check that all buffers were received on our sink pad */
  iter = buffers;
  for (i = 0; i < NUM_BUFFERS; i++) {
    if (i % 10 == 0) {
      guint tag_v;
      GstEvent *tag_event = events_iter->data;
      GstTagList *taglist = NULL;

      gst_event_parse_tag (tag_event, &taglist);

      fail_unless (gst_tag_list_get_uint (taglist, GST_TAG_TRACK_NUMBER,
              &tag_v));
      fail_unless (tag_v == i);

      events_iter = g_list_next (events_iter);
    } else {
      GstMapInfo map;
      guint64 num;

      buffer = iter->data;

      gst_buffer_map (buffer, &map, GST_MAP_READ);

      num = *(guint64 *) map.data;
      fail_unless (i == num);
      fail_unless (GST_BUFFER_PTS (buffer) == gst_util_uint64_scale_round (i,
              GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
      fail_unless (GST_BUFFER_DURATION (buffer) ==
          gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
              TEST_VIDEO_FPS_N));

      gst_buffer_unmap (buffer, &map);
      iter = g_list_next (iter);
    }
  }
  fail_unless (iter == NULL);

  /* check that EOS was received */
  {
    GstEvent *eos = events_iter->data;
    fail_unless (GST_EVENT_TYPE (eos) == GST_EVENT_EOS);
    events_iter = g_list_next (events_iter);
  }

  fail_unless (events_iter == NULL);

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

GST_START_TEST (videodecoder_flush_events)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint i;
  GList *events_iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers, the data is actually a number so we can track them */
  for (i = 0; i < NUM_BUFFERS; i++) {
    if (i % 10 == 0) {
      GstTagList *tags;

      tags = gst_tag_list_new (GST_TAG_TRACK_NUMBER, i, NULL);
      fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_tag (tags)));
    } else {
      buffer = create_test_buffer (i);

      fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
    }
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  events_iter = events;
  /* make sure the usual events have been received */
  {
    GstEvent *sstart = events_iter->data;
    fail_unless (GST_EVENT_TYPE (sstart) == GST_EVENT_STREAM_START);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *caps_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (caps_event) == GST_EVENT_CAPS);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *segment_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (segment_event) == GST_EVENT_SEGMENT);
    events_iter = g_list_next (events_iter);
  }

  /* check that EOS was received */
  fail_unless (GST_PAD_IS_EOS (mysrcpad));
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_flush_start ()));
  fail_unless (GST_PAD_IS_EOS (mysrcpad));

  /* Check that we have tags */
  {
    GstEvent *tags = gst_pad_get_sticky_event (mysrcpad, GST_EVENT_TAG, 0);

    fail_unless (tags != NULL);
    gst_event_unref (tags);
  }

  /* Check that we still have a segment set */
  {
    GstEvent *segment =
        gst_pad_get_sticky_event (mysrcpad, GST_EVENT_SEGMENT, 0);

    fail_unless (segment != NULL);
    gst_event_unref (segment);
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_flush_stop (TRUE)));
  fail_if (GST_PAD_IS_EOS (mysrcpad));

  /* Check that the segment was flushed on FLUSH_STOP */
  {
    GstEvent *segment =
        gst_pad_get_sticky_event (mysrcpad, GST_EVENT_SEGMENT, 0);

    fail_unless (segment == NULL);
  }

  /* Check the tags were not lost on FLUSH_STOP */
  {
    GstEvent *tags = gst_pad_get_sticky_event (mysrcpad, GST_EVENT_TAG, 0);

    fail_unless (tags != NULL);
    gst_event_unref (tags);
  }

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

/* Check https://bugzilla.gnome.org/show_bug.cgi?id=721835 */
GST_START_TEST (videodecoder_playback_first_frames_not_decoded)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i = 0;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push a buffer, to have the segment attached to it.
   * unfortunately this buffer can't be decoded as it isn't a keyframe */
  buffer = create_test_buffer (i++);
  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);

  /* now be evil and ask this frame to be released
   * this frame has the segment event attached to it, and the
   * segment shouldn't disappear with it */
  {
    GList *l, *ol;

    ol = l = gst_video_decoder_get_frames (GST_VIDEO_DECODER (dec));
    fail_unless (g_list_length (l) == 1);
    while (l) {
      GstVideoCodecFrame *tmp = l->data;
      gst_video_decoder_release_frame (GST_VIDEO_DECODER (dec), tmp);
      l = g_list_next (l);
    }
    g_list_free (ol);
  }

  buffer = create_test_buffer (i++);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  /* the second buffer is a keyframe, so it must have been decoded and pushed
   * together with the pending segment event */
  fail_unless (g_list_length (buffers) == 1);

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

GST_START_TEST (videodecoder_buffer_after_segment)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GstClockTime pos;
  GList *iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.stop = GST_SECOND;
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers until we fill our segment */
  i = 0;
  pos = 0;
  while (pos < GST_SECOND) {
    buffer = create_test_buffer (i++);
    pos = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  }

  /* pushing the next buffer should result in EOS */
  buffer = create_test_buffer (i);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_EOS);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  /* check that all buffers were received on our sink pad */
  fail_unless (g_list_length (buffers) == i);
  i = 0;
  for (iter = buffers; iter; iter = g_list_next (iter)) {
    GstMapInfo map;
    guint64 num;

    buffer = iter->data;

    gst_buffer_map (buffer, &map, GST_MAP_READ);

    num = *(guint64 *) map.data;
    fail_unless (i == num);
    fail_unless (GST_BUFFER_PTS (buffer) == gst_util_uint64_scale_round (i,
            GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
    fail_unless (GST_BUFFER_DURATION (buffer) ==
        gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
            TEST_VIDEO_FPS_N));

    gst_buffer_unmap (buffer, &map);
    i++;
  }

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

/* make sure that the segment event is pushed before the gap */
GST_START_TEST (videodecoder_first_data_is_gap)
{
  GstSegment segment;
  GList *events_iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push a gap */
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_gap (0,
              GST_SECOND)));

  events_iter = events;
  /* make sure the usual events have been received */
  {
    GstEvent *sstart = events_iter->data;
    fail_unless (GST_EVENT_TYPE (sstart) == GST_EVENT_STREAM_START);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *caps_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (caps_event) == GST_EVENT_CAPS);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *segment_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (segment_event) == GST_EVENT_SEGMENT);
    events_iter = g_list_next (events_iter);
  }

  /* Make sure the gap was pushed */
  {
    GstEvent *gap = events_iter->data;
    fail_unless (GST_EVENT_TYPE (gap) == GST_EVENT_GAP);
    events_iter = g_list_next (events_iter);
  }
  fail_unless (events_iter == NULL);

  cleanup_videodecodertest ();
}

GST_END_TEST;
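
/* Reverse-playback behaviour exercised by the next two tests: with a
 * negative segment rate the base class gathers input up to a DISCONT
 * boundary (one group of 10 buffers here), decodes that group and then sends
 * the decoded frames downstream in reverse order. The expectation encoded
 * here is therefore that the sink pad sees the buffer numbers in strictly
 * descending order. */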

GST_START_TEST (videodecoder_backwards_playback)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GList *iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment with -1 rate */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.rate = -1.0;
  segment.stop = (NUM_BUFFERS + 1) * gst_util_uint64_scale_round (GST_SECOND,
      TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers, the data is actually a number so we can track them */
  i = NUM_BUFFERS;
  while (i > 0) {
    gint target = i;
    gint j;

    /* push groups of 10 buffers
     * every number that is divisible by 10 is set as a discont,
     * if it is divisible by 20 it is also a keyframe
     *
     * The logic here is that the current i is the target, and then
     * it pushes buffers from 'target - 10' up to target. */
    for (j = MAX (target - 10, 0); j < target; j++) {
      GstBuffer *buffer = create_test_buffer (j);

      if (j % 10 == 0)
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
      if (j % 20 != 0)
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

      fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
      i--;
    }
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  /* check that all buffers were received on our sink pad */
  fail_unless (g_list_length (buffers) == NUM_BUFFERS);
  i = NUM_BUFFERS - 1;
  for (iter = buffers; iter; iter = g_list_next (iter)) {
    GstMapInfo map;
    guint64 num;

    buffer = iter->data;

    gst_buffer_map (buffer, &map, GST_MAP_READ);

    num = *(guint64 *) map.data;
    fail_unless (i == num);
    fail_unless (GST_BUFFER_PTS (buffer) == gst_util_uint64_scale_round (i,
            GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
    fail_unless (GST_BUFFER_DURATION (buffer) ==
        gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
            TEST_VIDEO_FPS_N));

    gst_buffer_unmap (buffer, &map);
    i--;
  }

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

GST_START_TEST (videodecoder_backwards_buffer_after_segment)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GstClockTime pos;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment with -1 rate */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.rate = -1.0;
  segment.start = GST_SECOND;
  segment.stop = (NUM_BUFFERS + 1) * gst_util_uint64_scale_round (GST_SECOND,
      TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push buffers, the data is actually a number so we can track them */
  i = NUM_BUFFERS;
  pos = segment.stop;
  while (pos >= GST_SECOND) {
    gint target = i;
    gint j;

    g_assert (i > 0);

    /* push groups of 10 buffers
     * every number that is divisible by 10 is set as a discont,
     * if it is divisible by 20 it is also a keyframe
     *
     * The logic here is that the current i is the target, and then
     * it pushes buffers from 'target - 10' up to target. */
    for (j = MAX (target - 10, 0); j < target; j++) {
      buffer = create_test_buffer (j);
      pos = MIN (GST_BUFFER_TIMESTAMP (buffer), pos);

      if (j % 10 == 0)
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
      if (j % 20 != 0)
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

      fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
      i--;
    }
  }

  /* push a discont buffer so it flushes the decoding */
  buffer = create_test_buffer (i - 10);
  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_EOS);

  /* check that the last received buffer doesn't contain a
   * timestamp before the segment */
  buffer = g_list_last (buffers)->data;
  fail_unless (GST_BUFFER_TIMESTAMP (buffer) <= segment.start
      && GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) >
      segment.start);

  /* flush our decoded data queue */
  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));
  fail_unless (buffers == NULL);

  cleanup_videodecodertest ();
}

GST_END_TEST;

GST_START_TEST (videodecoder_query_caps_with_fixed_caps_peer)
{
  GstCaps *caps;
  GstCaps *filter;
  GstStructure *structure;
  gint width, height, fps_n, fps_d;

  setup_videodecodertester (&sinktemplate_restricted, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  caps = gst_pad_peer_query_caps (mysrcpad, NULL);
  fail_unless (caps != NULL);

  structure = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_get_int (structure, "width", &width));
  fail_unless (gst_structure_get_int (structure, "height", &height));
  fail_unless (gst_structure_get_fraction (structure, "framerate", &fps_n,
          &fps_d));
  /* match our restricted caps values */
  fail_unless (width == RESTRICTED_CAPS_WIDTH);
  fail_unless (height == RESTRICTED_CAPS_HEIGHT);
  fail_unless (fps_n == RESTRICTED_CAPS_FPS_N);
  fail_unless (fps_d == RESTRICTED_CAPS_FPS_D);
  gst_caps_unref (caps);

  filter = gst_caps_new_simple ("video/x-custom-test", "width", G_TYPE_INT,
      1000, "height", G_TYPE_INT, 1000, "framerate", GST_TYPE_FRACTION,
      1000, 1, NULL);
  caps = gst_pad_peer_query_caps (mysrcpad, filter);
  fail_unless (caps != NULL);
  fail_unless (gst_caps_is_empty (caps));
  gst_caps_unref (caps);
  gst_caps_unref (filter);

  cleanup_videodecodertest ();
}

GST_END_TEST;

static void
_get_int_range (GstStructure * s, const gchar * field, gint * min_v,
    gint * max_v)
{
  const GValue *value;

  value = gst_structure_get_value (s, field);
  fail_unless (value != NULL);
  fail_unless (GST_VALUE_HOLDS_INT_RANGE (value));

  *min_v = gst_value_get_int_range_min (value);
  *max_v = gst_value_get_int_range_max (value);
}

static void
_get_fraction_range (GstStructure * s, const gchar * field, gint * fps_n_min,
    gint * fps_d_min, gint * fps_n_max, gint * fps_d_max)
{
  const GValue *value;
  const GValue *min_v, *max_v;

  value = gst_structure_get_value (s, field);
  fail_unless (value != NULL);
  fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (value));

  min_v = gst_value_get_fraction_range_min (value);
  fail_unless (GST_VALUE_HOLDS_FRACTION (min_v));
  *fps_n_min = gst_value_get_fraction_numerator (min_v);
  *fps_d_min = gst_value_get_fraction_denominator (min_v);

  max_v = gst_value_get_fraction_range_max (value);
  fail_unless (GST_VALUE_HOLDS_FRACTION (max_v));
  *fps_n_max = gst_value_get_fraction_numerator (max_v);
  *fps_d_max = gst_value_get_fraction_denominator (max_v);
}
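
/* Like videodecoder_query_caps_with_fixed_caps_peer above, the next test
 * checks how a downstream peer's restrictions show up in a caps query on the
 * decoder's sink side, this time with ranges: the [1,800] x [1,600] and
 * [1/1,30/1] limits of sinktemplate_with_range are expected back as int and
 * fraction ranges, extracted with the two helpers above. */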

GST_START_TEST (videodecoder_query_caps_with_range_caps_peer)
{
  GstCaps *caps;
  GstCaps *filter;
  GstStructure *structure;
  gint width, height, fps_n, fps_d;
  gint width_min, height_min, fps_n_min, fps_d_min;
  gint width_max, height_max, fps_n_max, fps_d_max;

  setup_videodecodertester (&sinktemplate_with_range, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  caps = gst_pad_peer_query_caps (mysrcpad, NULL);
  fail_unless (caps != NULL);

  structure = gst_caps_get_structure (caps, 0);
  _get_int_range (structure, "width", &width_min, &width_max);
  _get_int_range (structure, "height", &height_min, &height_max);
  _get_fraction_range (structure, "framerate", &fps_n_min, &fps_d_min,
      &fps_n_max, &fps_d_max);
  fail_unless (width_min == 1);
  fail_unless (width_max == RESTRICTED_CAPS_WIDTH);
  fail_unless (height_min == 1);
  fail_unless (height_max == RESTRICTED_CAPS_HEIGHT);
  fail_unless (fps_n_min == 1);
  fail_unless (fps_d_min == 1);
  fail_unless (fps_n_max == RESTRICTED_CAPS_FPS_N);
  fail_unless (fps_d_max == RESTRICTED_CAPS_FPS_D);
  gst_caps_unref (caps);

  /* query with a fixed filter */
  filter = gst_caps_new_simple ("video/x-test-custom", "width", G_TYPE_INT,
      RESTRICTED_CAPS_WIDTH, "height", G_TYPE_INT, RESTRICTED_CAPS_HEIGHT,
      "framerate", GST_TYPE_FRACTION, RESTRICTED_CAPS_FPS_N,
      RESTRICTED_CAPS_FPS_D, NULL);
  caps = gst_pad_peer_query_caps (mysrcpad, filter);
  fail_unless (caps != NULL);
  structure = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_get_int (structure, "width", &width));
  fail_unless (gst_structure_get_int (structure, "height", &height));
  fail_unless (gst_structure_get_fraction (structure, "framerate", &fps_n,
          &fps_d));
  fail_unless (width == RESTRICTED_CAPS_WIDTH);
  fail_unless (height == RESTRICTED_CAPS_HEIGHT);
  fail_unless (fps_n == RESTRICTED_CAPS_FPS_N);
  fail_unless (fps_d == RESTRICTED_CAPS_FPS_D);
  gst_caps_unref (caps);
  gst_caps_unref (filter);

  /* query with a fixed filter that will lead to empty result */
  filter = gst_caps_new_simple ("video/x-test-custom", "width", G_TYPE_INT,
      1000, "height", G_TYPE_INT, 1000, "framerate", GST_TYPE_FRACTION,
      1000, 1, NULL);
  caps = gst_pad_peer_query_caps (mysrcpad, filter);
  fail_unless (caps != NULL);
  fail_unless (gst_caps_is_empty (caps));
  gst_caps_unref (caps);
  gst_caps_unref (filter);

  cleanup_videodecodertest ();
}

GST_END_TEST;

#define GETCAPS_CAPS_STR "video/x-test-custom, somefield=(string)getcaps"
static GstCaps *
_custom_video_decoder_getcaps (GstVideoDecoder * dec, GstCaps * filter)
{
  return gst_caps_from_string (GETCAPS_CAPS_STR);
}

GST_START_TEST (videodecoder_query_caps_with_custom_getcaps)
{
  GstCaps *caps;
  GstVideoDecoderClass *klass;
  GstCaps *expected_caps;

  setup_videodecodertester (&sinktemplate_restricted, NULL);

  klass = GST_VIDEO_DECODER_CLASS (GST_VIDEO_DECODER_GET_CLASS (dec));
  klass->getcaps = _custom_video_decoder_getcaps;

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  caps = gst_pad_peer_query_caps (mysrcpad, NULL);
  fail_unless (caps != NULL);

  expected_caps = gst_caps_from_string (GETCAPS_CAPS_STR);
  fail_unless (gst_caps_is_equal (expected_caps, caps));
  gst_caps_unref (expected_caps);
  gst_caps_unref (caps);

  cleanup_videodecodertest ();
}

GST_END_TEST;

static const gchar *test_default_caps[][2] = {
  {"video/x-test-custom",
      "video/x-raw, format=I420, width=1280, height=720, framerate=0/1, multiview-mode=mono"},
  {"video/x-test-custom, width=1000",
      "video/x-raw, format=I420, width=1000, height=720, framerate=0/1, multiview-mode=mono"},
  {"video/x-test-custom, height=500",
      "video/x-raw, format=I420, width=1280, height=500, framerate=0/1, multiview-mode=mono"},
  {"video/x-test-custom, framerate=10/1",
      "video/x-raw, format=I420, width=1280, height=720, framerate=10/1, multiview-mode=mono"},
  {"video/x-test-custom, pixel-aspect-ratio=2/1",
      "video/x-raw, format=I420, width=1280, height=720, framerate=0/1,"
        "pixel-aspect-ratio=2/1, multiview-mode=mono"}
};

GST_START_TEST (videodecoder_default_caps_on_gap_before_buffer)
{
  GstVideoDecoderTester *dec =
      g_object_new (GST_VIDEO_DECODER_TESTER_TYPE, NULL);
  GstHarness *h =
      gst_harness_new_with_element (GST_ELEMENT (dec), "sink", "src");
  GstEvent *event;
  GstCaps *caps1, *caps2;
  GstVideoInfo info1, info2;

  /* Don't set output state since we want to trigger the default output caps */
  dec->set_output_state = FALSE;

  gst_harness_set_src_caps_str (h, test_default_caps[__i__][0]);

  fail_unless (gst_harness_push_event (h, gst_event_new_gap (0, GST_SECOND)));

  fail_unless_equals_int (gst_harness_events_received (h), 4);

  event = gst_harness_pull_event (h);
  fail_unless (GST_EVENT_TYPE (event) == GST_EVENT_STREAM_START);
  gst_event_unref (event);

  event = gst_harness_pull_event (h);
  fail_unless (GST_EVENT_TYPE (event) == GST_EVENT_CAPS);
  gst_event_unref (event);

  event = gst_harness_pull_event (h);
  fail_unless (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT);
  gst_event_unref (event);

  event = gst_harness_pull_event (h);
  fail_unless (GST_EVENT_TYPE (event) == GST_EVENT_GAP);
  gst_event_unref (event);

  caps1 = gst_pad_get_current_caps (h->sinkpad);
  caps2 = gst_caps_from_string (test_default_caps[__i__][1]);
  gst_video_info_from_caps (&info1, caps1);
  gst_video_info_from_caps (&info2, caps2);
  gst_caps_unref (caps1);
  gst_caps_unref (caps2);
  fail_unless (gst_video_info_is_equal (&info1, &info2));

  gst_harness_teardown (h);
  gst_object_unref (dec);
}

GST_END_TEST;
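
/* The next test encodes the expectation that serialized events arriving
 * between input frames are queued by the base class and pushed downstream in
 * their original order, even when the frames they arrived with are released
 * or dropped instead of being finished. */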

GST_START_TEST (videodecoder_playback_event_order)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint i = 0;
  GList *events_iter;

  setup_videodecodertester (NULL, NULL);

  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (dec, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);

  send_startup_events ();

  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));

  /* push 5 buffers interleaved with tag events. All buffers except the last
   * one are dropped in some way, so the events are collected in various
   * places. The order must be preserved.
   * With the first buffer the segment event is added to the pending event
   * list to ensure that incorrect ordering can be detected for later
   * events. */
  for (i = 0; i < 9; i++) {
    if (i % 2 == 0) {
      buffer = create_test_buffer (i);
      if (i < 8)
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
      fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
      if (i < 6) {
        GList *l, *ol;

        ol = l = gst_video_decoder_get_frames (GST_VIDEO_DECODER (dec));
        fail_unless (g_list_length (l) == 1);
        while (l) {
          GstVideoCodecFrame *tmp = l->data;
          if (i < 4)
            gst_video_decoder_release_frame (GST_VIDEO_DECODER (dec), tmp);
          else
            gst_video_decoder_drop_frame (GST_VIDEO_DECODER (dec), tmp);
          l = g_list_next (l);
        }
        g_list_free (ol);
      }
    } else {
      GstTagList *tags;

      tags = gst_tag_list_new (GST_TAG_TRACK_NUMBER, i, NULL);
      fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_tag (tags)));
    }
  }

  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  events_iter = events;
  /* make sure the usual events have been received */
  {
    GstEvent *sstart = events_iter->data;
    fail_unless (GST_EVENT_TYPE (sstart) == GST_EVENT_STREAM_START);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *caps_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (caps_event) == GST_EVENT_CAPS);
    events_iter = g_list_next (events_iter);
  }
  {
    GstEvent *segment_event = events_iter->data;
    fail_unless (GST_EVENT_TYPE (segment_event) == GST_EVENT_SEGMENT);
    events_iter = g_list_next (events_iter);
  }

  /* Check the order of the tag events */
  for (i = 1; i < 9; i += 2) {
    guint tag_v;
    GstEvent *tag_event = events_iter->data;
    GstTagList *taglist = NULL;

    fail_unless (GST_EVENT_TYPE (tag_event) == GST_EVENT_TAG);
    gst_event_parse_tag (tag_event, &taglist);

    fail_unless (gst_tag_list_get_uint (taglist, GST_TAG_TRACK_NUMBER,
            &tag_v));
    fail_unless (tag_v == i);

    events_iter = g_list_next (events_iter);
  }

  /* check that EOS was received */
  {
    GstEvent *eos = events_iter->data;
    fail_unless (GST_EVENT_TYPE (eos) == GST_EVENT_EOS);
    events_iter = g_list_next (events_iter);
  }

  fail_unless (events_iter == NULL);

  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;

  cleanup_videodecodertest ();
}

GST_END_TEST;

static Suite *
gst_videodecoder_suite (void)
{
  Suite *s = suite_create ("GstVideoDecoder");
  TCase *tc = tcase_create ("general");

  suite_add_tcase (s, tc);
  tcase_add_test (tc, videodecoder_query_caps_with_fixed_caps_peer);
  tcase_add_test (tc, videodecoder_query_caps_with_range_caps_peer);
  tcase_add_test (tc, videodecoder_query_caps_with_custom_getcaps);
  tcase_add_test (tc, videodecoder_playback);
  tcase_add_test (tc, videodecoder_playback_with_events);
  tcase_add_test (tc, videodecoder_playback_first_frames_not_decoded);
  tcase_add_test (tc, videodecoder_buffer_after_segment);
  tcase_add_test (tc, videodecoder_first_data_is_gap);
  tcase_add_test (tc, videodecoder_backwards_playback);
  tcase_add_test (tc, videodecoder_backwards_buffer_after_segment);
  tcase_add_test (tc, videodecoder_flush_events);
  tcase_add_loop_test (tc, videodecoder_default_caps_on_gap_before_buffer, 0,
      G_N_ELEMENTS (test_default_caps));
  tcase_add_test (tc, videodecoder_playback_event_order);

  return s;
}

GST_CHECK_MAIN (gst_videodecoder);