diff --git a/Miracast/MiracastPlayer/Amlogic/SoC_GstPlayer.cpp b/Miracast/MiracastPlayer/Amlogic/SoC_GstPlayer.cpp deleted file mode 100644 index 41129b0277..0000000000 --- a/Miracast/MiracastPlayer/Amlogic/SoC_GstPlayer.cpp +++ /dev/null @@ -1,954 +0,0 @@ -/* - * If not stated otherwise in this file or this component's Licenses.txt file the - * following copyright and licenses apply: - * - * Copyright 2023 RDK Management - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include "MiracastLogger.h" -#include "MiracastRTSPMsg.h" -#include "SoC_GstPlayer.h" - -SoC_GstPlayer *SoC_GstPlayer::m_GstPlayer{nullptr}; - -SoC_GstPlayer *SoC_GstPlayer::getInstance() -{ - if (m_GstPlayer == nullptr) - { - m_GstPlayer = new SoC_GstPlayer(); - } - return m_GstPlayer; -} - -void SoC_GstPlayer::destroyInstance() -{ - MIRACASTLOG_TRACE("Entering..."); - if (m_GstPlayer != nullptr) - { - m_GstPlayer->stop(); - if (m_GstPlayer->stop()) - { - MIRACASTLOG_INFO("Stop miracast player"); - } - else - { - MIRACASTLOG_ERROR("Failed to stop miracast player"); - } - delete m_GstPlayer; - m_GstPlayer = nullptr; - } - MIRACASTLOG_TRACE("Exiting..."); -} - -SoC_GstPlayer::SoC_GstPlayer() -{ - MIRACASTLOG_TRACE("Entering..."); - { - char command[128] = {0}; - std::string default_error_proc_policy = "2151665463"; - std::ifstream decoder_error_proc_policy_file("/opt/miracast_aml_dec_error_proc_policy"); - - if (decoder_error_proc_policy_file.is_open()) - { - std::string new_error_proc_policy = ""; - std::getline(decoder_error_proc_policy_file, new_error_proc_policy); - decoder_error_proc_policy_file.close(); - - MIRACASTLOG_VERBOSE("decoder_error_proc_policy_file reading from file [/opt/miracast_aml_dec_error_proc_policy], new_error_proc_policy as [%s] ", - new_error_proc_policy.c_str()); - MIRACASTLOG_VERBOSE("Overwriting error_proc_policy default[%s] with new[%s]", - default_error_proc_policy.c_str(), - new_error_proc_policy.c_str()); - default_error_proc_policy = new_error_proc_policy; - } - - if ( ! default_error_proc_policy.empty()) - { - sprintf(command, "echo %s > /sys/module/amvdec_mh264/parameters/error_proc_policy", - default_error_proc_policy.c_str()); - - MIRACASTLOG_INFO("command for applying error_proc_policy[%s]",command); - if (0 == MiracastCommon::execute_SystemCommand(command)) - { - MIRACASTLOG_INFO("error_proc_policy applied successfully"); - } - else - { - MIRACASTLOG_ERROR("!!! 
Failed to apply error_proc_policy !!!"); - } - - } - } - gst_init(nullptr, nullptr); - m_bBuffering = false; - m_bReady = false; - m_currentPosition = 0.0f; - m_buffering_level = 100; - m_player_statistics_tid = 0; - MIRACASTLOG_TRACE("Exiting..."); -} - -SoC_GstPlayer::~SoC_GstPlayer() -{ - MIRACASTLOG_TRACE("Entering..."); - stop(); - MIRACASTLOG_TRACE("Exiting..."); -} - -bool SoC_GstPlayer::setVideoRectangle( VIDEO_RECT_STRUCT video_rect , bool apply ) -{ - bool ret = false; - - MIRACASTLOG_TRACE("Entering..."); - - m_video_rect_st.startX = video_rect.startX; - m_video_rect_st.startY = video_rect.startY; - m_video_rect_st.width = video_rect.width; - m_video_rect_st.height = video_rect.height; - - if ( true == apply ){ - updateVideoSinkRectangle(); - } - ret = true; - - MIRACASTLOG_TRACE("Exiting Coords[%d,%d,%d,%d]Apply[%x]...", - video_rect.startX,video_rect.startY,video_rect.width,video_rect.height, - apply); - - return ret; -} - -bool SoC_GstPlayer::updateVideoSinkRectangle(void) -{ - bool ret = false; - - MIRACASTLOG_TRACE("Entering..."); - - if (( nullptr != m_video_sink ) && ( 0 < m_video_rect_st.width ) && ( 0 < m_video_rect_st.height )) - { - char rectString[64]; - sprintf(rectString,"%d,%d,%d,%d", m_video_rect_st.startX, m_video_rect_st.startY, - m_video_rect_st.width, m_video_rect_st.height); - g_object_set(G_OBJECT(m_video_sink), "window-set", rectString, nullptr); - } - MIRACASTLOG_TRACE("Exiting..."); - return ret; -} - -bool SoC_GstPlayer::launch(std::string& localip , std::string& streaming_port, MiracastRTSPMsg *rtsp_instance) -{ - char urlBuffer[128] = {0}; - bool ret = false; - - MIRACASTLOG_TRACE("Entering..."); - - snprintf(urlBuffer,sizeof(urlBuffer),"udp://%s:%s",localip.c_str(),streaming_port.c_str()); - m_uri = urlBuffer; - - m_streaming_port = std::stoull(streaming_port.c_str()); - if ( nullptr != rtsp_instance ) - { - m_rtsp_reference_instance = rtsp_instance; - } - ret = createPipeline(); - if ( !ret ){ - m_rtsp_reference_instance = nullptr; - MIRACASTLOG_ERROR("Failed to create the pipeline"); - } - MIRACASTLOG_TRACE("Exiting..."); - return ret; -} - -bool SoC_GstPlayer::pause() -{ - return changePipelineState(GST_STATE_PAUSED); -} - -bool SoC_GstPlayer::resume() -{ - return changePipelineState(GST_STATE_PLAYING); -} - -bool SoC_GstPlayer::stop() -{ - MIRACASTLOG_TRACE("Entering.."); - - if (!m_pipeline) - { - MIRACASTLOG_ERROR("Pipeline is NULL"); - return false; - } - m_statistics_thread_loop = false; - if (m_player_statistics_tid){ - pthread_join(m_player_statistics_tid,nullptr); - m_player_statistics_tid = 0; - } - if (m_main_loop) - { - g_main_loop_quit(m_main_loop); - } - if (m_playback_thread) - { - pthread_join(m_playback_thread,nullptr); - } - GstStateChangeReturn ret; - ret = gst_element_set_state(m_pipeline, GST_STATE_NULL); - if (ret == GST_STATE_CHANGE_FAILURE) - { - MIRACASTLOG_ERROR("Failed to set gst_element_set_state as NULL"); - } - GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)); - if (bus) - { - gst_bus_set_sync_handler(bus, nullptr, nullptr, nullptr); - gst_object_unref(bus); - } - if (m_audio_sink) - { - gst_object_unref(m_audio_sink); - m_audio_sink = nullptr; - } - if (m_audioconvert) - { - gst_object_unref(m_audioconvert); - m_audioconvert = nullptr; - } - if (m_avdec_aac) - { - gst_object_unref(m_avdec_aac); - m_avdec_aac = nullptr; - } - if (m_aacparse) - { - gst_object_unref(m_aacparse); - m_aacparse = nullptr; - } - if (m_aQueue) - { - gst_object_unref(m_aQueue); - m_aQueue = nullptr; - } - if (m_video_sink) - { - 
gst_object_unref(m_video_sink); - m_video_sink = nullptr; - } - if (m_h264parse) - { - gst_object_unref(m_h264parse); - m_h264parse = nullptr; - } - if (m_vQueue) - { - gst_object_unref(m_vQueue); - m_vQueue = nullptr; - } - if (m_tsdemux) - { - gst_object_unref(m_tsdemux); - m_tsdemux = nullptr; - } - if (m_tsparse) - { - gst_object_unref(m_tsparse); - m_tsparse = nullptr; - } - if (m_rtpmp2tdepay) - { - gst_object_unref(m_rtpmp2tdepay); - m_rtpmp2tdepay = nullptr; - } - if (m_rtpjitterbuffer) - { - gst_object_unref(m_rtpjitterbuffer); - m_tsparse = nullptr; - } - if (m_udpsrc) - { - gst_object_unref(m_udpsrc); - m_udpsrc = nullptr; - } - if (m_main_loop) - { - g_main_loop_unref(m_main_loop); - m_main_loop = nullptr; - } - if (m_main_loop_context) - { - g_main_context_unref(m_main_loop_context); - m_main_loop_context = nullptr; - } - if (m_pipeline) - { - g_object_unref(m_pipeline); - m_pipeline = nullptr; - } - MIRACASTLOG_TRACE("Exiting.."); - return true; -} - -bool SoC_GstPlayer::changePipelineState(GstState state) const -{ - MIRACASTLOG_TRACE("Entering..!!!"); - GstStateChangeReturn ret; - bool status = false; - GstState current, pending; - current = pending = GST_STATE_VOID_PENDING; - ret = gst_element_get_state(m_pipeline, ¤t, &pending, 0); - - if ((ret != GST_STATE_CHANGE_FAILURE) && (current == state || pending == state)) - { - status = true; - } - MIRACASTLOG_TRACE("Changing state to %s.", gst_element_state_get_name(state)); - if (gst_element_set_state(m_pipeline, state) != GST_STATE_CHANGE_FAILURE) - { - status = true; - } - else - { - status = false; - } - MIRACASTLOG_TRACE("Exiting..!!!"); - return status; -} - -void *SoC_GstPlayer::playbackThread(void *ctx) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - SoC_GstPlayer *self = (SoC_GstPlayer *)ctx; - g_main_context_push_thread_default(self->m_main_loop_context); - g_main_loop_run(self->m_main_loop); - self->m_playback_thread = 0; - MIRACASTLOG_TRACE("Exiting..!!!"); - pthread_exit(nullptr); -} - -void* SoC_GstPlayer::monitor_player_statistics_thread(void *ctx) -{ - SoC_GstPlayer *self = (SoC_GstPlayer *)ctx; - int elapsed_seconds = 0, - stats_timeout = 0; - MIRACASTLOG_TRACE("Entering..!!!"); - self->m_statistics_thread_loop = true; - struct timespec start_time, current_time; - std::string opt_flag_buffer = ""; - - clock_gettime(CLOCK_REALTIME, &start_time); - while (true == self->m_statistics_thread_loop) - { - clock_gettime(CLOCK_REALTIME, ¤t_time); - opt_flag_buffer = MiracastCommon::parse_opt_flag("/opt/miracast_player_stats",true,false); - - if (!opt_flag_buffer.empty()) - { - stats_timeout = std::atoi(opt_flag_buffer.c_str()); - elapsed_seconds = current_time.tv_sec - start_time.tv_sec; - if (elapsed_seconds >= stats_timeout) - { - self->get_player_statistics(); - // Refresh the Statistics time - clock_gettime(CLOCK_REALTIME, &start_time); - } - } - usleep(100000); - } - self->m_player_statistics_tid = 0; - MIRACASTLOG_TRACE("Exiting..!!!"); - pthread_exit(nullptr); -} - -double SoC_GstPlayer::getDuration( GstElement *pipeline ) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - gint64 duration = 0; - double ret = 0.0f; - - if ( nullptr == pipeline ) - { - pipeline = m_pipeline; - } - - if (gst_element_query_duration(pipeline, GST_FORMAT_TIME, &duration) && GST_CLOCK_TIME_IS_VALID(duration)) - { - ret = static_cast(duration) / GST_SECOND; - } - MIRACASTLOG_TRACE("Exiting..!!!"); - return ret; -} - -double SoC_GstPlayer::getCurrentPosition(GstElement *pipeline) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - gint64 position = 0; - - if ( 
nullptr == pipeline ) - { - pipeline = m_pipeline; - } - - if (gst_element_query_position(pipeline, GST_FORMAT_TIME, &position)) - { - position = static_cast(position) / GST_SECOND; - } - MIRACASTLOG_TRACE("Exiting..!!!"); - return position; -} - -bool SoC_GstPlayer::seekTo(double seconds, GstElement *pipeline ) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - bool ret = false; - gint64 cur = static_cast(seconds * GST_SECOND); - m_currentPosition = seconds; - - MIRACASTLOG_VERBOSE("seekToPos=%f", seconds); - - if ( nullptr == pipeline ) - { - pipeline = m_pipeline; - } - - if (!gst_element_seek(pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, cur, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) - { - MIRACASTLOG_VERBOSE("SeekToPos success\n"); - ret = true; - } - else - { - MIRACASTLOG_VERBOSE("seekToPos failed"); - } - MIRACASTLOG_TRACE("Exiting..!!!"); - return ret; -} - -void SoC_GstPlayer::print_pipeline_state(GstElement *pipeline) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - - if ( nullptr == pipeline ) - { - MIRACASTLOG_ERROR("pipeline is NULL. Can't proceed with print_pipeline_state(). \n"); - } - else - { - GstState current, pending; - GstStateChangeReturn ret_state = GST_STATE_CHANGE_FAILURE; - current = pending = GST_STATE_VOID_PENDING; - - ret_state = gst_element_get_state(pipeline, ¤t, &pending, 0); - MIRACASTLOG_VERBOSE("\n[%s]Pipeline State - Current:[%s], Pending:[%s],Ret:[%d]\n", - gst_element_get_name(pipeline), - gst_element_state_get_name(current), - gst_element_state_get_name(pending), - ret_state); - } - MIRACASTLOG_TRACE("Exiting..!!!"); -} - -bool SoC_GstPlayer::get_player_statistics() -{ - MIRACASTLOG_TRACE("Entering..!!!"); - GstStructure *stats = nullptr; - bool ret = true; - - if (nullptr == m_video_sink ) - { - MIRACASTLOG_ERROR("video-sink is NULL. Can't proceed with getPlayerStatistics(). \n"); - return false; - } - MIRACASTLOG_INFO("============= Player Statistics ============="); - - double cur_position = getCurrentPosition(); - - g_object_get( G_OBJECT(m_video_sink), "stats", &stats, nullptr ); - - if ( stats ) - { - const GValue *value = nullptr; - guint64 render_frame = 0, - dropped_frame = 0, - total_video_frames = 0, - dropped_video_frames = 0; - - /* Get Rendered Frames*/ - value = gst_structure_get_value( stats, (const gchar *)"rendered" ); - if ( value ) - { - render_frame = g_value_get_uint64( value ); - MIRACASTLOG_TRACE("!!!! render_frame[%lu] !!!",render_frame); - } - /* Get Dropped Frames*/ - value = gst_structure_get_value( stats, (const gchar *)"dropped" ); - if ( value ) - { - dropped_frame = g_value_get_uint64( value ); - MIRACASTLOG_TRACE("!!!! 
dropped_frame[%lu] !!!",dropped_frame); - } - - total_video_frames = render_frame + dropped_frame; - dropped_video_frames = dropped_frame; - - MIRACASTLOG_INFO("Current PTS: [ %f ]",cur_position); - MIRACASTLOG_INFO("Total Frames: [ %lu], Rendered Frames : [ %lu ], Dropped Frames: [%lu]", - total_video_frames, - render_frame, - dropped_video_frames); - gst_structure_free( stats ); - } - print_pipeline_state(m_pipeline); - MIRACASTLOG_INFO("\n=============================================\n"); - MIRACASTLOG_TRACE("Exiting..!!!"); - return ret; -} - -gboolean SoC_GstPlayer::busMessageCb(GstBus *bus, GstMessage *msg, gpointer userdata) -{ - SoC_GstPlayer *self = static_cast(userdata); - - MIRACASTLOG_TRACE("Entering...\n"); - switch (GST_MESSAGE_TYPE(msg)) - { - case GST_MESSAGE_ERROR: - { - GError *error; - gchar *info; - gst_message_parse_error(msg, &error, &info); - MIRACASTLOG_ERROR("#### GST-FAIL Error received from element [%s | %s | %s] ####", GST_OBJECT_NAME(msg->src), error->message, info ? info : "none"); - g_error_free(error); - g_free(info); - GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, "miracast_player_error"); - self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_STOPPED,MIRACAST_PLAYER_REASON_CODE_GST_ERROR); - break; - } - case GST_MESSAGE_EOS: - { - MIRACASTLOG_VERBOSE("!!!!GST_MESSAGE_EOS reached !!!!"); - gst_element_set_state(self->m_pipeline, GST_STATE_READY); // TBD ? (Should be do explicitly or destry automatically.) - g_main_loop_quit(self->m_main_loop); - } - break; - case GST_MESSAGE_STATE_CHANGED: - { - GstState old, now, pending; - gst_message_parse_state_changed(msg, &old, &now, &pending); - MIRACASTLOG_VERBOSE("[GST_MESSAGE_STATE_CHANGED] Element [%s], Pipeline state change from Old [%s] -> New [%s] and Pending state is [%s]", - GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)), - gst_element_state_get_name(old), - gst_element_state_get_name(now), - gst_element_state_get_name(pending)); - - if (GST_MESSAGE_SRC(msg) == GST_OBJECT(self->m_pipeline)) - { - char fileName[128] = {0}; - static int id = 0; - id++; - snprintf( fileName, - sizeof(fileName), - "MiracastPlayer_%s_%s_%s_%s_DBG", - GST_OBJECT_NAME(self->m_pipeline), - std::to_string(id).c_str(), - gst_element_state_get_name(old), - gst_element_state_get_name(now)); - GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, fileName); - } - break; - } - case GST_MESSAGE_BUFFERING: - { - gint percent = 0; - gst_message_parse_buffering(msg, &percent); - MIRACASTLOG_VERBOSE("Buffering [%3d%%].", percent); - - /* Wait until buffering is complete before start/resume playing */ - if (percent < 100) - { - gst_element_set_state(self->m_pipeline, GST_STATE_PAUSED); - } - else - { - gst_element_set_state(self->m_pipeline, GST_STATE_PLAYING); - } - } - break; - case GST_MESSAGE_TAG: - { - MIRACASTLOG_VERBOSE("!!!! GST_MESSAGE_TAG !!!!"); - } - break; - case GST_MESSAGE_CLOCK_LOST: - { - MIRACASTLOG_VERBOSE("!!!! 
GST_MESSAGE_CLOCK_LOST !!!!"); - /* The current clock as selected by the pipeline became unusable, then select a new clock */ - gst_element_set_state(self->m_pipeline, GST_STATE_PAUSED); - gst_element_set_state(self->m_pipeline, GST_STATE_PLAYING); - } - break; - case GST_MESSAGE_QOS: - { - MIRACASTLOG_VERBOSE("Received [%s], a buffer was dropped or an element changed its processing strategy for Quality of Service reasons.", gst_message_type_get_name(msg->type)); - GstFormat format; - guint64 processed; - guint64 dropped; - gst_message_parse_qos_stats(msg, &format, &processed, &dropped); - MIRACASTLOG_VERBOSE("Format [%s], Processed [%lu], Dropped [%lu].", gst_format_get_name(format), processed, dropped); - - gint64 jitter; - gdouble proportion; - gint quality; - gst_message_parse_qos_values(msg, &jitter, &proportion, &quality); - MIRACASTLOG_VERBOSE("Jitter [%lu], Proportion [%lf], Quality [%u].", jitter, proportion, quality); - - gboolean live; - guint64 running_time; - guint64 stream_time; - guint64 timestamp; - guint64 duration; - gst_message_parse_qos(msg, &live, &running_time, &stream_time, ×tamp, &duration); - MIRACASTLOG_VERBOSE("live stream [%d], runninng_time [%lu], stream_time [%lu], timestamp [%lu], duration [%lu].", live, running_time, stream_time, timestamp, duration); - } - break; - default: - { - } - break; - } - MIRACASTLOG_TRACE("Exiting...\n"); - return TRUE; -} - -void SoC_GstPlayer::pad_added_handler(GstElement *gstelement, GstPad *new_pad, gpointer userdata) -{ - SoC_GstPlayer *self = static_cast(userdata); - GstCaps *new_pad_caps = NULL; - GstStructure *new_pad_struct = NULL; - - MIRACASTLOG_TRACE("Entering..!!!"); - - if (!self->m_pipeline) - { - MIRACASTLOG_ERROR("failed to link elements and Exiting...!!!"); - gst_object_unref(self->m_pipeline); - self->m_pipeline = NULL; - return; - } - - /* Check the new pad's type */ - new_pad_caps = gst_pad_get_current_caps(new_pad); - new_pad_struct = gst_caps_get_structure(new_pad_caps, 0); - - char *pad_name = (char *)gst_structure_get_name(new_pad_struct); - - MIRACASTLOG_TRACE("Pad Name: %s", pad_name); - - if (strncmp(pad_name, "audio", strlen("audio")) == 0) - { - GstElement *sink = (GstElement *)self->m_aQueue; - GstPad *sinkpad = gst_element_get_static_pad(sink, "sink"); - bool linked = GST_PAD_LINK_SUCCESSFUL(gst_pad_link(new_pad, sinkpad)); - if (!linked) - { - MIRACASTLOG_ERROR("Failed to link demux and audio pad (%s)", pad_name); - } - else - { - MIRACASTLOG_VERBOSE("Configured audio pad"); - } - gst_object_unref(sinkpad); - } - else if (strncmp(pad_name, "video", strlen("video")) == 0) - { - GstElement *sink = (GstElement *)self->m_vQueue; - GstPad *sinkpad = gst_element_get_static_pad(sink, "sink"); - bool linked = GST_PAD_LINK_SUCCESSFUL(gst_pad_link(new_pad, sinkpad)); - if (!linked) - { - MIRACASTLOG_ERROR("Failed to link demux and video pad (%s)", pad_name); - } - else - { - MIRACASTLOG_VERBOSE("Configured video pad"); - } - gst_object_unref(sinkpad); - } - MIRACASTLOG_TRACE("Exiting..!!!"); -} - -/** - * @brief Callback invoked after first video frame decoded - * @param[in] object pointer to element raising the callback - * @param[in] arg0 number of arguments - * @param[in] arg1 array of arguments - * @param[in] _this pointer to SoC_GstPlayer instance - */ -void SoC_GstPlayer::onFirstVideoFrameCallback(GstElement* object, guint arg0, gpointer arg1,gpointer userdata) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - SoC_GstPlayer *self = static_cast(userdata); - self->m_firstVideoFrameReceived = true; - 
MIRACASTLOG_INFO("!!! First Video Frame has received !!!"); - self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_FIRST_VIDEO_FRAME_RECEIVED); - MIRACASTLOG_TRACE("Exiting..!!!"); -} - -void SoC_GstPlayer::notifyPlaybackState(eMIRA_GSTPLAYER_STATES gst_player_state, eM_PLAYER_REASON_CODE state_reason_code ) -{ - MIRACASTLOG_TRACE("Entering..!!!"); - if ( nullptr != m_rtsp_reference_instance ) - { - RTSP_HLDR_MSGQ_STRUCT rtsp_hldr_msgq_data = {0}; - - rtsp_hldr_msgq_data.state = RTSP_NOTIFY_GSTPLAYER_STATE; - rtsp_hldr_msgq_data.gst_player_state = gst_player_state; - rtsp_hldr_msgq_data.state_reason_code = state_reason_code; - MIRACASTLOG_INFO("!!! GstPlayer to RTSP [%#08X] !!!",gst_player_state); - m_rtsp_reference_instance->send_msgto_rtsp_msg_hdler_thread(rtsp_hldr_msgq_data); - } - MIRACASTLOG_TRACE("Exiting..!!!"); -} - -bool SoC_GstPlayer::createPipeline() -{ - MIRACASTLOG_TRACE("Entering..!!!"); - GstStateChangeReturn ret; - GstBus *bus = nullptr; - bool return_value = true; - - /* create gst pipeline */ - m_main_loop_context = g_main_context_new(); - g_main_context_push_thread_default(m_main_loop_context); - m_main_loop = g_main_loop_new(m_main_loop_context, FALSE); - - MIRACASTLOG_TRACE("Creating Pipeline..."); - - m_pipeline = gst_pipeline_new("miracast_player"); - if (!m_pipeline) - { - MIRACASTLOG_ERROR("Failed to create gstreamer pipeline"); - MIRACASTLOG_TRACE("Exiting..!!!"); - return false; - } - - m_udpsrc = gst_element_factory_make("udpsrc", "udpsrc"); - - m_rtpjitterbuffer = gst_element_factory_make("rtpjitterbuffer", "rtpjitterbuffer"); - m_rtpmp2tdepay = gst_element_factory_make("rtpmp2tdepay", "rtpmp2tdepay"); - - m_tsparse = gst_element_factory_make("tsparse", "tsparse"); - //GstElement *m_Queue2 = gst_element_factory_make("queue2", "queue2"); - m_tsdemux = gst_element_factory_make("tsdemux", "tsdemux"); - - m_vQueue = gst_element_factory_make("queue", "vQueue"); - m_h264parse = gst_element_factory_make("h264parse", "h264parse"); - m_video_sink = gst_element_factory_make("westerossink", "westerossink"); - - m_aQueue = gst_element_factory_make("queue", "aQueue"); - m_aacparse = gst_element_factory_make("aacparse", "aacparse"); - m_avdec_aac = gst_element_factory_make("avdec_aac", "avdec_aac"); - m_audioconvert = gst_element_factory_make("audioconvert", "audioconvert"); - m_audio_sink = gst_element_factory_make("amlhalasink", "amlhalasink"); - - if ((!m_udpsrc) || (!m_rtpjitterbuffer) || (!m_rtpmp2tdepay)|| - (!m_tsparse) || (!m_tsdemux) || - (!m_vQueue) || (!m_h264parse) || (!m_video_sink) || - (!m_aQueue) || (!m_aacparse) || (!m_avdec_aac) || (!m_audioconvert) || (!m_audio_sink)) - { - MIRACASTLOG_ERROR("Element creation failure, check below"); - MIRACASTLOG_WARNING("udpsrc[%x]rtpjitterbuffer[%x]rtpmp2tdepay[%x]", - m_udpsrc,m_rtpjitterbuffer,m_rtpmp2tdepay); - MIRACASTLOG_WARNING("tsparse[%x]tsdemux[%x]", - m_tsparse,m_tsdemux); - MIRACASTLOG_WARNING("vQueue[%x]h264parse[%x]videoSink[%x]", - m_vQueue,m_h264parse,m_video_sink); - MIRACASTLOG_WARNING("aQueue[%x]aacparse[%x]avdec_aac[%x]audioconvert[%x]audioSink[%x]", - m_aQueue,m_aacparse,m_avdec_aac,m_audioconvert,m_audio_sink); - MIRACASTLOG_TRACE("Exiting..!!!"); - return false; - } - MIRACASTLOG_INFO("Add all the elements to the Pipeline "); - - /* Add all the elements into the pipeline */ - gst_bin_add_many(GST_BIN(m_pipeline), - m_udpsrc, - m_rtpjitterbuffer,m_rtpmp2tdepay, - m_tsparse,m_tsdemux, - m_vQueue,m_h264parse,m_video_sink, - m_aQueue,m_aacparse,m_avdec_aac,m_audioconvert,m_audio_sink, - nullptr ); - - 
MIRACASTLOG_TRACE("Link all the elements together. "); - - /* Link the elements together */ - if (!gst_element_link_many(m_udpsrc, m_rtpjitterbuffer,m_rtpmp2tdepay,m_tsparse,m_tsdemux,nullptr )) - { - MIRACASTLOG_ERROR("Elements (udpsrc->rtpjitterbuffer->rtpmp2tdepay->tsparse->tsdemux) could not be linked"); - gst_object_unref(m_pipeline); - MIRACASTLOG_TRACE("Exiting..!!!"); - return false; - } - - if (!gst_element_link_many(m_vQueue,m_h264parse,m_video_sink, nullptr)) - { - MIRACASTLOG_ERROR("Elements (vQueue->h264parse->westerossink) could not be linked"); - gst_object_unref(m_pipeline); - MIRACASTLOG_TRACE("Exiting..!!!"); - return false; - } - - if (!gst_element_link_many(m_aQueue,m_aacparse,m_avdec_aac,m_audioconvert,m_audio_sink, nullptr)) - { - MIRACASTLOG_ERROR("Elements (aQueue->aacparse->avdec_aac->amlhalasink) could not be linked"); - gst_object_unref(m_pipeline); - MIRACASTLOG_TRACE("Exiting..!!!"); - return false; - } - - /*{{{ udpsrc related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>udpsrc configuration start"); - MIRACASTLOG_TRACE("Set the port[%llu] and to udp source.",m_streaming_port); - g_object_set(G_OBJECT(m_udpsrc), "port", m_streaming_port, nullptr); - //g_object_set(G_OBJECT(m_udpsrc), "uri", m_uri.c_str(), nullptr); - - GstCaps *caps = gst_caps_new_simple("application/x-rtp", "media", G_TYPE_STRING, "video", nullptr); - if (caps) - { - g_object_set(m_udpsrc, "caps", caps, nullptr); - gst_caps_unref(caps); - MIRACASTLOG_TRACE("Set the caps to udp source."); - } - else - { - MIRACASTLOG_ERROR("Unable to Set caps to udp source."); - } - MIRACASTLOG_TRACE("udpsrc configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ rtpjitterbuffer related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>rtpjitterbuffer configuration start"); - MIRACASTLOG_TRACE("Set the 'post-drop-messages' and 'do-lost' to rtpjitterbuffer."); - g_object_set(G_OBJECT(m_rtpjitterbuffer), - "post-drop-messages", true, "do-lost" , true , nullptr ); - MIRACASTLOG_TRACE("rtpjitterbuffer configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ tsparse related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>tsparse configuration start"); - MIRACASTLOG_TRACE("Set 'set-timestamps' to tsparse"); - g_object_set(G_OBJECT(m_tsparse), "set-timestamps", true, nullptr ); - MIRACASTLOG_TRACE("tsparse configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ tsdemux related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>tsdemux configuration start"); - MIRACASTLOG_TRACE("Connect to the pad-added signal for tsdemux"); - g_signal_connect(m_tsdemux, "pad-added", G_CALLBACK(pad_added_handler), (gpointer)this); - MIRACASTLOG_TRACE("tsdemux configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ vQueue related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>vQueue configuration start"); - uint64_t vQ_max_size_buffers = 2; - MIRACASTLOG_TRACE("set 'max-size-buffers' as 2 in videoQueue"); - g_object_set(G_OBJECT(m_vQueue), "max-size-buffers", vQ_max_size_buffers, nullptr ); - MIRACASTLOG_TRACE("vQueue configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ westerossink related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>westerossink configuration start"); - updateVideoSinkRectangle(); - - g_signal_connect(m_video_sink, "first-video-frame-callback",G_CALLBACK(onFirstVideoFrameCallback), (gpointer)this); - MIRACASTLOG_TRACE("westerossink configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ aQueue related element configuration*/ - uint64_t aQ_max_size_buffers = 2, - aQ_max_size_time = 0; - MIRACASTLOG_TRACE(">>>>>>>aQueue 
configuration start"); - MIRACASTLOG_TRACE("set 'max-size-buffers' as 2 and max-size-time as '0' in audioQueue"); - g_object_set(G_OBJECT(m_aQueue), "max-size-buffers", aQ_max_size_buffers, nullptr ); - g_object_set(G_OBJECT(m_aQueue), "max-size-time", aQ_max_size_time , nullptr ); - MIRACASTLOG_TRACE("aQueue configuration end<<<<<<<<"); - /*}}}*/ - - /*{{{ amlhalasink related element configuration*/ - MIRACASTLOG_TRACE(">>>>>>>amlhalasink configuration start"); - - MIRACASTLOG_TRACE("Set disable-xrun as true to amlhalasink"); - g_object_set(G_OBJECT(m_audio_sink), "disable-xrun" , true, nullptr ); - - std::string opt_flag_buffer = MiracastCommon::parse_opt_flag("/opt/miracast_avoid_amlhalasink_iptv_mode"); - if (opt_flag_buffer.empty()) - { - MIRACASTLOG_INFO("[DEFAULT] Set avsync-mode as 2(IPTV) to amlhalasink"); - g_object_set(G_OBJECT(m_audio_sink), "avsync-mode" , 2, nullptr ); - } - MIRACASTLOG_TRACE("amlhalasink configuration end<<<<<<<<"); - /*}}}*/ - - MIRACASTLOG_TRACE("Listen to the bus."); - /* Listen to the bus */ - bus = gst_element_get_bus(m_pipeline); - gst_bus_add_watch(bus, (GstBusFunc)busMessageCb, this ); - gst_object_unref(bus); - - MIRACASTLOG_TRACE("Start Playing...."); - - g_main_context_pop_thread_default(m_main_loop_context); - pthread_create(&m_playback_thread, nullptr, SoC_GstPlayer::playbackThread, this); - pthread_create(&m_player_statistics_tid, nullptr, SoC_GstPlayer::monitor_player_statistics_thread, this); - - ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING); - - if (ret == GST_STATE_CHANGE_FAILURE) - { - MIRACASTLOG_ERROR("Unable to set the pipeline to the playing state."); - if(m_audio_sink) gst_object_unref(m_audio_sink); - if(m_audioconvert) gst_object_unref(m_audioconvert); - if(m_avdec_aac) gst_object_unref(m_avdec_aac); - if(m_aacparse) gst_object_unref(m_aacparse); - if(m_aQueue) gst_object_unref(m_aQueue); - - if(m_video_sink) gst_object_unref(m_video_sink); - if(m_h264parse) gst_object_unref(m_h264parse); - if(m_vQueue) gst_object_unref(m_vQueue); - - if(m_tsdemux) gst_object_unref(m_tsdemux); - if(m_tsparse) gst_object_unref(m_tsparse); - - if(m_rtpmp2tdepay) gst_object_unref(m_rtpmp2tdepay); - if(m_rtpjitterbuffer) gst_object_unref(m_rtpjitterbuffer); - if(m_udpsrc) gst_object_unref(m_udpsrc); - - if(m_pipeline) gst_object_unref(m_pipeline); - return_value = false; - } - else if (ret == GST_STATE_CHANGE_NO_PREROLL) - { - MIRACASTLOG_TRACE("Streaming live"); - m_is_live = true; - } - - MIRACASTLOG_TRACE("Exiting..!!!"); - return return_value; -} diff --git a/Miracast/MiracastPlayer/CMakeLists.txt b/Miracast/MiracastPlayer/CMakeLists.txt index dfb4acd812..f5e8fcbe99 100644 --- a/Miracast/MiracastPlayer/CMakeLists.txt +++ b/Miracast/MiracastPlayer/CMakeLists.txt @@ -34,22 +34,20 @@ pkg_check_modules(GSTREAMERBASE REQUIRED gstreamer-app-1.0) find_package(GStreamer REQUIRED) -find_library(GLIB_LIBRARY NAMES glib-2.0 ) +find_library(GLIB_LIBRARY NAMES glib-2.0) add_library(${MODULE_NAME} SHARED Module.cpp MiracastPlayer.cpp ../common/MiracastLogger.cpp ../common/MiracastCommon.cpp RTSP/MiracastRTSPMsg.cpp) -if (BUILD_AMLOGIC) +if (RDK_SERVICES_L1_TEST) target_sources(${MODULE_NAME} PRIVATE - Amlogic/SoC_GstPlayer.cpp + Test/MiracastGstPlayer.cpp ) -elseif (RDK_SERVICES_L1_TEST) +else() target_sources(${MODULE_NAME} PRIVATE - Test/SoC_GstPlayer.cpp + Generic/MiracastGstPlayer.cpp ) -else() - message(FATAL_ERROR "SoC abstraction yet to be implemented") endif() set_target_properties(${MODULE_NAME} PROPERTIES @@ -74,12 +72,15 @@ 
 target_link_libraries(${MODULE_NAME} PRIVATE ${GLIB_LIBRARIES})
 target_link_libraries(${MODULE_NAME} PRIVATE ${GSTREAMER_LIBRARIES})
 target_link_libraries(${MODULE_NAME} PRIVATE ${GSTREAMERBASE_LIBRARIES})
 target_link_libraries(${MODULE_NAME} PRIVATE -lpthread)
-#target_link_libraries(${MODULE_NAME} PRIVATE -lrdkloggers)
 
 if(WPEFRAMEWORK_SECURITYUTIL_FOUND)
     target_link_libraries(${MODULE_NAME} PRIVATE ${NAMESPACE}SecurityUtil)
 endif()
 
+if(NOT RDK_SERVICES_L1_TEST AND NOT RDK_SERVICE_L2_TEST)
+    target_link_libraries(${MODULE_NAME} PRIVATE MiracastPlayerHal)
+endif()
+
 install(TARGETS ${MODULE_NAME}
         DESTINATION lib/${STORAGE_DIRECTORY}/plugins)
diff --git a/Miracast/MiracastPlayer/Generic/MiracastGstPlayer.cpp b/Miracast/MiracastPlayer/Generic/MiracastGstPlayer.cpp
new file mode 100644
index 0000000000..b17ea807f9
--- /dev/null
+++ b/Miracast/MiracastPlayer/Generic/MiracastGstPlayer.cpp
@@ -0,0 +1,1042 @@
+/*
+ * If not stated otherwise in this file or this component's Licenses.txt file the
+ * following copyright and licenses apply:
+ *
+ * Copyright 2023 RDK Management
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include "MiracastLogger.h"
+#include "MiracastRTSPMsg.h"
+#include "MiracastGstPlayer.h"
+#include
+
+MiracastGstPlayer *MiracastGstPlayer::m_GstPlayer{nullptr};
+
+MiracastGstPlayer *MiracastGstPlayer::getInstance()
+{
+    if (m_GstPlayer == nullptr)
+    {
+        m_GstPlayer = new MiracastGstPlayer();
+    }
+    return m_GstPlayer;
+}
+
+void MiracastGstPlayer::destroyInstance()
+{
+    MIRACASTLOG_TRACE("Entering...");
+    if (m_GstPlayer != nullptr)
+    {
+        if (m_GstPlayer->stop())
+        {
+            MIRACASTLOG_INFO("Stop miracast player");
+        }
+        else
+        {
+            MIRACASTLOG_ERROR("Failed to stop miracast player");
+        }
+        delete m_GstPlayer;
+        m_GstPlayer = nullptr;
+    }
+    MIRACASTLOG_TRACE("Exiting...");
+}
+
+MiracastGstPlayer::MiracastGstPlayer()
+{
+    MIRACASTLOG_TRACE("Entering...");
+    gst_init(nullptr, nullptr);
+    m_bBuffering = false;
+    m_bReady = false;
+    m_currentPosition = 0.0f;
+    m_buffering_level = 100;
+    m_player_statistics_tid = 0;
+    MIRACASTLOG_TRACE("Exiting...");
+}
+
+MiracastGstPlayer::~MiracastGstPlayer()
+{
+    MIRACASTLOG_TRACE("Entering...");
+    stop();
+    MIRACASTLOG_TRACE("Exiting...");
+}
+
+bool MiracastGstPlayer::setVideoRectangle( VIDEO_RECT_STRUCT video_rect , bool apply )
+{
+    bool ret = false;
+
+    MIRACASTLOG_TRACE("Entering...");
+
+    m_video_rect_st.startX = video_rect.startX;
+    m_video_rect_st.startY = video_rect.startY;
+    m_video_rect_st.width = video_rect.width;
+    m_video_rect_st.height = video_rect.height;
+
+    if ( true == apply ){
+        updateVideoSinkRectangle();
+    }
+    ret = true;
+
+    MIRACASTLOG_TRACE("Exiting Coords[%d,%d,%d,%d]Apply[%x]...",
+                      video_rect.startX,video_rect.startY,video_rect.width,video_rect.height,
+                      apply);
+
+    return ret;
+}
+
+bool MiracastGstPlayer::updateVideoSinkRectangle(void)
+{
+    bool ret = false;
+
MIRACASTLOG_TRACE("Entering..."); + + if (( nullptr != m_video_sink ) && ( 0 < m_video_rect_st.width ) && ( 0 < m_video_rect_st.height )) + { + char rectString[64]; + sprintf(rectString,"%d,%d,%d,%d", m_video_rect_st.startX, m_video_rect_st.startY, + m_video_rect_st.width, m_video_rect_st.height); + g_object_set(G_OBJECT(m_video_sink), "window-set", rectString, nullptr); + } + MIRACASTLOG_TRACE("Exiting..."); + return ret; +} + +bool MiracastGstPlayer::launch(std::string& localip , std::string& streaming_port, MiracastRTSPMsg *rtsp_instance) +{ + char urlBuffer[128] = {0}; + bool ret = false; + + MIRACASTLOG_TRACE("Entering..."); + + snprintf(urlBuffer,sizeof(urlBuffer),"udp://%s:%s",localip.c_str(),streaming_port.c_str()); + m_uri = urlBuffer; + + m_streaming_port = std::stoull(streaming_port.c_str()); + if ( nullptr != rtsp_instance ) + { + m_rtsp_reference_instance = rtsp_instance; + } + ret = createPipeline(); + if ( !ret ){ + m_rtsp_reference_instance = nullptr; + MIRACASTLOG_ERROR("Failed to create the pipeline"); + } + MIRACASTLOG_TRACE("Exiting..."); + return ret; +} + +bool MiracastGstPlayer::pause() +{ + changePipelineState(m_append_pipeline,GST_STATE_PAUSED); + changePipelineState(m_playbin_pipeline,GST_STATE_PAUSED); + return true; +} + +bool MiracastGstPlayer::resume() +{ + changePipelineState(m_append_pipeline,GST_STATE_PLAYING); + changePipelineState(m_playbin_pipeline,GST_STATE_PLAYING); + return true; +} + +bool MiracastGstPlayer::changePipelineState(GstElement *pipeline, GstState state) const +{ + MIRACASTLOG_TRACE("Entering..!!!"); + GstStateChangeReturn ret; + bool status = false; + GstState current, pending; + current = pending = GST_STATE_VOID_PENDING; + ret = gst_element_get_state(pipeline, ¤t, &pending, 0); + + if ((ret != GST_STATE_CHANGE_FAILURE) && (current == state || pending == state)) + { + status = true; + } + MIRACASTLOG_TRACE("Changing state to %s.", gst_element_state_get_name(state)); + if (gst_element_set_state(pipeline, state) != GST_STATE_CHANGE_FAILURE) + { + status = true; + } + else + { + status = false; + } + MIRACASTLOG_TRACE("Exiting..!!!"); + return status; +} + +void *MiracastGstPlayer::playbackThread(void *ctx) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + MiracastGstPlayer *self = (MiracastGstPlayer *)ctx; + g_main_context_push_thread_default(self->m_main_loop_context); + g_main_loop_run(self->m_main_loop); + self->m_playback_thread = 0; + MIRACASTLOG_TRACE("Exiting..!!!"); + pthread_exit(nullptr); +} + +void* MiracastGstPlayer::monitor_player_statistics_thread(void *ctx) +{ + MiracastGstPlayer *self = (MiracastGstPlayer *)ctx; + int elapsed_seconds = 0, + stats_timeout = 0; + MIRACASTLOG_TRACE("Entering..!!!"); + self->m_statistics_thread_loop = true; + struct timespec start_time, current_time; + std::string opt_flag_buffer = ""; + + clock_gettime(CLOCK_REALTIME, &start_time); + while (true == self->m_statistics_thread_loop) + { + clock_gettime(CLOCK_REALTIME, ¤t_time); + opt_flag_buffer = MiracastCommon::parse_opt_flag("/opt/miracast_player_stats",true,false); + + if (!opt_flag_buffer.empty()) + { + stats_timeout = std::atoi(opt_flag_buffer.c_str()); + elapsed_seconds = current_time.tv_sec - start_time.tv_sec; + if (elapsed_seconds >= stats_timeout) + { + self->get_player_statistics(); + // Refresh the Statistics time + clock_gettime(CLOCK_REALTIME, &start_time); + } + } + usleep(100000); + } + self->m_player_statistics_tid = 0; + MIRACASTLOG_TRACE("Exiting..!!!"); + pthread_exit(nullptr); +} + +double MiracastGstPlayer::getDuration( GstElement 
*pipeline ) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + gint64 duration = 0; + double ret = 0.0f; + + if ( nullptr != pipeline ) + { + if (gst_element_query_duration(pipeline, GST_FORMAT_TIME, &duration) && GST_CLOCK_TIME_IS_VALID(duration)) + { + ret = static_cast(duration) / GST_SECOND; + } + } + MIRACASTLOG_TRACE("Exiting..!!!"); + return ret; +} + +double MiracastGstPlayer::getCurrentPosition(GstElement *pipeline) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + gint64 position = 0; + + if ( nullptr != pipeline ) + { + if (gst_element_query_position(pipeline, GST_FORMAT_TIME, &position)) + { + position = static_cast(position) / GST_SECOND; + } + } + MIRACASTLOG_TRACE("Exiting..!!!"); + return position; +} + +bool MiracastGstPlayer::seekTo(double seconds, GstElement *pipeline ) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + bool ret = false; + gint64 cur = static_cast(seconds * GST_SECOND); + m_currentPosition = seconds; + + MIRACASTLOG_VERBOSE("seekToPos=%f", seconds); + + if ( nullptr != pipeline ) + { + if (!gst_element_seek(pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, cur, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) + { + MIRACASTLOG_VERBOSE("SeekToPos success"); + ret = true; + } + else + { + MIRACASTLOG_VERBOSE("seekToPos failed"); + } + } + MIRACASTLOG_TRACE("Exiting..!!!"); + return ret; +} + +void MiracastGstPlayer::print_pipeline_state(GstElement *pipeline) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + + if ( nullptr == pipeline ) + { + MIRACASTLOG_ERROR("pipeline is NULL. Can't proceed with print_pipeline_state(). "); + } + else + { + GstState current, pending; + GstStateChangeReturn ret_state = GST_STATE_CHANGE_FAILURE; + current = pending = GST_STATE_VOID_PENDING; + + ret_state = gst_element_get_state(pipeline, ¤t, &pending, 0); + MIRACASTLOG_VERBOSE("\n[%s]Pipeline State - Current:[%s], Pending:[%s],Ret:[%d]\n", + gst_element_get_name(pipeline), + gst_element_state_get_name(current), + gst_element_state_get_name(pending), + ret_state); + } + MIRACASTLOG_TRACE("Exiting..!!!"); +} + +bool MiracastGstPlayer::get_player_statistics() +{ + MIRACASTLOG_TRACE("Entering..!!!"); + GstStructure *stats = nullptr; + bool ret = true; + + if (nullptr == m_video_sink ) + { + MIRACASTLOG_ERROR("video-sink is NULL. Can't proceed with getPlayerStatistics(). "); + return false; + } + MIRACASTLOG_INFO("============= Player Statistics ============="); + + double cur_position = getCurrentPosition(); + + g_object_get( G_OBJECT(m_video_sink), "stats", &stats, nullptr ); + + if ( stats ) + { + const GValue *value = nullptr; + guint64 render_frame = 0, + dropped_frame = 0, + total_video_frames = 0, + dropped_video_frames = 0; + + /* Get Rendered Frames*/ + value = gst_structure_get_value( stats, (const gchar *)"rendered" ); + if ( value ) + { + render_frame = g_value_get_uint64( value ); + MIRACASTLOG_TRACE("!!!! render_frame[%lu] !!!",render_frame); + } + /* Get Dropped Frames*/ + value = gst_structure_get_value( stats, (const gchar *)"dropped" ); + if ( value ) + { + dropped_frame = g_value_get_uint64( value ); + MIRACASTLOG_TRACE("!!!! 
dropped_frame[%lu] !!!",dropped_frame); + } + + total_video_frames = render_frame + dropped_frame; + dropped_video_frames = dropped_frame; + + MIRACASTLOG_INFO("Current PTS: [ %f ]",cur_position); + MIRACASTLOG_INFO("Total Frames: [ %lu], Rendered Frames : [ %lu ], Dropped Frames: [%lu]", + total_video_frames, + render_frame, + dropped_video_frames); + gst_structure_free( stats ); + } + print_pipeline_state(m_append_pipeline); + print_pipeline_state(m_playbin_pipeline); + MIRACASTLOG_INFO("\n============================================="); + MIRACASTLOG_TRACE("Exiting..!!!"); + return ret; +} + +/** + * @brief Callback invoked after first video frame decoded + * @param[in] object pointer to element raising the callback + * @param[in] arg0 number of arguments + * @param[in] arg1 array of arguments + * @param[in] _this pointer to MiracastGstPlayer instance + */ +void MiracastGstPlayer::onFirstVideoFrameCallback(GstElement* object, guint arg0, gpointer arg1,gpointer userdata) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + MiracastGstPlayer *self = static_cast(userdata); + + self->m_firstVideoFrameReceived = true; + MIRACASTLOG_INFO("!!! First Video Frame has received !!!"); + self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_FIRST_VIDEO_FRAME_RECEIVED); + MIRACASTLOG_TRACE("Exiting..!!!"); +} + +void MiracastGstPlayer::notifyPlaybackState(eMIRA_GSTPLAYER_STATES gst_player_state, eM_PLAYER_REASON_CODE state_reason_code ) +{ + MIRACASTLOG_TRACE("Entering..!!!"); + if ( nullptr != m_rtsp_reference_instance ) + { + RTSP_HLDR_MSGQ_STRUCT rtsp_hldr_msgq_data = {0}; + + rtsp_hldr_msgq_data.state = RTSP_NOTIFY_GSTPLAYER_STATE; + rtsp_hldr_msgq_data.gst_player_state = gst_player_state; + rtsp_hldr_msgq_data.state_reason_code = state_reason_code; + MIRACASTLOG_INFO("!!! GstPlayer to RTSP [%#08X] !!!",gst_player_state); + m_rtsp_reference_instance->send_msgto_rtsp_msg_hdler_thread(rtsp_hldr_msgq_data); + } + MIRACASTLOG_TRACE("Exiting..!!!"); +} + +GstFlowReturn MiracastGstPlayer::appendPipelineNewSampleHandler(GstElement *elt, gpointer userdata) +{ + MiracastGstPlayer *self = static_cast(userdata); + GstSample *sample = NULL; + GstBuffer *buffer = NULL; + GstMapInfo map; + + if (nullptr == self->m_appsrc) + { + MIRACASTLOG_WARNING("Yet to get the Appsrc handle"); + return GST_FLOW_OK; + } + + // Pull the sample from appsink + sample = gst_app_sink_pull_sample(GST_APP_SINK(elt)); + if (!sample) + { + MIRACASTLOG_ERROR("Failed to pull sample from appsink\n"); + return GST_FLOW_ERROR; + } + + // Get the buffer from the sample + buffer = gst_sample_get_buffer(sample); + if (!buffer) + { + MIRACASTLOG_ERROR("Failed to get buffer from sample\n"); + gst_sample_unref(sample); + return GST_FLOW_ERROR; + } + + // Map the buffer for reading + if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) + { + MIRACASTLOG_ERROR("Failed to map buffer\n"); + gst_sample_unref(sample); + return GST_FLOW_ERROR; + } + + GstBuffer *new_buffer = gst_buffer_new_allocate(NULL, map.size, NULL); + + // Copy data from the original buffer to the new buffer + gst_buffer_fill(new_buffer, 0, map.data, map.size); + + MIRACASTLOG_INFO("==> Received sample size [%u][%x] <==",map.size,new_buffer); + + self->m_customQueueHandle->sendData(static_cast(new_buffer)); + + // Unmap and cleanup + gst_buffer_unmap(buffer, &map); + gst_sample_unref(sample); + + return GST_FLOW_OK; +} + +/* called when we get a GstMessage from the source pipeline when we get EOS, we + * notify the appsrc of it. 
*/ +gboolean MiracastGstPlayer::appendPipelineBusMessage(GstBus * bus, GstMessage * message, gpointer userdata) +{ + MiracastGstPlayer *self = static_cast(userdata); + GstElement *source; + MIRACASTLOG_TRACE("Entering..."); + switch (GST_MESSAGE_TYPE (message)) + { + case GST_MESSAGE_EOS: + { + MIRACASTLOG_INFO ("The source got dry"); + source = gst_bin_get_by_name (GST_BIN (self->m_append_pipeline), "miracast_appsink"); + gst_app_src_end_of_stream (GST_APP_SRC (source)); + gst_object_unref (source); + } + break; + case GST_MESSAGE_ERROR: + { + GError *error; + gchar *info; + gst_message_parse_error(message, &error, &info); + MIRACASTLOG_ERROR("#### GST-FAIL Error received from element [%s | %s | %s] ####", GST_OBJECT_NAME(message->src), error->message, info ? info : "none"); + g_error_free(error); + g_free(info); + GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_append_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, "miracast_udpsrc2appsink_error"); + gst_element_set_state(self->m_append_pipeline, GST_STATE_READY); + self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_STOPPED,MIRACAST_PLAYER_REASON_CODE_GST_ERROR); + } + break; + case GST_MESSAGE_STATE_CHANGED: + { + GstState old, now, pending; + gst_message_parse_state_changed(message, &old, &now, &pending); + MIRACASTLOG_VERBOSE("[GST_MESSAGE_STATE_CHANGED] Element [%s], Pipeline state change from Old [%s] -> New [%s] and Pending state is [%s]", + GST_ELEMENT_NAME(GST_MESSAGE_SRC(message)), + gst_element_state_get_name(old), + gst_element_state_get_name(now), + gst_element_state_get_name(pending)); + + if (GST_MESSAGE_SRC(message) == GST_OBJECT(self->m_append_pipeline)) + { + char fileName[128] = {0}; + static int playbin_id = 0; + playbin_id++; + snprintf( fileName, + sizeof(fileName), + "MiracastUdpsrcAppsink_%s_%s_%s_%s_DBG", + GST_OBJECT_NAME(self->m_append_pipeline), + std::to_string(playbin_id).c_str(), + gst_element_state_get_name(old), + gst_element_state_get_name(now)); + GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_append_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, fileName); + } + } + break; + default: + break; + } + MIRACASTLOG_TRACE("Exiting..."); + return TRUE; +} + +/* called when we get a GstMessage from the sink pipeline when we get EOS, we + * exit the mainloop and this testapp. */ +gboolean MiracastGstPlayer::playbinPipelineBusMessage (GstBus * bus, GstMessage * message, gpointer userdata) +{ + MiracastGstPlayer *self = static_cast(userdata); + + switch (GST_MESSAGE_TYPE(message)) + { + case GST_MESSAGE_EOS: + case GST_MESSAGE_ERROR: + { + if (GST_MESSAGE_EOS == GST_MESSAGE_TYPE(message)) + { + MIRACASTLOG_INFO("Finished playback"); + } + else + { + GError *error; + gchar *info; + gst_message_parse_error(message, &error, &info); + MIRACASTLOG_ERROR("#### GST-FAIL Error received from element [%s | %s | %s] ####", GST_OBJECT_NAME(message->src), error->message, info ? 
info : "none"); + g_error_free(error); + g_free(info); + } + GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_playbin_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, "miracast_playbin2appSrc_error"); + self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_STOPPED,MIRACAST_PLAYER_REASON_CODE_GST_ERROR); + } + break; + case GST_MESSAGE_STATE_CHANGED: + { + GstState old, now, pending; + gst_message_parse_state_changed(message, &old, &now, &pending); + MIRACASTLOG_VERBOSE("[GST_MESSAGE_STATE_CHANGED] Element [%s], Pipeline state change from Old [%s] -> New [%s] and Pending state is [%s]", + GST_ELEMENT_NAME(GST_MESSAGE_SRC(message)), + gst_element_state_get_name(old), + gst_element_state_get_name(now), + gst_element_state_get_name(pending)); + + if (GST_MESSAGE_SRC(message) == GST_OBJECT(self->m_playbin_pipeline)) + { + char fileName[128] = {0}; + static int playbin_id = 0; + playbin_id++; + snprintf( fileName, + sizeof(fileName), + "MiracastPlaybinAppsrc_%s_%s_%s_%s_DBG", + GST_OBJECT_NAME(self->m_playbin_pipeline), + std::to_string(playbin_id).c_str(), + gst_element_state_get_name(old), + gst_element_state_get_name(now)); + GST_DEBUG_BIN_TO_DOT_FILE((GstBin *)self->m_playbin_pipeline, GST_DEBUG_GRAPH_SHOW_ALL, fileName); + } + } + break; + case GST_MESSAGE_BUFFERING: + { + gint percent = 0; + gst_message_parse_buffering(message, &percent); + MIRACASTLOG_VERBOSE("Buffering [%3d%%].", percent); + + /* Wait until buffering is complete before start/resume playing */ + if (percent < 100) + { + gst_element_set_state(self->m_playbin_pipeline, GST_STATE_PAUSED); + } + else + { + gst_element_set_state(self->m_playbin_pipeline, GST_STATE_PLAYING); + } + } + break; + case GST_MESSAGE_TAG: + { + MIRACASTLOG_VERBOSE("!!!! GST_MESSAGE_TAG !!!!"); + } + break; + case GST_MESSAGE_CLOCK_LOST: + { + MIRACASTLOG_VERBOSE("!!!! 
GST_MESSAGE_CLOCK_LOST !!!!"); + /* The current clock as selected by the pipeline became unusable, then select a new clock */ + gst_element_set_state(self->m_playbin_pipeline, GST_STATE_PAUSED); + gst_element_set_state(self->m_playbin_pipeline, GST_STATE_PLAYING); + } + break; + case GST_MESSAGE_QOS: + { + MIRACASTLOG_VERBOSE("Received [%s], a buffer was dropped or an element changed its processing strategy for Quality of Service reasons.", gst_message_type_get_name(message->type)); + GstFormat format; + guint64 processed; + guint64 dropped; + gst_message_parse_qos_stats(message, &format, &processed, &dropped); + MIRACASTLOG_VERBOSE("Format [%s], Processed [%lu], Dropped [%lu].", gst_format_get_name(format), processed, dropped); + + gint64 jitter; + gdouble proportion; + gint quality; + gst_message_parse_qos_values(message, &jitter, &proportion, &quality); + MIRACASTLOG_VERBOSE("Jitter [%lu], Proportion [%lf], Quality [%u].", jitter, proportion, quality); + + gboolean live; + guint64 running_time; + guint64 stream_time; + guint64 timestamp; + guint64 duration; + gst_message_parse_qos(message, &live, &running_time, &stream_time, ×tamp, &duration); + MIRACASTLOG_VERBOSE("live stream [%d], runninng_time [%lu], stream_time [%lu], timestamp [%lu], duration [%lu].", live, running_time, stream_time, timestamp, duration); + } + break; + default: + break; + } + return TRUE; +} + +gboolean MiracastGstPlayer::pushBufferToAppsrc(gpointer userdata) +{ + MIRACASTLOG_TRACE("Entering..."); + MiracastGstPlayer *self = static_cast(userdata); + void* buffer; + GstBuffer *gstBuffer; + self->m_customQueueHandle->ReceiveData(buffer); + + gstBuffer = static_cast(buffer); + + if (nullptr != gstBuffer) + { + // Push the new buffer to appsrc + GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(self->m_appsrc), gstBuffer); + if (ret != GST_FLOW_OK) + { + MIRACASTLOG_ERROR("Error pushing buffer to appsrc"); + } + } + MIRACASTLOG_TRACE("Exiting..."); + return TRUE; +} + +// Module functions +void MiracastGstPlayer::gst_bin_need_data(GstAppSrc *src, guint length, gpointer userdata) +{ + MIRACASTLOG_TRACE("Entering..."); + MiracastGstPlayer *self = static_cast(userdata); + MIRACASTLOG_INFO("AppSrc empty"); + if ((self->m_sourceId == 0) && (false == self->m_destroyTimer)) + { + MIRACASTLOG_INFO("start feeding\n"); + self->m_sourceId = g_idle_add((GSourceFunc)pushBufferToAppsrc, self); + } + MIRACASTLOG_TRACE("Exiting..."); + return; +} + +void MiracastGstPlayer::gst_bin_enough_data(GstAppSrc *src, gpointer userdata) +{ + MiracastGstPlayer *self = static_cast(userdata); + MIRACASTLOG_INFO("AppSrc Full!!!!"); + if (self->m_sourceId != 0) + { + MIRACASTLOG_INFO("stop feeding\n"); + g_source_remove(self->m_sourceId); + self->m_sourceId = 0; + } + return; +} + +/* This function is called when playbin2 has created the appsrc element, so we have + * a chance to configure it. */ +void MiracastGstPlayer::source_setup(GstElement *pipeline, GstElement *source, gpointer userdata) +{ + MiracastGstPlayer *self = static_cast(userdata); + MIRACASTLOG_INFO("Entering..."); + MIRACASTLOG_INFO("Source has been created. 
Configuring [%x]",source); + self->m_appsrc = source; + // Set AppSrc parameters + GstAppSrcCallbacks callbacks = {gst_bin_need_data, gst_bin_enough_data, NULL}; + gst_app_src_set_callbacks(GST_APP_SRC(self->m_appsrc), &callbacks, (gpointer)(self), NULL); + g_object_set(GST_APP_SRC(self->m_appsrc), "max-bytes", (guint64) 20 * 1024 * 1024, NULL); + + g_object_set(GST_APP_SRC(self->m_appsrc), "format", GST_FORMAT_TIME, NULL); + g_object_set(GST_APP_SRC(self->m_appsrc), "is-live", true, NULL); + const gchar *set_cap = "video/mpegts, systemstream=(boolean)true, packetsize=(int)188"; + GstCaps *caps = gst_caps_from_string (set_cap); + g_object_set(GST_APP_SRC(self->m_appsrc), "caps", caps, NULL); + if(caps) { + self->m_capsSrc = caps; + } + MIRACASTLOG_INFO("Exiting... "); +} + +void MiracastGstPlayer::gstBufferReleaseCallback(void* userParam) +{ + GstBuffer *gstBuffer; + gstBuffer = static_cast(userParam); + + if (nullptr != gstBuffer) + { + MIRACASTLOG_INFO("gstBuffer[%x]",gstBuffer); + gst_buffer_unref(gstBuffer); + } +} + +bool MiracastGstPlayer::createPipeline() +{ + MIRACASTLOG_TRACE("Entering..!!!"); + GstStateChangeReturn ret; + GstBus *bus = nullptr; + bool return_value = true; + m_customQueueHandle = new MessageQueue(100,gstBufferReleaseCallback); + + if (nullptr == m_customQueueHandle) + { + MIRACASTLOG_ERROR("Failed to create MessageQueue"); + return false; + } + + /* create gst pipeline */ + m_main_loop_context = g_main_context_new(); + g_main_context_push_thread_default(m_main_loop_context); + m_main_loop = g_main_loop_new(m_main_loop_context, FALSE); + + MIRACASTLOG_INFO("Creating Pipeline..."); + + // Create a new pipeline + m_append_pipeline = gst_pipeline_new("miracast_data_collector"); + // Create elements + m_udpsrc = gst_element_factory_make("udpsrc", "miracast_udpsrc"); + m_rtpjitterbuffer = gst_element_factory_make("rtpjitterbuffer", "miracast_rtpjitterbuffer"); + m_rtpmp2tdepay = gst_element_factory_make("rtpmp2tdepay", "miracast_rtpmp2tdepay"); + m_tsparse = gst_element_factory_make("tsparse", "miracast_tsparse"); + m_Queue = gst_element_factory_make("queue", "miracast_queue"); + m_appsink = gst_element_factory_make("appsink", "miracast_appsink"); + m_video_sink = gst_element_factory_make("westerossink", "miracast_westerossink"); + m_audio_sink = Soc_CreateAudioHALSinkProperty(); + + if (!m_append_pipeline || !m_udpsrc || !m_rtpjitterbuffer || !m_rtpmp2tdepay || + !m_tsparse || !m_Queue || !m_appsink || !m_video_sink || !m_audio_sink ) + { + MIRACASTLOG_ERROR("Append Pipeline[%x]: Element creation failure, check below",m_append_pipeline); + MIRACASTLOG_WARNING("udpsrc[%x]rtpjitterbuffer[%x]rtpmp2tdepay[%x]Queue[%x]", + m_udpsrc,m_rtpjitterbuffer,m_rtpmp2tdepay,m_Queue); + MIRACASTLOG_WARNING("tsparse[%x]appsink[%x]videosink[%x]audiosink[%x]", + m_tsparse,m_appsink,m_video_sink,m_audio_sink); + return -1; + } + + /*{{{ udpsrc related element configuration*/ + MIRACASTLOG_TRACE(">>>>>>>udpsrc configuration start"); + MIRACASTLOG_TRACE("Set the port[%llu] and to udp source.",m_streaming_port); + g_object_set(G_OBJECT(m_udpsrc), "port", m_streaming_port, nullptr); + + GstCaps *caps = gst_caps_new_simple("application/x-rtp", "media", G_TYPE_STRING, "video", nullptr); + if (caps) + { + g_object_set(m_udpsrc, "caps", caps, nullptr); + gst_caps_unref(caps); + MIRACASTLOG_TRACE("Set the caps to udp source."); + } + else + { + MIRACASTLOG_ERROR("Unable to Set caps to udp source."); + } + MIRACASTLOG_TRACE("udpsrc configuration end<<<<<<<<"); + /*}}}*/ + + /*{{{ rtpjitterbuffer 
related element configuration*/ + MIRACASTLOG_TRACE(">>>>>>>rtpjitterbuffer configuration start"); + MIRACASTLOG_TRACE("Set the 'post-drop-messages' and 'do-lost' to rtpjitterbuffer."); + g_object_set(G_OBJECT(m_rtpjitterbuffer), "post-drop-messages", true, "do-lost" , true , nullptr ); + MIRACASTLOG_TRACE("rtpjitterbuffer configuration end<<<<<<<<"); + /*}}}*/ + + /*{{{ tsparse related element configuration*/ + MIRACASTLOG_TRACE(">>>>>>>tsparse configuration start"); + uint64_t packetsPerBuffer = 512; + MIRACASTLOG_TRACE("Set 'set-timestamps' to tsparse"); + g_object_set(G_OBJECT(m_tsparse), "set-timestamps", true, nullptr ); + MIRACASTLOG_TRACE("Set 'alignment' to tsparse"); + g_object_set(G_OBJECT(m_tsparse), "alignment", packetsPerBuffer, nullptr ); + MIRACASTLOG_TRACE("tsparse configuration end<<<<<<<<"); + /*}}}*/ + + /* to be notified of messages from this pipeline, mostly EOS */ + bus = gst_element_get_bus(m_append_pipeline); + gst_bus_add_watch(bus, (GstBusFunc)appendPipelineBusMessage, this); + gst_object_unref(bus); + + /*{{{ appsink related element configuration*/ + MIRACASTLOG_TRACE(">>>>>>>appsink configuration start"); + // Configure the appsink + g_object_set(G_OBJECT(m_appsink), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(m_appsink), "async", FALSE, NULL); + // Set up a signal handler for new buffer signals from appsink + g_signal_connect(G_OBJECT(m_appsink), "new-sample", G_CALLBACK(appendPipelineNewSampleHandler), this); + MIRACASTLOG_TRACE("appsink configuration end<<<<<<<<"); + /*}}}*/ + + // Add elements to the pipeline + gst_bin_add_many(GST_BIN(m_append_pipeline), + m_udpsrc, + m_rtpjitterbuffer, + m_Queue, + m_rtpmp2tdepay, + m_tsparse, + m_appsink, + nullptr ); + + if (!gst_element_link_many(m_udpsrc, + m_rtpjitterbuffer, + m_Queue, + m_rtpmp2tdepay, + m_tsparse, + m_appsink, + nullptr )) + { + MIRACASTLOG_ERROR("Elements (udpsrc->rtpjitterbuffer->queue->rtpmp2tdepay->tsparse->appsink) could not be linked"); + gst_object_unref(m_append_pipeline); + return -1; + } + + // Set up pipeline + m_playbin_pipeline = gst_element_factory_make("playbin", "miracast_playbin"); + if (!m_playbin_pipeline) + { + MIRACASTLOG_ERROR( "Failed to create pipeline."); + } + else + { + bus = gst_element_get_bus (m_playbin_pipeline); + gst_bus_add_watch (bus, (GstBusFunc) playbinPipelineBusMessage, this); + gst_object_unref (bus); + // Pipeline created + g_object_set(m_playbin_pipeline, "uri", "appsrc://", nullptr); + + g_signal_connect(m_playbin_pipeline, "source-setup", G_CALLBACK(source_setup), this); + + /*{{{ westerossink related element configuration*/ + MIRACASTLOG_TRACE(">>>>>>>westerossink configuration start"); + updateVideoSinkRectangle(); + + g_signal_connect(m_video_sink, "first-video-frame-callback",G_CALLBACK(onFirstVideoFrameCallback), (gpointer)this); + MIRACASTLOG_TRACE("westerossink configuration end<<<<<<<<"); + g_object_set(m_playbin_pipeline, "video-sink", m_video_sink, nullptr); + /*}}}*/ + + g_object_set(m_playbin_pipeline, "audio-sink", m_audio_sink, nullptr); + } + + g_main_context_pop_thread_default(m_main_loop_context); + pthread_create(&m_playback_thread, nullptr, MiracastGstPlayer::playbackThread, this); + pthread_create(&m_player_statistics_tid, nullptr, MiracastGstPlayer::monitor_player_statistics_thread, this); + + /* launching things */ + MIRACASTLOG_INFO("m_playbin_pipeline, GST_STATE_PLAYING"); + ret = gst_element_set_state(m_append_pipeline, GST_STATE_PLAYING); + ret = gst_element_set_state(m_playbin_pipeline, 
+
+    if (ret == GST_STATE_CHANGE_FAILURE)
+    {
+        MIRACASTLOG_ERROR("Unable to set the pipeline to the playing state.");
+        return_value = false;
+    }
+    else if (ret == GST_STATE_CHANGE_NO_PREROLL)
+    {
+        MIRACASTLOG_TRACE("Streaming live");
+        m_is_live = true;
+    }
+
+    MIRACASTLOG_TRACE("Exiting..!!!");
+    return return_value;
+}
+
+bool MiracastGstPlayer::stop()
+{
+    GstStateChangeReturn ret;
+    MIRACASTLOG_TRACE("Entering..");
+
+    if (!m_playbin_pipeline)
+    {
+        MIRACASTLOG_ERROR("Pipeline is NULL");
+        return false;
+    }
+    m_destroyTimer = true;
+    if (m_sourceId != 0)
+    {
+        MIRACASTLOG_INFO("remove Timer");
+        g_source_remove(m_sourceId);
+        m_sourceId = 0;
+    }
+    ret = gst_element_set_state(m_playbin_pipeline, GST_STATE_NULL);
+    if (ret == GST_STATE_CHANGE_FAILURE)
+    {
+        MIRACASTLOG_ERROR("Failed to set gst_element_set_state as NULL");
+    }
+    ret = gst_element_set_state(m_append_pipeline, GST_STATE_NULL);
+    if (ret == GST_STATE_CHANGE_FAILURE)
+    {
+        MIRACASTLOG_ERROR("Failed to set gst_element_set_state as NULL");
+    }
+
+    if (m_main_loop)
+    {
+        g_main_loop_quit(m_main_loop);
+    }
+    if (m_playback_thread)
+    {
+        pthread_join(m_playback_thread,nullptr);
+    }
+    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_append_pipeline));
+    if (bus)
+    {
+        gst_bus_set_sync_handler(bus, nullptr, nullptr, nullptr);
+        gst_object_unref(bus);
+    }
+
+    bus = gst_pipeline_get_bus(GST_PIPELINE(m_playbin_pipeline));
+    if (bus)
+    {
+        gst_bus_set_sync_handler(bus, nullptr, nullptr, nullptr);
+        gst_object_unref(bus);
+    }
+
+    if (m_audio_sink)
+    {
+        gst_object_unref(m_audio_sink);
+        m_audio_sink = nullptr;
+    }
+    if (m_audioconvert)
+    {
+        gst_object_unref(m_audioconvert);
+        m_audioconvert = nullptr;
+    }
+    if (m_avdec_aac)
+    {
+        gst_object_unref(m_avdec_aac);
+        m_avdec_aac = nullptr;
+    }
+    if (m_aacparse)
+    {
+        gst_object_unref(m_aacparse);
+        m_aacparse = nullptr;
+    }
+    if (m_aQueue)
+    {
+        gst_object_unref(m_aQueue);
+        m_aQueue = nullptr;
+    }
+    if (m_video_sink)
+    {
+        gst_object_unref(m_video_sink);
+        m_video_sink = nullptr;
+    }
+    if (m_h264parse)
+    {
+        gst_object_unref(m_h264parse);
+        m_h264parse = nullptr;
+    }
+    if (m_vQueue)
+    {
+        gst_object_unref(m_vQueue);
+        m_vQueue = nullptr;
+    }
+    if (m_tsdemux)
+    {
+        gst_object_unref(m_tsdemux);
+        m_tsdemux = nullptr;
+    }
+    if (m_tsparse)
+    {
+        gst_object_unref(m_tsparse);
+        m_tsparse = nullptr;
+    }
+    if (m_rtpmp2tdepay)
+    {
+        gst_object_unref(m_rtpmp2tdepay);
+        m_rtpmp2tdepay = nullptr;
+    }
+    if (m_rtpjitterbuffer)
+    {
+        gst_object_unref(m_rtpjitterbuffer);
+        m_rtpjitterbuffer = nullptr;
+    }
+    if (m_udpsrc)
+    {
+        gst_object_unref(m_udpsrc);
+        m_udpsrc = nullptr;
+    }
+    if (m_main_loop)
+    {
+        g_main_loop_unref(m_main_loop);
+        m_main_loop = nullptr;
+    }
+    if (m_main_loop_context)
+    {
+        g_main_context_unref(m_main_loop_context);
+        m_main_loop_context = nullptr;
+    }
+    if (m_append_pipeline)
+    {
+        g_object_unref(m_append_pipeline);
+        m_append_pipeline = nullptr;
+    }
+    if (m_playbin_pipeline)
+    {
+        g_object_unref(m_playbin_pipeline);
+        m_playbin_pipeline = nullptr;
+    }
+    if (m_capsSrc)
+    {
+        gst_caps_unref(m_capsSrc);
+        m_capsSrc = nullptr;
+    }
+
+    if (m_customQueueHandle)
+    {
+        delete m_customQueueHandle;
+        m_customQueueHandle = nullptr;
+    }
+    MIRACASTLOG_TRACE("Exiting..");
+    return true;
+}
\ No newline at end of file
diff --git a/Miracast/MiracastPlayer/SoC_GstPlayer.h b/Miracast/MiracastPlayer/MiracastGstPlayer.h
similarity index 66%
rename from Miracast/MiracastPlayer/SoC_GstPlayer.h
rename to Miracast/MiracastPlayer/MiracastGstPlayer.h
index fea9953e48..18fe2e90a3 100644
--- a/Miracast/MiracastPlayer/SoC_GstPlayer.h
+++ b/Miracast/MiracastPlayer/MiracastGstPlayer.h
@@ -17,8 +17,8 @@
  * limitations under the License.
  **/
 
-#ifndef _MIRACAST_PLAYER_H_
-#define _MIRACAST_PLAYER_H_
+#ifndef _MIRACAST_GST_PLAYER_H_
+#define _MIRACAST_GST_PLAYER_H_
 
 #include
 #include
@@ -29,10 +29,10 @@
 #include
 #include
 
-class SoC_GstPlayer
+class MiracastGstPlayer
 {
 public:
-    static SoC_GstPlayer *getInstance();
+    static MiracastGstPlayer *getInstance();
     static void destroyInstance();
     bool launch(std::string& localip , std::string& streaming_port,MiracastRTSPMsg *rtsp_instance);
     bool stop();
@@ -47,11 +47,18 @@ class SoC_GstPlayer
     void print_pipeline_state(GstElement *pipeline = nullptr);
 
 private:
-    GstElement *m_pipeline{nullptr};
+    GstElement *m_append_pipeline{nullptr};
     GstElement *m_udpsrc{nullptr};
-    GstElement *m_rtpmp2tdepay{nullptr};
     GstElement *m_rtpjitterbuffer{nullptr};
-    GstElement *m_tsparse{nullptr};
+    GstElement *m_rtpmp2tdepay{nullptr};
+    GstElement *m_Queue{nullptr};
+    GstElement *m_tsparse{nullptr};
+    GstElement *m_appsink{nullptr};
+
+    GstElement *m_playbin_pipeline{nullptr};
+    GstElement *m_appsrc{nullptr};
+    GstCaps *m_capsSrc{nullptr};
+
     GstElement *m_tsdemux{nullptr};
     GstElement *m_vQueue{nullptr};
     GstElement *m_h264parse{nullptr};
@@ -59,8 +66,13 @@ class SoC_GstPlayer
     GstElement *m_aacparse{nullptr};
     GstElement *m_avdec_aac{nullptr};
     GstElement *m_audioconvert{nullptr};
+
     bool m_firstVideoFrameReceived{false};
+    bool m_destroyTimer{false};
+    guint m_sourceId{0};
+
     MiracastRTSPMsg *m_rtsp_reference_instance{nullptr};
+    MessageQueue* m_customQueueHandle{nullptr};
 
     std::string m_uri;
     guint64 m_streaming_port;
@@ -75,18 +87,17 @@ class SoC_GstPlayer
     pthread_t m_playback_thread{0};
     VIDEO_RECT_STRUCT m_video_rect_st;
 
-    static SoC_GstPlayer *m_GstPlayer;
-    SoC_GstPlayer();
-    virtual ~SoC_GstPlayer();
-    SoC_GstPlayer &operator=(const SoC_GstPlayer &) = delete;
-    SoC_GstPlayer(const SoC_GstPlayer &) = delete;
+    static MiracastGstPlayer *m_GstPlayer;
+    MiracastGstPlayer();
+    virtual ~MiracastGstPlayer();
+    MiracastGstPlayer &operator=(const MiracastGstPlayer &) = delete;
+    MiracastGstPlayer(const MiracastGstPlayer &) = delete;
 
     bool createPipeline();
     bool updateVideoSinkRectangle(void);
     static void onFirstVideoFrameCallback(GstElement* object, guint arg0, gpointer arg1,gpointer userdata);
     void notifyPlaybackState(eMIRA_GSTPLAYER_STATES gst_player_state, eM_PLAYER_REASON_CODE state_reason_code = MIRACAST_PLAYER_REASON_CODE_SUCCESS );
-    static gboolean busMessageCb(GstBus *bus, GstMessage *msg, gpointer user_data);
-    bool changePipelineState(GstState state) const;
+    bool changePipelineState(GstElement* pipeline, GstState state) const;
     static void *playbackThread(void *ctx);
 
     GMainLoop *m_main_loop{nullptr};
@@ -96,7 +107,15 @@ class SoC_GstPlayer
     pthread_t m_player_statistics_tid{0};
     static void *monitor_player_statistics_thread(void *ctx);
-    static void pad_added_handler(GstElement *gstelement, GstPad *new_pad, gpointer userdata);
+
+    static GstFlowReturn appendPipelineNewSampleHandler(GstElement *elt, gpointer userdata);
+    static gboolean appendPipelineBusMessage(GstBus * bus, GstMessage * message, gpointer userdata);
+    static gboolean playbinPipelineBusMessage (GstBus * bus, GstMessage * message, gpointer userdata);
+    static gboolean pushBufferToAppsrc(gpointer userdata);
+    static void gst_bin_need_data(GstAppSrc *src, guint length, gpointer user_data);
+    static void gst_bin_enough_data(GstAppSrc *src, gpointer user_data);
+    static void source_setup(GstElement *pipeline, GstElement *source, gpointer userdata);
+    static void gstBufferReleaseCallback(void* userParam);
 
 };
 
-#endif /* SoC_GstPlayer_hpp */
+#endif /* _MIRACAST_GST_PLAYER_H_ */
\ No newline at end of file
diff --git a/Miracast/MiracastPlayer/MiracastPlayer.cpp b/Miracast/MiracastPlayer/MiracastPlayer.cpp
index a59037444a..cdd70f6d65 100644
--- a/Miracast/MiracastPlayer/MiracastPlayer.cpp
+++ b/Miracast/MiracastPlayer/MiracastPlayer.cpp
@@ -119,7 +119,7 @@ namespace WPEFramework
             if (nullptr != m_miracast_rtsp_obj)
             {
                 m_CurrentService = service;
-                m_GstPlayer = SoC_GstPlayer::getInstance();
+                m_GstPlayer = MiracastGstPlayer::getInstance();
                 m_isServiceInitialized = true;
             }
             else
diff --git a/Miracast/MiracastPlayer/MiracastPlayer.h b/Miracast/MiracastPlayer/MiracastPlayer.h
index c7b6bcf190..095f4e0537 100644
--- a/Miracast/MiracastPlayer/MiracastPlayer.h
+++ b/Miracast/MiracastPlayer/MiracastPlayer.h
@@ -24,7 +24,7 @@
 #include "Module.h"
 #include "MiracastRTSPMsg.h"
-#include "SoC_GstPlayer.h"
+#include "MiracastGstPlayer.h"
 
 using std::vector;
 namespace WPEFramework
@@ -88,7 +88,7 @@ namespace WPEFramework
             VIDEO_RECT_STRUCT m_video_sink_rect;
             bool m_isServiceInitialized;
             bool m_isServiceEnabled;
-            SoC_GstPlayer *m_GstPlayer;
+            MiracastGstPlayer *m_GstPlayer;
             MiracastRTSPMsg *m_rtsp_msg;
 
             uint32_t playRequest(const JsonObject &parameters, JsonObject &response);
diff --git a/Miracast/MiracastPlayer/RTSP/MiracastRTSPMsg.cpp b/Miracast/MiracastPlayer/RTSP/MiracastRTSPMsg.cpp
index 6918e525e1..4acd3a5bed 100644
--- a/Miracast/MiracastPlayer/RTSP/MiracastRTSPMsg.cpp
+++ b/Miracast/MiracastPlayer/RTSP/MiracastRTSPMsg.cpp
@@ -18,7 +18,7 @@
  */
 
 #include
-#include <SoC_GstPlayer.h>
+#include <MiracastGstPlayer.h>
 
 MiracastRTSPMsg *MiracastRTSPMsg::m_rtsp_msg_obj{nullptr};
 static std::string empty_string = "";
@@ -1852,16 +1852,16 @@ RTSP_STATUS MiracastRTSPMsg::rtsp_sink2src_request_msg_handling(eCONTROLLER_FW_S
             status_code = send_rtsp_reply_sink2src( request_mode );
             if ( RTSP_MSG_SUCCESS == status_code ){
-                SoC_GstPlayer *SoC_GstPlayerObj = SoC_GstPlayer::getInstance();
+                MiracastGstPlayer *MiracastGstPlayerObj = MiracastGstPlayer::getInstance();
 
                 if ( RTSP_MSG_FMT_PLAY_REQUEST == request_mode )
                 {
-                    SoC_GstPlayerObj->resume();
+                    MiracastGstPlayerObj->resume();
                     MIRACASTLOG_INFO("GstPlayback resumed...");
                 }
                 else if ( RTSP_MSG_FMT_PAUSE_REQUEST == request_mode )
                 {
-                    SoC_GstPlayerObj->pause();
+                    MiracastGstPlayerObj->pause();
                     MIRACASTLOG_INFO("GstPlayback paused...");
                 }
             }
@@ -1944,9 +1944,9 @@ MiracastError MiracastRTSPMsg::start_streaming( VIDEO_RECT_STRUCT video_rect )
         }
         else
         {
-            SoC_GstPlayer *SoC_GstPlayerObj = SoC_GstPlayer::getInstance();
-            SoC_GstPlayerObj->setVideoRectangle( video_rect );
-            SoC_GstPlayerObj->launch(m_sink_ip, m_wfd_streaming_port ,this);
+            MiracastGstPlayer *MiracastGstPlayerObj = MiracastGstPlayer::getInstance();
+            MiracastGstPlayerObj->setVideoRectangle( video_rect );
+            MiracastGstPlayerObj->launch(m_sink_ip, m_wfd_streaming_port ,this);
         }
     }
     m_streaming_started = true;
@@ -1965,14 +1965,14 @@ MiracastError MiracastRTSPMsg::stop_streaming( eMIRA_PLAYER_STATES state )
 {
     if (MIRACAST_PLAYER_STATE_SELF_ABORT == state)
     {
-        SoC_GstPlayer::destroyInstance();
-        MIRACASTLOG_INFO("SoC_GstPlayer instance destroyed...");
+        MiracastGstPlayer::destroyInstance();
+        MIRACASTLOG_INFO("MiracastGstPlayer instance destroyed...");
     }
     else
     {
-        SoC_GstPlayer *SoC_GstPlayerObj = SoC_GstPlayer::getInstance();
-        SoC_GstPlayerObj->stop();
-        MIRACASTLOG_INFO("SoC_GstPlayer instance stopped...");
+        MiracastGstPlayer *MiracastGstPlayerObj = MiracastGstPlayer::getInstance();
+        MiracastGstPlayerObj->stop();
+        MIRACASTLOG_INFO("MiracastGstPlayer instance stopped...");
     }
     m_streaming_started = false;
 }
@@ -1985,8 +1985,8 @@ MiracastError MiracastRTSPMsg::updateVideoRectangle( VIDEO_RECT_STRUCT videorect
 {
     MIRACASTLOG_TRACE("Entering...");
 
-    SoC_GstPlayer *SoC_GstPlayerObj = SoC_GstPlayer::getInstance();
-    SoC_GstPlayerObj->setVideoRectangle( videorect , true );
+    MiracastGstPlayer *MiracastGstPlayerObj = MiracastGstPlayer::getInstance();
+    MiracastGstPlayerObj->setVideoRectangle( videorect , true );
 
     MIRACASTLOG_TRACE("Exiting...");
     return MIRACAST_OK;
diff --git a/Miracast/MiracastPlayer/Test/SoC_GstPlayer.cpp b/Miracast/MiracastPlayer/Test/MiracastGstPlayer.cpp
similarity index 72%
rename from Miracast/MiracastPlayer/Test/SoC_GstPlayer.cpp
rename to Miracast/MiracastPlayer/Test/MiracastGstPlayer.cpp
index 7417958ddf..355fb26f6b 100644
--- a/Miracast/MiracastPlayer/Test/SoC_GstPlayer.cpp
+++ b/Miracast/MiracastPlayer/Test/MiracastGstPlayer.cpp
@@ -27,20 +27,20 @@
 #include
 #include "MiracastLogger.h"
 #include "MiracastRTSPMsg.h"
-#include "SoC_GstPlayer.h"
+#include "MiracastGstPlayer.h"
 
-SoC_GstPlayer *SoC_GstPlayer::m_GstPlayer{nullptr};
+MiracastGstPlayer *MiracastGstPlayer::m_GstPlayer{nullptr};
 
-SoC_GstPlayer *SoC_GstPlayer::getInstance()
+MiracastGstPlayer *MiracastGstPlayer::getInstance()
 {
     if (m_GstPlayer == nullptr)
     {
-        m_GstPlayer = new SoC_GstPlayer();
+        m_GstPlayer = new MiracastGstPlayer();
     }
     return m_GstPlayer;
 }
 
-void SoC_GstPlayer::destroyInstance()
+void MiracastGstPlayer::destroyInstance()
 {
     MIRACASTLOG_TRACE("Entering...");
     if (m_GstPlayer != nullptr)
@@ -52,15 +52,15 @@ void SoC_GstPlayer::destroyInstance()
     MIRACASTLOG_TRACE("Exiting...");
 }
 
-SoC_GstPlayer::SoC_GstPlayer()
+MiracastGstPlayer::MiracastGstPlayer()
 {
 }
 
-SoC_GstPlayer::~SoC_GstPlayer()
+MiracastGstPlayer::~MiracastGstPlayer()
 {
 }
 
-bool SoC_GstPlayer::setVideoRectangle( VIDEO_RECT_STRUCT video_rect , bool apply )
+bool MiracastGstPlayer::setVideoRectangle( VIDEO_RECT_STRUCT video_rect , bool apply )
 {
     MIRACASTLOG_TRACE("Entering...");
     MIRACASTLOG_TRACE("Exiting Coords[%d,%d,%d,%d]Apply[%x]...",
@@ -69,7 +69,7 @@ bool SoC_GstPlayer::setVideoRectangle( VIDEO_RECT_STRUCT video_rect , bool apply
     return true;
 }
 
-bool SoC_GstPlayer::launch(std::string& localip , std::string& streaming_port, MiracastRTSPMsg *rtsp_instance)
+bool MiracastGstPlayer::launch(std::string& localip , std::string& streaming_port, MiracastRTSPMsg *rtsp_instance)
 {
     if ( nullptr != rtsp_instance )
     {
@@ -79,33 +79,33 @@ bool SoC_GstPlayer::launch(std::string& localip , std::string& streaming_port, M
     return true;
 }
 
-bool SoC_GstPlayer::pause()
+bool MiracastGstPlayer::pause()
 {
     return true;
 }
 
-bool SoC_GstPlayer::resume()
+bool MiracastGstPlayer::resume()
 {
     return true;
 }
 
-bool SoC_GstPlayer::stop()
+bool MiracastGstPlayer::stop()
 {
     destroyInstance();
     return true;
 }
 
-void SoC_GstPlayer::onFirstVideoFrameCallback(GstElement* object, guint arg0, gpointer arg1,gpointer userdata)
+void MiracastGstPlayer::onFirstVideoFrameCallback(GstElement* object, guint arg0, gpointer arg1,gpointer userdata)
 {
     MIRACASTLOG_TRACE("Entering..!!!");
-    SoC_GstPlayer *self = static_cast<SoC_GstPlayer *>(userdata);
+    MiracastGstPlayer *self = static_cast<MiracastGstPlayer *>(userdata);
     self->m_firstVideoFrameReceived = true;
     MIRACASTLOG_INFO("!!! First Video Frame has received !!!");
     self->notifyPlaybackState(MIRACAST_GSTPLAYER_STATE_FIRST_VIDEO_FRAME_RECEIVED);
     MIRACASTLOG_TRACE("Exiting..!!!");
 }
 
-void SoC_GstPlayer::notifyPlaybackState(eMIRA_GSTPLAYER_STATES gst_player_state, eM_PLAYER_REASON_CODE state_reason_code )
+void MiracastGstPlayer::notifyPlaybackState(eMIRA_GSTPLAYER_STATES gst_player_state, eM_PLAYER_REASON_CODE state_reason_code )
 {
     MIRACASTLOG_TRACE("Entering..!!!");
     if ( nullptr != m_rtsp_reference_instance )
diff --git a/Miracast/common/MiracastCommon.cpp b/Miracast/common/MiracastCommon.cpp
index 1605191aaf..f217223971 100644
--- a/Miracast/common/MiracastCommon.cpp
+++ b/Miracast/common/MiracastCommon.cpp
@@ -292,4 +292,94 @@ bool MiracastCommon::execute_PopenCommand( const char* popen_command, const char
     }
     MIRACASTLOG_TRACE("Exiting ...");
     return returnValue;
+}
+
+MessageQueue::MessageQueue(int queueSize,void (*free_cb)(void *param))
+{
+    std::cout << "[ctor] " << std::endl;
+    m_currentMsgCount = 0;
+    m_maxMsgCount = queueSize;
+    m_free_resource_cb = free_cb;
+}
+
+MessageQueue::~MessageQueue(void)
+{
+    std::cout << "[dtor] " << std::endl;
+    {
+        std::lock_guard<std::mutex> lk(mutexSync);
+        m_isDestructing = true;
+    }
+    m_condNotEmpty.notify_all();
+    m_condNotFull.notify_all();
+
+    std::unique_lock<std::mutex> lk(mutexSync);
+    void* userParam = nullptr;
+    while (!m_internalQueue.empty())
+    {
+        userParam = m_internalQueue.front();
+        if (nullptr != m_free_resource_cb)
+        {
+            std::cout << "[dtor] asked to free : " << userParam << std::endl;
+            m_free_resource_cb(userParam);
+        }
+        m_internalQueue.pop();
+        m_currentMsgCount--;
+    }
+    std::cout << "[dtor] done" << std::endl;
+}
+
+void MessageQueue::sendData(void* new_value,int wait_time_ms)
+{
+    std::unique_lock<std::mutex> lk(mutexSync);
+    // Wait if the queue is full
+    if (!m_condNotFull.wait_for(lk, std::chrono::milliseconds(wait_time_ms), [this] { return (( m_currentMsgCount < m_maxMsgCount ) || m_isDestructing ) ; }))
+    {
+        if (m_isDestructing)
+        {
+            std::cout << "[sendData] skipped due to m_isDestructing" << std::endl;
+        }
+        else
+        {
+            std::cout << "[sendData] Timeout occurred while waiting to send data." << std::endl;
+        }
+        return;
+    }
+
+    if (m_isDestructing){
+        return;
+    }
+    m_internalQueue.push(new_value);
+    m_currentMsgCount++;
+    std::cout << "[sendData] data at address: " << new_value << std::endl;
+    // Notify consumer that new data is available
+    m_condNotEmpty.notify_one();
+}
+
+void MessageQueue::ReceiveData(void*& value,int wait_time_ms)
+{
+    std::unique_lock<std::mutex> lk(mutexSync);
+    // Wait if the queue is empty
+    if (!m_condNotEmpty.wait_for(lk, std::chrono::milliseconds(wait_time_ms), [this] { return (( !m_internalQueue.empty()) || m_isDestructing ); }))
+    {
+        if (m_isDestructing)
+        {
+            std::cout << "[receiveData] skipped due to m_isDestructing" << std::endl;
+        }
+        else
+        {
+            std::cout << "[receiveData] Timeout occurred while waiting to receive data." << std::endl;
+        }
+        return; // Timeout occurred
+    }
+
+    if (m_isDestructing){
+        return;
+    }
+
+    value = m_internalQueue.front();
+    m_internalQueue.pop();
+    m_currentMsgCount--;
+    std::cout << "[ReceiveData] data at address: " << value << std::endl;
+    // Notify producer that space is available
+    m_condNotFull.notify_one();
 }
\ No newline at end of file
diff --git a/Miracast/common/MiracastCommon.h b/Miracast/common/MiracastCommon.h
index 0b2bb2b088..d396fae052 100644
--- a/Miracast/common/MiracastCommon.h
+++ b/Miracast/common/MiracastCommon.h
@@ -29,6 +29,10 @@
 #include
 #include
 #include
+#include
+#include
+#include
+#include
 #include
 
 using namespace std;
@@ -366,4 +370,25 @@ class MiracastCommon
     static bool execute_PopenCommand( const char* popen_command, const char* expected_char, unsigned int retry_count, std::string& popen_buffer, unsigned int interval_micro_sec );
 };
 
-#endif
+#define DEFAULT_MSGQ_WAIT_TIME_MS (3000*1000)
+
+class MessageQueue
+{
+private:
+    std::mutex mutexSync;
+    std::queue<void*> m_internalQueue;   // Queue to hold void* data
+    std::condition_variable m_condNotEmpty;
+    std::condition_variable m_condNotFull;
+    int m_currentMsgCount;               // Guard with Mutex, keep track of queue size
+    int m_maxMsgCount{5};                // Maximum size of the queue
+    void (*m_free_resource_cb)(void *);
+    bool m_isDestructing{false};
+
+public:
+    MessageQueue(int queueSize,void (*free_cb)(void *param));
+    ~MessageQueue();
+    void sendData(void* new_value, int wait_time_ms = DEFAULT_MSGQ_WAIT_TIME_MS);
+    void ReceiveData(void*& value, int wait_time_ms = DEFAULT_MSGQ_WAIT_TIME_MS);
+};
+
+#endif
\ No newline at end of file
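// ===========================================================================
// Reviewer sketches (not part of the change set above)
// ===========================================================================
// Sketch 1: appendPipelineNewSampleHandler() is declared in MiracastGstPlayer.h
// and wired to the appsink "new-sample" signal in createPipeline(), but its body
// is outside this diff. The snippet below is only a hedged illustration of how
// such a handler could move buffers into the new MessageQueue, assuming the
// queue stores ref'ed GstBuffer pointers that gstBufferReleaseCallback() later
// unrefs. All names other than the GStreamer/GLib API are hypothetical.
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include "MiracastCommon.h"

static GstFlowReturn exampleNewSampleHandler(GstElement *appsink, gpointer userdata)
{
    MessageQueue *queue = static_cast<MessageQueue *>(userdata); // hypothetical: queue passed as user data

    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(appsink));
    if (nullptr == sample)
    {
        return GST_FLOW_ERROR;
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    if (nullptr != buffer)
    {
        gst_buffer_ref(buffer);   // keep the buffer alive while it waits in the queue
        queue->sendData(buffer);  // if the queue is destroyed first, gstBufferReleaseCallback() unrefs it
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}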
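// Sketch 2: the playbin side consumes the queued buffers through appsrc.
// gst_bin_need_data() and pushBufferToAppsrc() are declared above but not shown
// in this diff; this is a minimal, self-contained illustration of that feed path.
// FeederCtx is a hypothetical stand-in for the MiracastGstPlayer members
// (m_customQueueHandle, m_appsrc, m_sourceId), and g_idle_add() is used instead
// of the player's private GMainContext for brevity.
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include "MiracastCommon.h"

struct FeederCtx
{
    MessageQueue *queue;    // stands in for m_customQueueHandle
    GstElement   *appsrc;   // stands in for m_appsrc
    guint         sourceId; // stands in for m_sourceId
};

static gboolean examplePushBufferToAppsrc(gpointer userdata)
{
    FeederCtx *ctx = static_cast<FeederCtx *>(userdata);
    void *data = nullptr;

    ctx->queue->ReceiveData(data, 100 /* ms */);
    if (nullptr == data)
    {
        return G_SOURCE_CONTINUE;  // nothing queued yet, poll again on the next dispatch
    }

    GstBuffer *buffer = static_cast<GstBuffer *>(data);
    // gst_app_src_push_buffer() takes ownership of the ref added in the appsink handler
    if (gst_app_src_push_buffer(GST_APP_SRC(ctx->appsrc), buffer) != GST_FLOW_OK)
    {
        ctx->sourceId = 0;
        return G_SOURCE_REMOVE;    // stop feeding on flushing/error
    }
    return G_SOURCE_CONTINUE;
}

static void exampleNeedData(GstAppSrc *src, guint length, gpointer user_data)
{
    (void)src;
    (void)length;
    FeederCtx *ctx = static_cast<FeederCtx *>(user_data);
    if (0 == ctx->sourceId)
    {
        // keeping the source id around mirrors how stop() calls g_source_remove(m_sourceId)
        ctx->sourceId = g_idle_add(examplePushBufferToAppsrc, ctx);
    }
}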
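// Sketch 3: standalone usage of the MessageQueue introduced in
// MiracastCommon.h/.cpp, showing the bounded producer/consumer contract and the
// release callback that frees anything still queued when the queue is destroyed.
// The int payload is purely illustrative.
#include <iostream>
#include "MiracastCommon.h"

static void releaseLeftover(void *param)
{
    delete static_cast<int *>(param);  // called by ~MessageQueue() for undelivered items
}

int main()
{
    MessageQueue queue(5, releaseLeftover);  // capacity of 5 entries

    queue.sendData(new int(42), 1000);       // blocks up to 1000 ms if the queue is full

    void *item = nullptr;
    queue.ReceiveData(item, 1000);           // blocks up to 1000 ms if the queue is empty
    if (nullptr != item)
    {
        std::cout << "received " << *static_cast<int *>(item) << std::endl;
        delete static_cast<int *>(item);     // delivered items are owned by the consumer
    }
    return 0;
}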
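// Sketch 4: appendPipelineBusMessage() and playbinPipelineBusMessage() are only
// declared in this diff. The snippet shows the GstBusFunc shape expected by the
// gst_bus_add_watch() calls in createPipeline(); the real handlers presumably
// also drive notifyPlaybackState(), which is not shown here.
#include <gst/gst.h>

static gboolean exampleBusMessage(GstBus *bus, GstMessage *message, gpointer userdata)
{
    (void)bus;
    (void)userdata;
    switch (GST_MESSAGE_TYPE(message))
    {
        case GST_MESSAGE_ERROR:
        {
            GError *error = nullptr;
            gchar *debug = nullptr;
            gst_message_parse_error(message, &error, &debug);
            g_printerr("GStreamer error: %s (%s)\n", error->message, debug ? debug : "no detail");
            g_clear_error(&error);
            g_free(debug);
            break;
        }
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            break;
        default:
            break;
    }
    return TRUE;  // keep the bus watch installed
}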