-#ifdef HAVE_GSTREAMER
-
/* note: this requires gstreamer 0.10.x and a big list of plugins. */
/* it's currently hardcoded to use a big-endian alsasink as sink. */
#include <lib/base/ebase.h>
extensions.push_back("mp4");
extensions.push_back("mov");
extensions.push_back("m4a");
- extensions.push_back("m2ts");
+ extensions.push_back("flv");
+ extensions.push_back("3gp");
+ extensions.push_back("3g2");
+ extensions.push_back("dts");
+ extensions.push_back("wmv");
+ extensions.push_back("asf");
+ extensions.push_back("wma");
sc->addServiceFactory(eServiceFactoryMP3::id, this, extensions);
}
m_buffer_size = 1*1024*1024;
m_prev_decoder_time = -1;
m_decoder_time_valid_state = 0;
+ m_errorInfo.missing_codec = "";
+ //vuplus
+ m_is_hls_stream = 0;
+ audioSink = videoSink = NULL;
CONNECT(m_seekTimeout->timeout, eServiceMP3::seekTimeoutCB);
CONNECT(m_subtitle_sync_timer->timeout, eServiceMP3::pushSubtitles);
m_sourceinfo.containertype = ctVCD;
m_sourceinfo.is_video = TRUE;
}
- if ( (strncmp(filename, "http://", 7)) == 0 || (strncmp(filename, "udp://", 6)) == 0 || (strncmp(filename, "rtp://", 6)) == 0 || (strncmp(filename, "https://", 8)) == 0 || (strncmp(filename, "mms://", 6)) == 0 || (strncmp(filename, "rtsp://", 7)) == 0 || (strncmp(filename, "rtspt://", 7)) == 0 )
+ if ( strstr(filename, "://") )
m_sourceinfo.is_streaming = TRUE;
gchar *uri;
uri = g_filename_to_uri(filename, NULL, NULL);
- eDebug("eServiceMP3::playbin2 uri=%s", uri);
+ //eDebug("eServiceMP3::playbin2 uri=%s", uri);
+ eDebug("eServiceMP3::playbin2");
m_gst_playbin = gst_element_factory_make("playbin2", "playbin");
if (!m_gst_playbin)
- m_error_message = "failed to create GStreamer pipeline!\n";
+ m_errorInfo.error_message = "failed to create GStreamer pipeline!\n";
g_object_set (G_OBJECT (m_gst_playbin), "uri", uri, NULL);
eDebug("eServiceMP3::subtitle uri: %s", g_filename_to_uri(srt_filename, NULL, NULL));
g_object_set (G_OBJECT (m_gst_playbin), "suburi", g_filename_to_uri(srt_filename, NULL, NULL), NULL);
}
+ if ( m_sourceinfo.is_streaming )
+ {
+ g_signal_connect (G_OBJECT (m_gst_playbin), "notify::source", G_CALLBACK (gstHTTPSourceSetAgent), this);
+ }
} else
{
m_event((iPlayableService*)this, evUser+12);
if (m_gst_playbin)
gst_object_unref(GST_OBJECT(m_gst_playbin));
- eDebug("eServiceMP3::sorry, can't play: %s",m_error_message.c_str());
+ eDebug("eServiceMP3::sorry, can't play: %s",m_errorInfo.error_message.c_str());
m_gst_playbin = 0;
}
if (m_stream_tags)
gst_tag_list_free(m_stream_tags);
+
+ if (audioSink)
+ {
+ gst_object_unref(GST_OBJECT(audioSink));
+ audioSink = NULL;
+ }
+ if (videoSink)
+ {
+ gst_object_unref(GST_OBJECT(videoSink));
+ videoSink = NULL;
+ }
if (m_gst_playbin)
{
//GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(m_gst_playbin),GST_DEBUG_GRAPH_SHOW_ALL,"e2-playbin");
- eDebug("eServiceMP3::stop %s", m_ref.path.c_str());
+ //eDebug("eServiceMP3::stop %s", m_ref.path.c_str());
+ eDebug("eServiceMP3::stop service..");
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
m_state = stStopped;
{
GstFormat fmt = GST_FORMAT_TIME;
gint64 pos;
- GstElement *sink;
pts = 0;
if (!m_gst_playbin)
if (m_state != stRunning)
return -1;
- g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &sink, NULL);
-
- if (!sink)
- g_object_get (G_OBJECT (m_gst_playbin), "video-sink", &sink, NULL);
-
- if (!sink)
- return -1;
-
- gchar *name = gst_element_get_name(sink);
- gboolean use_get_decoder_time = strstr(name, "dvbaudiosink") || strstr(name, "dvbvideosink");
- g_free(name);
-
- if (use_get_decoder_time)
- g_signal_emit_by_name(sink, "get-decoder-time", &pos);
-
- gst_object_unref(sink);
-
- if (!use_get_decoder_time && !gst_element_query_position(m_gst_playbin, &fmt, &pos)) {
- eDebug("gst_element_query_position failed in getPlayPosition");
- return -1;
+ if (audioSink || videoSink)
+ {
+ g_signal_emit_by_name(audioSink ? audioSink : videoSink, "get-decoder-time", &pos);
+ if (!GST_CLOCK_TIME_IS_VALID(pos))
+ return -1;
+ }
+ else
+ {
+ if(!gst_element_query_position(m_gst_playbin, &fmt, &pos))
+ {
+ eDebug("gst_element_query_position failed in getPlayPosition");
+ return -1;
+ }
}
/* pos is in nanoseconds. we have 90 000 pts per second. */
tag = "channel-mode";
break;
case sUser+12:
- return m_error_message;
+ return m_errorInfo.error_message;
default:
return "";
}
subtype_t type = stUnknown;
GstCaps* caps = gst_pad_get_negotiated_caps(pad);
+ if (!caps && !g_codec)
+ {
+ caps = gst_pad_get_allowed_caps(pad);
+ }
+
if ( caps )
{
GstStructure* str = gst_caps_get_structure(caps, 0);
return type;
}
+/* GCompareFunc-style helper for gst_iterator_find_custom(): returns 0 (match)
+ * when the element's GType name equals the string passed in 'type'
+ * (e.g. "GstDVBAudioSink" / "GstDVBVideoSink"), non-zero otherwise.
+ * Used while iterating the playbin2 bin to locate the DVB sinks. */
+gint eServiceMP3::match_sinktype(GstElement *element, gpointer type)
+{
+ return strcmp(g_type_name(G_OBJECT_TYPE(element)), (const char*)type);
+}
+
void eServiceMP3::gstBusCall(GstBus *bus, GstMessage *msg)
{
if (!msg)
return;
gchar *sourceName;
GstObject *source;
-
source = GST_MESSAGE_SRC(msg);
+ if (!GST_IS_OBJECT(source))
+ return;
sourceName = gst_object_get_name(source);
#if 0
+ gchar *string;
if (gst_message_get_structure(msg))
- {
- gchar *string = gst_structure_to_string(gst_message_get_structure(msg));
- eDebug("eServiceMP3::gst_message from %s: %s", sourceName, string);
- g_free(string);
- }
+ string = gst_structure_to_string(gst_message_get_structure(msg));
else
- eDebug("eServiceMP3::gst_message from %s: %s (without structure)", sourceName, GST_MESSAGE_TYPE_NAME(msg));
+ string = g_strdup(GST_MESSAGE_TYPE_NAME(msg));
+ eDebug("eTsRemoteSource::gst_message from %s: %s", sourceName, string);
+ g_free(string);
#endif
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_STATE_CHANGED:
{
if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
+ {
+ //vuplus
+ if(!strncmp(sourceName, "hls", 3))
+ {
+ //eDebug("HLS Protocol detected : source [%s]", sourceName);
+ m_is_hls_stream = 1;
+ }
break;
+ }
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
+ GstIterator *children;
GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
if (appsink)
{
eDebug("eServiceMP3::appsink properties set!");
gst_object_unref(appsink);
}
+
+ if (audioSink)
+ {
+ gst_object_unref(GST_OBJECT(audioSink));
+ audioSink = NULL;
+ }
+ if (videoSink)
+ {
+ gst_object_unref(GST_OBJECT(videoSink));
+ videoSink = NULL;
+ }
+
+ children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
+ audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
+ gst_iterator_free(children);
+
+ children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
+ videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
+ gst_iterator_free(children);
+
setAC3Delay(ac3_delay);
setPCMDelay(pcm_delay);
} break;
} break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
{
+ if (audioSink)
+ {
+ gst_object_unref(GST_OBJECT(audioSink));
+ audioSink = NULL;
+ }
+ if (videoSink)
+ {
+ gst_object_unref(GST_OBJECT(videoSink));
+ videoSink = NULL;
+ }
} break;
case GST_STATE_CHANGE_READY_TO_NULL:
{
GError *err;
gst_message_parse_error (msg, &err, &debug);
g_free (debug);
- eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
+ eWarning("Gstreamer error: %s (domain:%i, code:%i) from %s", err->message, err->domain, err->code, sourceName );
if ( err->domain == GST_STREAM_ERROR )
{
if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
m_event((iPlayableService*)this, evUser+10);
}
}
+ else //if( err->domain == 1232 )
+ {
+ if ( err->code == 5 )
+ m_event((iPlayableService*)this, evUser+20);
+ }
g_error_free(err);
break;
}
g_free (g_lang);
}
m_event((iPlayableService*)this, evUpdatedEventInfo);
+
+ if ( m_errorInfo.missing_codec != "" )
+ {
+ if ( m_errorInfo.missing_codec.find("video/") == 0 || ( m_errorInfo.missing_codec.find("audio/") == 0 && getNumberOfTracks() == 0 ) )
+ m_event((iPlayableService*)this, evUser+12);
+ }
break;
}
case GST_MESSAGE_ELEMENT:
{
- if ( gst_is_missing_plugin_message(msg) )
+ if (const GstStructure *msgstruct = gst_message_get_structure(msg))
{
- gchar *description = gst_missing_plugin_message_get_description(msg);
- if ( description )
+ if ( gst_is_missing_plugin_message(msg) )
{
- m_error_message = "GStreamer plugin " + (std::string)description + " not available!\n";
- g_free(description);
- m_event((iPlayableService*)this, evUser+12);
- }
- }
- else if (const GstStructure *msgstruct = gst_message_get_structure(msg))
- {
- const gchar *eventname = gst_structure_get_name(msgstruct);
- if ( eventname )
- {
- if (!strcmp(eventname, "eventSizeChanged") || !strcmp(eventname, "eventSizeAvail"))
+ GstCaps *caps= NULL;
+ gboolean ret = gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL);
+ if (ret)
{
- gst_structure_get_int (msgstruct, "aspect_ratio", &m_aspect);
- gst_structure_get_int (msgstruct, "width", &m_width);
- gst_structure_get_int (msgstruct, "height", &m_height);
- if (strstr(eventname, "Changed"))
- m_event((iPlayableService*)this, evVideoSizeChanged);
+ std::string codec = (const char*) gst_caps_to_string(caps);
+ gchar *description = gst_missing_plugin_message_get_description(msg);
+ if ( description )
+ {
+ eDebug("eServiceMP3::m_errorInfo.missing_codec = %s", codec.c_str());
+ m_errorInfo.error_message = "GStreamer plugin " + (std::string)description + " not available!\n";
+ m_errorInfo.missing_codec = codec.substr(0,(codec.find_first_of(',')));
+ g_free(description);
+ }
+ gst_caps_unref(caps);
}
- else if (!strcmp(eventname, "eventFrameRateChanged") || !strcmp(eventname, "eventFrameRateAvail"))
- {
- gst_structure_get_int (msgstruct, "frame_rate", &m_framerate);
- if (strstr(eventname, "Changed"))
- m_event((iPlayableService*)this, evVideoFramerateChanged);
- }
- else if (!strcmp(eventname, "eventProgressiveChanged") || !strcmp(eventname, "eventProgressiveAvail"))
+ }
+ else
+ {
+ const gchar *eventname = gst_structure_get_name(msgstruct);
+ if ( eventname )
{
- gst_structure_get_int (msgstruct, "progressive", &m_progressive);
- if (strstr(eventname, "Changed"))
- m_event((iPlayableService*)this, evVideoProgressiveChanged);
+ if (!strcmp(eventname, "eventSizeChanged") || !strcmp(eventname, "eventSizeAvail"))
+ {
+ gst_structure_get_int (msgstruct, "aspect_ratio", &m_aspect);
+ gst_structure_get_int (msgstruct, "width", &m_width);
+ gst_structure_get_int (msgstruct, "height", &m_height);
+ if (strstr(eventname, "Changed"))
+ m_event((iPlayableService*)this, evVideoSizeChanged);
+ }
+ else if (!strcmp(eventname, "eventFrameRateChanged") || !strcmp(eventname, "eventFrameRateAvail"))
+ {
+ gst_structure_get_int (msgstruct, "frame_rate", &m_framerate);
+ if (strstr(eventname, "Changed"))
+ m_event((iPlayableService*)this, evVideoFramerateChanged);
+ }
+ else if (!strcmp(eventname, "eventProgressiveChanged") || !strcmp(eventname, "eventProgressiveAvail"))
+ {
+ gst_structure_get_int (msgstruct, "progressive", &m_progressive);
+ if (strstr(eventname, "Changed"))
+ m_event((iPlayableService*)this, evVideoProgressiveChanged);
+ }
}
}
}
g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
eDebug("eServiceMP3::GST_STREAM_STATUS_TYPE_CREATE -> setting timeout on %s to %is", name, HTTP_TIMEOUT);
}
-
+ //vuplus
+ else if (m_is_hls_stream && !strncmp(name, "queue", 5))
+ {
+ m_streamingsrc_timeout->stop();
+ m_is_hls_stream = 0;
+ //eDebug("Stoped response timeout!! : HLS");
+ }
}
if ( GST_IS_PAD(source) )
gst_object_unref(owner);
for (std::vector<subtitleStream>::iterator IterSubtitleStream(m_subtitleStreams.begin()); IterSubtitleStream != m_subtitleStreams.end(); ++IterSubtitleStream)
{
subtype_t type = IterSubtitleStream->type;
- ePyObject tuple = PyTuple_New(5);
-// eDebug("eServiceMP3::getSubtitleList idx=%i type=%i, code=%s", stream_idx, int(type), (IterSubtitleStream->language_code).c_str());
- PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(2));
- PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(stream_idx));
- PyTuple_SET_ITEM(tuple, 2, PyInt_FromLong(int(type)));
- PyTuple_SET_ITEM(tuple, 3, PyInt_FromLong(0));
- PyTuple_SET_ITEM(tuple, 4, PyString_FromString((IterSubtitleStream->language_code).c_str()));
- PyList_Append(l, tuple);
- Py_DECREF(tuple);
+ switch(type)
+ {
+ case stUnknown:
+ case stVOB:
+ case stPGS:
+ break;
+ default:
+ {
+ ePyObject tuple = PyTuple_New(5);
+// eDebug("eServiceMP3::getSubtitleList idx=%i type=%i, code=%s", stream_idx, int(type), (IterSubtitleStream->language_code).c_str());
+ PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(2));
+ PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(stream_idx));
+ PyTuple_SET_ITEM(tuple, 2, PyInt_FromLong(int(type)));
+ PyTuple_SET_ITEM(tuple, 3, PyInt_FromLong(0));
+ PyTuple_SET_ITEM(tuple, 4, PyString_FromString((IterSubtitleStream->language_code).c_str()));
+ PyList_Append(l, tuple);
+ Py_DECREF(tuple);
+ }
+ }
stream_idx++;
}
eDebug("eServiceMP3::getSubtitleList finished");
return;
else
{
- GstElement *sink;
int config_delay_int = delay;
- g_object_get (G_OBJECT (m_gst_playbin), "video-sink", &sink, NULL);
-
- if (sink)
+ if (videoSink)
{
std::string config_delay;
if(ePythonConfigQuery::getConfigValue("config.av.generalAC3delay", config_delay) == 0)
config_delay_int += atoi(config_delay.c_str());
- gst_object_unref(sink);
}
else
{
config_delay_int = 0;
}
- g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &sink, NULL);
-
- if (sink)
+ if (audioSink)
{
- gchar *name = gst_element_get_name(sink);
- if (strstr(name, "dvbaudiosink"))
- eTSMPEGDecoder::setHwAC3Delay(config_delay_int);
- g_free(name);
- gst_object_unref(sink);
+ eTSMPEGDecoder::setHwAC3Delay(config_delay_int);
}
}
}
return;
else
{
- GstElement *sink;
int config_delay_int = delay;
- g_object_get (G_OBJECT (m_gst_playbin), "video-sink", &sink, NULL);
-
- if (sink)
+ if (videoSink)
{
std::string config_delay;
if(ePythonConfigQuery::getConfigValue("config.av.generalPCMdelay", config_delay) == 0)
config_delay_int += atoi(config_delay.c_str());
- gst_object_unref(sink);
}
else
{
config_delay_int = 0;
}
- g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &sink, NULL);
-
- if (sink)
+ if (audioSink)
{
- gchar *name = gst_element_get_name(sink);
- if (strstr(name, "dvbaudiosink"))
- eTSMPEGDecoder::setHwPCMDelay(config_delay_int);
- else
- {
- // this is realy untested..and not used yet
- gint64 offset = config_delay_int;
- offset *= 1000000; // milli to nano
- g_object_set (G_OBJECT (m_gst_playbin), "ts-offset", offset, NULL);
- }
- g_free(name);
- gst_object_unref(sink);
+ eTSMPEGDecoder::setHwPCMDelay(config_delay_int);
}
}
}
-#else
-#warning gstreamer not available, not building media player
-#endif