-#ifdef HAVE_GSTREAMER
-
/* note: this requires gstreamer 0.10.x and a big list of plugins. */
/* it's currently hardcoded to use a big-endian alsasink as sink. */
#include <lib/base/ebase.h>
extensions.push_back("mp4");
extensions.push_back("mov");
extensions.push_back("m4a");
- extensions.push_back("m2ts");
sc->addServiceFactory(eServiceFactoryMP3::id, this, extensions);
}
m_currentTrickRatio = 0;
m_subs_to_pull = 0;
m_buffer_size = 1*1024*1024;
+ m_prev_decoder_time = -1;
+ m_decoder_time_valid_state = 0;
+
CONNECT(m_seekTimeout->timeout, eServiceMP3::seekTimeoutCB);
CONNECT(m_subtitle_sync_timer->timeout, eServiceMP3::pushSubtitles);
CONNECT(m_subtitle_hide_timer->timeout, eServiceMP3::hideSubtitles);
if (!(ret = seekToImpl(to)))
{
m_subtitle_pages.clear();
+ m_prev_decoder_time = -1;
+ m_decoder_time_valid_state = 0;
m_subs_to_pull = 0;
}
}
source = GST_MESSAGE_SRC(msg);
sourceName = gst_object_get_name(source);
-#if 1
+#if 0
if (gst_message_get_structure(msg))
{
gchar *string = gst_structure_to_string(gst_message_get_structure(msg));
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
-// GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
+ GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
// GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
-// if (appsink)
-// {
-// g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
-// g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
-// g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
-// eDebug("eServiceMP3::appsink properties set!");
-// gst_object_unref(appsink);
-// }
+ if (appsink)
+ {
+ g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
+ g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
+ g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
+ eDebug("eServiceMP3::appsink properties set!");
+ gst_object_unref(appsink);
+ }
setAC3Delay(ac3_delay);
setPCMDelay(pcm_delay);
} break;
gchar *g_codec = NULL, *g_lang = NULL;
g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
subtitleStream subs;
- int ret;
+// int ret;
g_lang = g_strdup_printf ("und");
if ( tags && gst_is_tag_list(tags) )
GstBusSyncReply eServiceMP3::gstBusSyncHandler(GstBus *bus, GstMessage *message, gpointer user_data)
{
eServiceMP3 *_this = (eServiceMP3*)user_data;
- _this->m_pump.send(1);
+ _this->m_pump.send(Message(1));
/* wake */
return GST_BUS_PASS;
}
return atUnknown;
}
-void eServiceMP3::gstPoll(const int &msg)
+void eServiceMP3::gstPoll(const Message &msg)
{
- /* ok, we have a serious problem here. gstBusSyncHandler sends
- us the wakup signal, but likely before it was posted.
- the usleep, an EVIL HACK (DON'T DO THAT!!!) works around this.
-
- I need to understand the API a bit more to make this work
- proplerly. */
- if (msg == 1)
+ if (msg.type == 1)
{
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
GstMessage *message;
- usleep(1);
-	while ((message = gst_bus_pop (bus)))
+	while ((message = gst_bus_pop(bus)))
{
gstBusCall(bus, message);
gst_message_unref (message);
}
}
- else
+ else if (msg.type == 2)
pullSubtitle();
+ else if (msg.type == 3)
+ gstGhostpadHasCAPS_synced(msg.d.pad);
+ else
+ eDebug("gstPoll unhandled Message %d\n", msg.type);
}
eAutoInitPtr<eServiceFactoryMP3> init_eServiceFactoryMP3(eAutoInitNumbers::service+1, "eServiceFactoryMP3");
eServiceMP3 *_this = (eServiceMP3*)user_data;
eSingleLocker l(_this->m_subs_to_pull_lock);
++_this->m_subs_to_pull;
- _this->m_pump.send(2);
+ _this->m_pump.send(Message(2));
}
gboolean eServiceMP3::gstGhostpadSinkEvent(GstPad * pad, GstEvent * event)
// eDebug("eServiceMP3::gstGhostpadSinkEvent %s", gst_structure_get_name (event->structure));
// eServiceMP3 *_this = (eServiceMP3*) (gst_pad_get_parent (pad));
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
gboolean ret;
GstFormat format;
GstFlowReturn eServiceMP3::gstGhostpadBufferAlloc(GstPad *pad, guint64 offset, guint size, GstCaps *caps, GstBuffer **buf)
{
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
// eDebug("eServiceMP3::gstGhostpadBufferAlloc prevcaps=%s newcaps=%s", gst_caps_to_string(_this->m_gst_prev_subtitle_caps), gst_caps_to_string(caps));
if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (_this->m_gst_prev_subtitle_caps, caps))
void eServiceMP3::gstGhostpadHasCAPS(GstPad *pad, GParamSpec * unused, gpointer user_data)
{
- GstCaps *caps;
eServiceMP3 *_this = (eServiceMP3*)user_data;
+ gst_object_ref (pad);
+
+ _this->m_pump.send(Message(3, pad));
+}
+
+// after messagepump
+void eServiceMP3::gstGhostpadHasCAPS_synced(GstPad *pad)
+{
+ GstCaps *caps;
+
g_object_get (G_OBJECT (pad), "caps", &caps, NULL);
-// eDebug("gstGhostpadHasCAPS:: signal::caps = %s", gst_caps_to_string(caps));
- if (!caps)
- return;
+// eDebug("gstGhostpadHasCAPS:: signal::caps = %s", gst_caps_to_string(caps));
- subtitleStream subs = _this->m_subtitleStreams[_this->m_currentSubtitleStream];
-
- if ( subs.type == stUnknown )
+ if (caps)
{
- GstTagList *tags;
-// eDebug("gstGhostpadHasCAPS::m_subtitleStreams[%i].type == stUnknown...", _this->m_currentSubtitleStream);
-
- gchar *g_lang;
- g_signal_emit_by_name (_this->m_gst_playbin, "get-text-tags", _this->m_currentSubtitleStream, &tags);
-
- g_lang = g_strdup_printf ("und");
- if ( tags && gst_is_tag_list(tags) )
- gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
- subs.language_code = std::string(g_lang);
- GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
- subs.type = getSubtitleType(ghostpad);
-
- _this->m_subtitleStreams[_this->m_currentSubtitleStream] = subs;
+ subtitleStream subs;
- g_free (g_lang);
- }
+// eDebug("gstGhostpadHasCAPS_synced %p %d", pad, m_subtitleStreams.size());
-// eDebug("gstGhostpadHasCAPS:: _this->m_gst_prev_subtitle_caps=%s equal=%i",gst_caps_to_string(_this->m_gst_prev_subtitle_caps),gst_caps_is_equal(_this->m_gst_prev_subtitle_caps, caps));
+ if (!m_subtitleStreams.empty())
+ subs = m_subtitleStreams[m_currentSubtitleStream];
+ else {
+ subs.type = stUnknown;
+ subs.pad = pad;
+ }
- if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (_this->m_gst_prev_subtitle_caps, caps))
- gstGhostpadLink(_this, caps);
-
- _this->m_gst_prev_subtitle_caps = gst_caps_copy(caps);
- gst_caps_unref (caps);
+ if ( subs.type == stUnknown )
+ {
+ GstTagList *tags;
+// eDebug("gstGhostpadHasCAPS::m_subtitleStreams[%i].type == stUnknown...", m_currentSubtitleStream);
+
+ gchar *g_lang;
+ g_signal_emit_by_name (m_gst_playbin, "get-text-tags", m_currentSubtitleStream, &tags);
+
+ g_lang = g_strdup_printf ("und");
+ if ( tags && gst_is_tag_list(tags) )
+ gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
+
+ subs.language_code = std::string(g_lang);
+ GstPad *ghostpad = gst_element_get_static_pad(m_gst_subtitlebin, "sink");
+ subs.type = getSubtitleType(ghostpad);
+
+ if (!m_subtitleStreams.empty())
+ m_subtitleStreams[m_currentSubtitleStream] = subs;
+ else
+ m_subtitleStreams.push_back(subs);
+
+ g_free (g_lang);
+ }
+
+// eDebug("gstGhostpadHasCAPS:: m_gst_prev_subtitle_caps=%s equal=%i",gst_caps_to_string(m_gst_prev_subtitle_caps),gst_caps_is_equal(m_gst_prev_subtitle_caps, caps));
+
+ if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (m_gst_prev_subtitle_caps, caps))
+ gstGhostpadLink(this, caps);
+
+ m_gst_prev_subtitle_caps = gst_caps_copy(caps);
+
+ gst_caps_unref (caps);
+ }
+
+ gst_object_unref (pad);
}
GstFlowReturn eServiceMP3::gstGhostpadChainFunction(GstPad * pad, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*)g_object_get_data (G_OBJECT (pad), "application-instance");
- gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
- gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+// gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+// gint64 duration_ns = GST_BUFFER_DURATION(buffer);
size_t len = GST_BUFFER_SIZE(buffer);
unsigned char line[len+1];
if ( m_subtitleStreams[m_currentSubtitleStream].type < stVOB )
{
unsigned char line[len+1];
+ SubtitlePage page;
memcpy(line, GST_BUFFER_DATA(buffer), len);
line[len] = 0;
eDebug("got new text subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
- ePangoSubtitlePage* page = new ePangoSubtitlePage;
gRGB rgbcol(0xD0,0xD0,0xD0);
- page->m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000000;
- SubtitlePage subtitlepage;
- subtitlepage.pango_page = page;
- subtitlepage.vob_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
- pushSubtitles();
+ page.type = SubtitlePage::Pango;
+ page.pango_page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
+ page.pango_page.m_show_pts = buf_pos / 11111L;
+ page.pango_page.m_timeout = duration_ns / 1000000;
+ m_subtitle_pages.push_back(page);
+ if (m_subtitle_pages.size()==1)
+ pushSubtitles();
}
else if ( m_subtitleStreams[m_currentSubtitleStream].type == stVOB )
{
+ SubtitlePage page;
eDebug("got new subpicture @ buf_pos = %lld ns (in pts=%lld), duration=%lld ns, len=%i bytes. ", buf_pos, buf_pos/11111, duration_ns, len);
- eVobSubtitlePage* page = new eVobSubtitlePage;
- eSize size = eSize(720, 576);
- page->m_pixmap = new gPixmap(size, 32, 0);
- // ePtr<gPixmap> pixmap;
- // pixmap = new gPixmap(size, 32, 1); /* allocate accel surface (if possible) */
- memcpy(page->m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000;
- SubtitlePage subtitlepage;
- subtitlepage.vob_page = page;
- subtitlepage.pango_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
- pushSubtitles();
+ page.type = SubtitlePage::Vob;
+ page.vob_page.m_pixmap = new gPixmap(eSize(720, 576), 32, 1);
+ memcpy(page.vob_page.m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
+ page.vob_page.m_show_pts = buf_pos / 11111L;
+ page.vob_page.m_timeout = duration_ns / 1000;
+ m_subtitle_pages.push_back(page);
+ if (m_subtitle_pages.size()==1)
+ pushSubtitles();
}
else
{
void eServiceMP3::pushSubtitles()
{
- pts_t running_pts;
while ( !m_subtitle_pages.empty() )
{
- SubtitlePage frontpage = m_subtitle_pages.front();
+ SubtitlePage &frontpage = m_subtitle_pages.front();
+ pts_t running_pts;
gint64 diff_ms = 0;
- gint64 show_pts = 0;
-
+ gint64 show_pts;
+
getPlayPosition(running_pts);
-
- if ( frontpage.pango_page != 0 )
- show_pts = frontpage.pango_page->show_pts;
- else if ( frontpage.vob_page != 0 )
- show_pts = frontpage.vob_page->show_pts;
-
- diff_ms = ( show_pts - running_pts ) / 90;
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 now;
- if ( gst_element_query_position(m_gst_playbin, &fmt, &now) != -1 )
- eDebug("check decoder/pipeline diff: decoder: %lld, pipeline: %lld, show_pts: %lld, diff: %lld ms", running_pts/90, now/1000000, show_pts/90, diff_ms);
+
+ if (m_decoder_time_valid_state < 4) {
+ ++m_decoder_time_valid_state;
+ if (m_prev_decoder_time == running_pts)
+ m_decoder_time_valid_state = 0;
+ if (m_decoder_time_valid_state < 4) {
+// if (m_decoder_time_valid_state)
+// eDebug("%d: decoder time not valid! prev %lld, now %lld\n", m_decoder_time_valid_state, m_prev_decoder_time/90, running_pts/90);
+// else
+// eDebug("%d: decoder time not valid! now %lld\n", m_decoder_time_valid_state, running_pts/90);
+ m_subtitle_sync_timer->start(25, true);
+ m_prev_decoder_time = running_pts;
+ break;
+ }
+ }
+
+ if (frontpage.type == SubtitlePage::Pango)
+ show_pts = frontpage.pango_page.m_show_pts;
else
- eDebug("query position for decoder/pipeline check failed!");
+ show_pts = frontpage.vob_page.m_show_pts;
+
+ diff_ms = ( show_pts - running_pts ) / 90;
+ eDebug("check subtitle: decoder: %lld, show_pts: %lld, diff: %lld ms", running_pts/90, show_pts/90, diff_ms);
if ( diff_ms < -100 )
{
- now /= 11111;
- diff_ms = abs((now - running_pts) / 90);
-
- if (diff_ms > 100000)
- {
- eDebug("high decoder/pipeline difference.. assume decoder has now started yet.. check again in 1sec");
- m_subtitle_sync_timer->start(1000, true);
- break;
- }
eDebug("subtitle too late... drop");
m_subtitle_pages.pop_front();
}
else if ( diff_ms > 20 )
{
- eDebug("start recheck timer");
- m_subtitle_sync_timer->start(diff_ms > 1000 ? 1000 : diff_ms, true);
+ eDebug("start timer");
+ m_subtitle_sync_timer->start(diff_ms, true);
break;
}
else // immediate show
{
if ( m_subtitle_widget )
{
- if ( frontpage.pango_page != 0)
- {
- m_subtitle_widget->setPage(*(frontpage.pango_page));
- }
- else if ( frontpage.vob_page != 0)
+ eDebug("show!\n");
+ if ( frontpage.type == SubtitlePage::Pango)
+ m_subtitle_widget->setPage(frontpage.pango_page);
+ else
{
- m_subtitle_widget->setPixmap(frontpage.vob_page->m_pixmap, eRect(0, 0, 720, 576));
- eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page->m_timeout);
- m_subtitle_hide_timer->start(frontpage.vob_page->m_timeout, true);
+ m_subtitle_widget->setPixmap(frontpage.vob_page.m_pixmap, eRect(0, 0, 720, 576));
+ eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page.m_timeout);
+ m_subtitle_hide_timer->start(frontpage.vob_page.m_timeout, true);
}
m_subtitle_widget->show();
}
// eDebug ("eServiceMP3::enableSubtitles g_object_set current-text = %i", pid);
m_currentSubtitleStream = pid;
m_subs_to_pull = 0;
+ m_prev_decoder_time = -1;
m_subtitle_pages.clear();
}
}
}
-#else
-#warning gstreamer not available, not building media player
-#endif