-#ifdef HAVE_GSTREAMER
-
/* note: this requires gstreamer 0.10.x and a big list of plugins. */
/* it's currently hardcoded to use a big-endian alsasink as sink. */
#include <lib/base/ebase.h>
extensions.push_back("mp4");
extensions.push_back("mov");
extensions.push_back("m4a");
- extensions.push_back("m2ts");
sc->addServiceFactory(eServiceFactoryMP3::id, this, extensions);
}
source = GST_MESSAGE_SRC(msg);
sourceName = gst_object_get_name(source);
-#if 1
+#if 0
if (gst_message_get_structure(msg))
{
gchar *string = gst_structure_to_string(gst_message_get_structure(msg));
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
-// GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
+ GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
// GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
-// if (appsink)
-// {
-// g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
-// g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
-// g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
-// eDebug("eServiceMP3::appsink properties set!");
-// gst_object_unref(appsink);
-// }
+ if (appsink)
+ {
+ g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
+ g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
+ g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
+ eDebug("eServiceMP3::appsink properties set!");
+ gst_object_unref(appsink);
+ }
setAC3Delay(ac3_delay);
setPCMDelay(pcm_delay);
} break;
gchar *g_codec = NULL, *g_lang = NULL;
g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
subtitleStream subs;
- int ret;
+// int ret;
g_lang = g_strdup_printf ("und");
if ( tags && gst_is_tag_list(tags) )
/* bus sync handler: presumably invoked on the GStreamer thread that posts
   the message — TODO confirm against GStreamer 0.10 gst_bus_set_sync_handler
   docs.  Sends a type-1 Message through m_pump so gstPoll() later drains the
   pipeline bus on the main thread, then passes the message on to the bus. */
GstBusSyncReply eServiceMP3::gstBusSyncHandler(GstBus *bus, GstMessage *message, gpointer user_data)
{
	eServiceMP3 *_this = (eServiceMP3*)user_data;
-	_this->m_pump.send(1);
+	_this->m_pump.send(Message(1));
	/* wake */
	return GST_BUS_PASS;
}
return atUnknown;
}
-void eServiceMP3::gstPoll(const int &msg)
+void eServiceMP3::gstPoll(const Message &msg)
{
	/* ok, we have a serious problem here. gstBusSyncHandler sends
	   us the wakeup signal, but likely before it was posted.
	   I need to understand the API a bit more to make this work
	   properly. */
- if (msg == 1)
+ if (msg.type == 1)
{
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
GstMessage *message;
gst_message_unref (message);
}
}
- else
+ else if (msg.type == 2)
pullSubtitle();
+ else if (msg.type == 3)
+ gstGhostpadHasCAPS_synced(msg.d.pad);
+ else
+ eDebug("gstPoll unhandled Message %d\n", msg.type);
}
eAutoInitPtr<eServiceFactoryMP3> init_eServiceFactoryMP3(eAutoInitNumbers::service+1, "eServiceFactoryMP3");
eServiceMP3 *_this = (eServiceMP3*)user_data;
eSingleLocker l(_this->m_subs_to_pull_lock);
++_this->m_subs_to_pull;
- _this->m_pump.send(2);
+ _this->m_pump.send(Message(2));
}
gboolean eServiceMP3::gstGhostpadSinkEvent(GstPad * pad, GstEvent * event)
// eDebug("eServiceMP3::gstGhostpadSinkEvent %s", gst_structure_get_name (event->structure));
// eServiceMP3 *_this = (eServiceMP3*) (gst_pad_get_parent (pad));
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
gboolean ret;
GstFormat format;
GstFlowReturn eServiceMP3::gstGhostpadBufferAlloc(GstPad *pad, guint64 offset, guint size, GstCaps *caps, GstBuffer **buf)
{
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
// eDebug("eServiceMP3::gstGhostpadBufferAlloc prevcaps=%s newcaps=%s", gst_caps_to_string(_this->m_gst_prev_subtitle_caps), gst_caps_to_string(caps));
if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (_this->m_gst_prev_subtitle_caps, caps))
void eServiceMP3::gstGhostpadHasCAPS(GstPad *pad, GParamSpec * unused, gpointer user_data)
{
- GstCaps *caps;
eServiceMP3 *_this = (eServiceMP3*)user_data;
+ gst_object_ref (pad);
+
+ _this->m_pump.send(Message(3, pad));
+}
+
+// runs on the main thread, dispatched via the message pump (Message type 3)
+void eServiceMP3::gstGhostpadHasCAPS_synced(GstPad *pad)
+{
+ GstCaps *caps;
+
g_object_get (G_OBJECT (pad), "caps", &caps, NULL);
-// eDebug("gstGhostpadHasCAPS:: signal::caps = %s", gst_caps_to_string(caps));
- if (!caps)
- return;
+// eDebug("gstGhostpadHasCAPS:: signal::caps = %s", gst_caps_to_string(caps));
- subtitleStream subs = _this->m_subtitleStreams[_this->m_currentSubtitleStream];
-
- if ( subs.type == stUnknown )
+ if (caps)
{
- GstTagList *tags;
-// eDebug("gstGhostpadHasCAPS::m_subtitleStreams[%i].type == stUnknown...", _this->m_currentSubtitleStream);
-
- gchar *g_lang;
- g_signal_emit_by_name (_this->m_gst_playbin, "get-text-tags", _this->m_currentSubtitleStream, &tags);
-
- g_lang = g_strdup_printf ("und");
- if ( tags && gst_is_tag_list(tags) )
- gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
- subs.language_code = std::string(g_lang);
- GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
- subs.type = getSubtitleType(ghostpad);
-
- _this->m_subtitleStreams[_this->m_currentSubtitleStream] = subs;
+ subtitleStream subs;
- g_free (g_lang);
- }
+// eDebug("gstGhostpadHasCAPS_synced %p %d", pad, m_subtitleStreams.size());
-// eDebug("gstGhostpadHasCAPS:: _this->m_gst_prev_subtitle_caps=%s equal=%i",gst_caps_to_string(_this->m_gst_prev_subtitle_caps),gst_caps_is_equal(_this->m_gst_prev_subtitle_caps, caps));
+ if (!m_subtitleStreams.empty())
+ subs = m_subtitleStreams[m_currentSubtitleStream];
+ else {
+ subs.type = stUnknown;
+ subs.pad = pad;
+ }
- if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (_this->m_gst_prev_subtitle_caps, caps))
- gstGhostpadLink(_this, caps);
-
- _this->m_gst_prev_subtitle_caps = gst_caps_copy(caps);
- gst_caps_unref (caps);
+ if ( subs.type == stUnknown )
+ {
+ GstTagList *tags;
+// eDebug("gstGhostpadHasCAPS::m_subtitleStreams[%i].type == stUnknown...", m_currentSubtitleStream);
+
+ gchar *g_lang;
+ g_signal_emit_by_name (m_gst_playbin, "get-text-tags", m_currentSubtitleStream, &tags);
+
+ g_lang = g_strdup_printf ("und");
+ if ( tags && gst_is_tag_list(tags) )
+ gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
+
+ subs.language_code = std::string(g_lang);
+ GstPad *ghostpad = gst_element_get_static_pad(m_gst_subtitlebin, "sink");
+ subs.type = getSubtitleType(ghostpad);
+
+ if (!m_subtitleStreams.empty())
+ m_subtitleStreams[m_currentSubtitleStream] = subs;
+ else
+ m_subtitleStreams.push_back(subs);
+
+ g_free (g_lang);
+ }
+
+// eDebug("gstGhostpadHasCAPS:: m_gst_prev_subtitle_caps=%s equal=%i",gst_caps_to_string(m_gst_prev_subtitle_caps),gst_caps_is_equal(m_gst_prev_subtitle_caps, caps));
+
+ if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (m_gst_prev_subtitle_caps, caps))
+ gstGhostpadLink(this, caps);
+
+ m_gst_prev_subtitle_caps = gst_caps_copy(caps);
+
+ gst_caps_unref (caps);
+ }
+
+ gst_object_unref (pad);
}
GstFlowReturn eServiceMP3::gstGhostpadChainFunction(GstPad * pad, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- eServiceMP3 *_this = g_object_get_data (G_OBJECT (pad), "application-instance");
+ eServiceMP3 *_this = (eServiceMP3*)g_object_get_data (G_OBJECT (pad), "application-instance");
- gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
- gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+// gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+// gint64 duration_ns = GST_BUFFER_DURATION(buffer);
size_t len = GST_BUFFER_SIZE(buffer);
unsigned char line[len+1];
if ( m_subtitleStreams[m_currentSubtitleStream].type < stVOB )
{
unsigned char line[len+1];
+ SubtitlePage page;
memcpy(line, GST_BUFFER_DATA(buffer), len);
line[len] = 0;
eDebug("got new text subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
- ePangoSubtitlePage* page = new ePangoSubtitlePage;
gRGB rgbcol(0xD0,0xD0,0xD0);
- page->m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000000;
- SubtitlePage subtitlepage;
- subtitlepage.pango_page = page;
- subtitlepage.vob_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
+ page.type = SubtitlePage::Pango;
+ page.pango_page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
+ page.pango_page.m_show_pts = buf_pos / 11111L;
+ page.pango_page.m_timeout = duration_ns / 1000000;
+ m_subtitle_pages.push_back(page);
pushSubtitles();
}
else if ( m_subtitleStreams[m_currentSubtitleStream].type == stVOB )
{
+ SubtitlePage page;
eDebug("got new subpicture @ buf_pos = %lld ns (in pts=%lld), duration=%lld ns, len=%i bytes. ", buf_pos, buf_pos/11111, duration_ns, len);
- eVobSubtitlePage* page = new eVobSubtitlePage;
- eSize size = eSize(720, 576);
- page->m_pixmap = new gPixmap(size, 32, 0);
- // ePtr<gPixmap> pixmap;
- // pixmap = new gPixmap(size, 32, 1); /* allocate accel surface (if possible) */
- memcpy(page->m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000;
- SubtitlePage subtitlepage;
- subtitlepage.vob_page = page;
- subtitlepage.pango_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
+ page.type = SubtitlePage::Vob;
+ page.vob_page.m_pixmap = new gPixmap(eSize(720, 576), 32, 1);
+ memcpy(page.vob_page.m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
+ page.vob_page.m_show_pts = buf_pos / 11111L;
+ page.vob_page.m_timeout = duration_ns / 1000;
+ m_subtitle_pages.push_back(page);
pushSubtitles();
}
else
pts_t running_pts;
while ( !m_subtitle_pages.empty() )
{
- SubtitlePage frontpage = m_subtitle_pages.front();
+ SubtitlePage &frontpage = m_subtitle_pages.front();
gint64 diff_ms = 0;
- gint64 show_pts = 0;
-
+ gint64 show_pts;
+
+ if (frontpage.type == SubtitlePage::Pango)
+ show_pts = frontpage.pango_page.m_show_pts;
+ else
+ show_pts = frontpage.vob_page.m_show_pts;
+
getPlayPosition(running_pts);
-
- if ( frontpage.pango_page != 0 )
- show_pts = frontpage.pango_page->show_pts;
- else if ( frontpage.vob_page != 0 )
- show_pts = frontpage.vob_page->show_pts;
-
+
diff_ms = ( show_pts - running_pts ) / 90;
GstFormat fmt = GST_FORMAT_TIME;
gint64 now;
{
if ( m_subtitle_widget )
{
- if ( frontpage.pango_page != 0)
- {
- m_subtitle_widget->setPage(*(frontpage.pango_page));
- }
- else if ( frontpage.vob_page != 0)
+ if ( frontpage.type == SubtitlePage::Pango)
+ m_subtitle_widget->setPage(frontpage.pango_page);
+ else
{
- m_subtitle_widget->setPixmap(frontpage.vob_page->m_pixmap, eRect(0, 0, 720, 576));
- eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page->m_timeout);
- m_subtitle_hide_timer->start(frontpage.vob_page->m_timeout, true);
+ m_subtitle_widget->setPixmap(frontpage.vob_page.m_pixmap, eRect(0, 0, 720, 576));
+ eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page.m_timeout);
+ m_subtitle_hide_timer->start(frontpage.vob_page.m_timeout, true);
}
m_subtitle_widget->show();
}
}
}
-#else
-#warning gstreamer not available, not building media player
-#endif