1
0
mirror of https://bitbucket.org/librepilot/librepilot.git synced 2025-02-20 10:54:14 +01:00

LP-109 add gstreamer library and video gadget

This commit is contained in:
Philippe Renon 2016-09-22 02:23:15 +02:00
parent e3a658d153
commit 892c83b30e
76 changed files with 6451 additions and 3 deletions

View File

@ -0,0 +1,82 @@
# Copy the GStreamer runtime (core DLLs plus the plugins used by the video
# gadget) next to the GCS binaries on Windows, so the GCS runs without a
# separate GStreamer installation on the PATH.
win32:gstreamer {
    # locate the GStreamer installation via pkg-config
    GST_BIN_DIR = $$system(pkg-config --variable=exec_prefix gstreamer-1.0)/bin
    GST_PLUGINS_DIR = $$system(pkg-config --variable=pluginsdir gstreamer-1.0)

    # gstreamer libraries
    GST_LIBS = \
        libgstreamer-1.0-0.dll

    # optional command line tools, useful for debugging pipelines
    gstreamer_utilities:GST_LIBS += \
        gst-inspect-1.0.exe \
        gst-launch-1.0.exe

    # copy libraries/tools (and their dependencies) into the application dir
    for(lib, GST_LIBS) {
        addCopyFileTarget($${lib},$${GST_BIN_DIR},$${GCS_APP_PATH})
        addCopyDependenciesTarget($${lib},$${GST_BIN_DIR},$${GCS_APP_PATH})
    }

    # gstreamer core
    GST_PLUGINS = \
        libgstcoreelements.dll

    # gst-plugins-base
    GST_PLUGINS += \
        libgstapp.dll \
        libgstaudiotestsrc.dll \
        libgstpango.dll \
        libgstplayback.dll \
        libgsttcp.dll \
        libgsttypefindfunctions.dll \
        libgstvideoconvert.dll \
        libgstvideorate.dll \
        libgstvideoscale.dll \
        libgstvideotestsrc.dll

    # gst-plugins-good
    GST_PLUGINS += \
        libgstautodetect.dll \
        libgstavi.dll \
        libgstdeinterlace.dll \
        libgstdirectsoundsink.dll \
        libgstimagefreeze.dll \
        libgstjpeg.dll \
        libgstrawparse.dll \
        libgstrtp.dll \
        libgstrtpmanager.dll \
        libgstrtsp.dll \
        libgstudp.dll \
        libgstvideomixer.dll

    # gst-plugins-bad
    GST_PLUGINS += \
        libgstaudiovisualizers.dll \
        libgstautoconvert.dll \
        libgstcompositor.dll \
        libgstd3dvideosink.dll \
        libgstdebugutilsbad.dll \
        libgstdirectsoundsrc.dll \
        libgstinter.dll \
        libgstmpegpsdemux.dll \
        libgstmpegpsmux.dll \
        libgstmpegtsdemux.dll \
        libgstmpegtsmux.dll \
        libgstvideoparsersbad.dll \
        libgstwinks.dll \
        libgstwinscreencap.dll

    # gst-plugins-ugly
    GST_PLUGINS += \
        libgstmpeg2dec.dll \
        libgstx264.dll

    # gst-libav
    GST_PLUGINS += \
        libgstlibav.dll

    # plugins go into a gstreamer-1.0 sub-directory of the library path
    # (matching GST_PLUGIN_PATH_1_0 as set in gst_util.cpp)
    for(lib, GST_PLUGINS) {
        addCopyFileTarget($${lib},$${GST_PLUGINS_DIR},$${GCS_LIBRARY_PATH}/gstreamer-1.0)
        addCopyDependenciesTarget($${lib},$${GST_PLUGINS_DIR},$${GCS_APP_PATH})
    }
}

View File

@ -0,0 +1,136 @@
/**
******************************************************************************
*
* @file devicemonitor.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "devicemonitor.h"
#include "gst_util.h"
#include <gst/gst.h>
#include <QDebug>
/*
 * Synchronous bus handler installed on the device monitor bus.
 * Runs on a GStreamer (non GUI) thread, so device add/remove notifications
 * are forwarded to the DeviceMonitor object with a queued invocation and
 * executed on its own thread.
 */
static GstBusSyncReply my_bus_sync_func(GstBus *bus, GstMessage *message, gpointer user_data)
{
    Q_UNUSED(bus)
    GstDevice *device  = NULL;
    const char *method = NULL;

    // map the two device messages onto the matching DeviceMonitor private slot
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_DEVICE_ADDED:
        gst_message_parse_device_added(message, &device);
        method = "device_added";
        break;
    case GST_MESSAGE_DEVICE_REMOVED:
        gst_message_parse_device_removed(message, &device);
        method = "device_removed";
        break;
    default:
        break;
    }

    if (device != NULL) {
        gchar *name = gst_device_get_display_name(device);
        DeviceMonitor *dm = (DeviceMonitor *)user_data;
        QMetaObject::invokeMethod(dm, method, Qt::QueuedConnection,
                                  Q_ARG(QString, QString(name)));
        g_free(name);
        // gst_message_parse_device_added/removed return the device with an
        // extra reference (transfer full); drop it to avoid leaking it.
        gst_object_unref(device);
    }

    // no need to pass it to the async queue, there is none...
    return GST_BUS_DROP;
}
/*
 * Creates and starts a GStreamer device monitor filtered on video sources.
 * Device add/remove messages are delivered through a synchronous bus
 * handler (my_bus_sync_func) since no GLib main loop is running here.
 */
DeviceMonitor::DeviceMonitor(QObject *parent) : QObject(parent)
{
    // initialize gstreamer
    gst::init(NULL, NULL);

    monitor = gst_device_monitor_new();

    // install the sync handler before starting the monitor so no
    // message can be missed; the bus ref is ours to release
    GstBus *bus = gst_device_monitor_get_bus(monitor);
    gst_bus_set_sync_handler(bus, (GstBusSyncHandler)my_bus_sync_func, this, NULL);
    gst_object_unref(bus);

    // only monitor video sources; caps filtering is currently disabled
    // (re-enable the commented expression to restrict to raw video)
    GstCaps *caps = NULL; // gst_caps_new_empty_simple("video/x-raw");
    const gchar *classes = "Video/Source";
    gst_device_monitor_add_filter(monitor, classes, caps);
    if (caps) {
        gst_caps_unref(caps);
    }

    if (!gst_device_monitor_start(monitor)) {
        qWarning() << "Failed to start device monitor";
    }
}
/*
 * Stops the device monitor and releases it.
 */
DeviceMonitor::~DeviceMonitor()
{
    // stop monitoring before dropping the last reference
    gst_device_monitor_stop(monitor);
    gst_object_unref(monitor);
}
/*
 * Returns a snapshot of the devices currently known to the monitor.
 * The GList returned by gst_device_monitor_get_devices() is owned by the
 * caller: every element holds a GstDevice reference and the list nodes
 * themselves must be freed as well.
 */
QList<Device> DeviceMonitor::devices() const
{
    QList<Device> devices;
    GList *list = gst_device_monitor_get_devices(monitor);

    while (list != NULL) {
        GstDevice *device   = (GstDevice *)list->data;
        gchar *name         = gst_device_get_display_name(device);
        gchar *device_class = gst_device_get_device_class(device);
        devices << Device(name, device_class);
        g_free(name);
        g_free(device_class);
        gst_object_unref(device);
        // g_list_delete_link() (unlike g_list_remove_link()) also frees the
        // detached node; the previous code leaked one GList node per device
        list = g_list_delete_link(list, list);
    }
    return devices;
}
/*
 * Private slot target of the queued invocation made by my_bus_sync_func();
 * runs on this object's thread and republishes the notification as a
 * public Qt signal.
 */
void DeviceMonitor::device_added(QString name)
{
    Q_EMIT deviceAdded(name);
}
/*
 * Private slot target of the queued invocation made by my_bus_sync_func();
 * runs on this object's thread and republishes the notification as a
 * public Qt signal.
 */
void DeviceMonitor::device_removed(QString name)
{
    Q_EMIT deviceRemoved(name);
}

View File

@ -0,0 +1,73 @@
/**
******************************************************************************
*
* @file devicemonitor.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef DEVICEMONITOR_H_
#define DEVICEMONITOR_H_
#include "gst_global.h"
#include <QObject>
typedef struct _GstDeviceMonitor GstDeviceMonitor;
/**
 * Simple value type describing a device reported by the GStreamer device
 * monitor: a human readable display name plus the GStreamer device class
 * string (e.g. "Video/Source").
 */
class Device {
public:
    Device(QString displayName, QString deviceClass) : m_displayName(displayName), m_deviceClass(deviceClass)
    {}

    // human readable device name as reported by GStreamer
    QString displayName() const
    {
        return m_displayName;
    }

    // GStreamer device class string (e.g. "Video/Source")
    QString deviceClass() const
    {
        return m_deviceClass;
    }

private:
    QString m_displayName;
    QString m_deviceClass;
};
/**
 * Watches GStreamer video source devices (e.g. cameras) and emits
 * deviceAdded()/deviceRemoved() as they appear and disappear.
 * devices() returns a snapshot of the devices currently present.
 */
class GST_LIB_EXPORT DeviceMonitor : public QObject {
    Q_OBJECT
public:
    DeviceMonitor(QObject *parent = NULL);
    virtual ~DeviceMonitor();

    // snapshot of the currently known devices
    QList<Device> devices() const;

signals:
    void deviceAdded(QString name);
    void deviceRemoved(QString name);

private:
    GstDeviceMonitor *monitor;

private slots:
    // invoked by name via QMetaObject::invokeMethod from the GStreamer bus
    // sync handler (see devicemonitor.cpp); each re-emits the public signal
    void device_added(QString name);
    void device_removed(QString name);
};
#endif /* DEVICEMONITOR_H_ */

View File

@ -0,0 +1,38 @@
/**
******************************************************************************
*
* @file gst_global.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef GST_GLOBAL_H
#define GST_GLOBAL_H

#include <QtCore/qglobal.h>

// Standard Qt shared-library export/import switch: GST_LIB_LIBRARY is
// defined only while building the GCSGStreamer library itself (see
// gstreamer.pro), so clients of the library get the import declaration.
#if defined(GST_LIB_LIBRARY)
#  define GST_LIB_EXPORT Q_DECL_EXPORT
#else
#  define GST_LIB_EXPORT Q_DECL_IMPORT
#endif

#endif // GST_GLOBAL_H

View File

@ -0,0 +1,125 @@
/**
******************************************************************************
*
* @file gst_util.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "gst_util.h"
#include <gst/gst.h>
#ifdef USE_OPENCV
#include "plugins/cameracalibration/gstcameracalibration.h"
#include "plugins/cameracalibration/gstcameraundistort.h"
#endif
#include "utils/pathutils.h"
#include <QDebug>
// one-shot guard for gst::init() (not thread safe, see gst::init)
static bool initialized = false;

/*
 * GStreamer plugin init callback: registers the LibrePilot elements.
 * Returns TRUE on success, FALSE if any element fails to register.
 */
gboolean gst_plugin_librepilot_register(GstPlugin *plugin)
{
#ifdef USE_OPENCV
    // short-circuits exactly like the original early returns:
    // undistort is only attempted when calibration registered successfully
    return gst_camera_calibration_plugin_init(plugin)
           && gst_camera_undistort_plugin_init(plugin);
#else
    Q_UNUSED(plugin)
    return TRUE;
#endif
}
void gst_plugin_librepilot_register()
{
gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR, "librepilot",
"LibrePilot plugin", gst_plugin_librepilot_register, "1.10.0", "GPL",
"librepilot", "LibrePilot", "http://librepilot.org/");
}
// see http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gst-running.html
/*
 * One-shot, process-wide GStreamer initialization.
 * Sets the plugin search path (Windows), runs gst_init_check() and
 * registers the static LibrePilot plugin. Subsequent calls return
 * immediately; a failed initialization is reported but never retried.
 */
void gst::init(int *argc, char * *argv[])
{
    // TODO Not thread safe. Does it need to be?
    if (initialized) {
        return;
    }
    // set before attempting init: on failure we bail out and never retry
    initialized = true;

    // debugging switches kept for convenience (see the gst-running doc above)
    // qputenv("GST_DEBUG", "3");
    // qputenv("GST_DEBUG", "3,rtspsrc:6,udpsrc:6");
    // qputenv("GST_DEBUG", "3,bin:6");
    // qputenv("GST_DEBUG", "3,rtpjitterbuffer:6");
    // qputenv("GST_DEBUG_FILE", "gst.log");
    // qputenv("GST_DEBUG_DUMP_DOT_DIR", ".");

#ifdef Q_OS_WIN
    // point GStreamer at the plugins shipped alongside the GCS libraries
    // (populated by copydata.pro)
    qputenv("GST_PLUGIN_PATH_1_0", (Utils::GetLibraryPath() + "gstreamer-1.0").toLatin1());
#endif

    qDebug() << "gstreamer - initializing";
    GError *error = NULL;
    if (!gst_init_check(argc, argv, &error)) {
        qCritical() << "failed to initialize gstreamer";
        return;
    }

    qDebug() << "gstreamer - version:" << gst_version_string();
    qDebug() << "gstreamer - plugin system path:" << qgetenv("GST_PLUGIN_SYSTEM_PATH_1_0");
    qDebug() << "gstreamer - plugin path:" << qgetenv("GST_PLUGIN_PATH_1_0");

    qDebug() << "gstreamer - registering plugins";
    // GST_PLUGIN_STATIC_REGISTER(librepilot);
    gst_plugin_librepilot_register();

#ifdef USE_OPENCV
    // log OpenCV SIMD hardware support (informational only)
    // see http://stackoverflow.com/questions/32477403/how-to-know-if-sse2-is-activated-in-opencv
    // see http://answers.opencv.org/question/696/how-to-enable-vectorization-in-opencv/
    if (!cv::checkHardwareSupport(CV_CPU_SSE)) {
        qWarning() << "SSE not supported";
    }
    if (!cv::checkHardwareSupport(CV_CPU_SSE2)) {
        qWarning() << "SSE2 not supported";
    }
    if (!cv::checkHardwareSupport(CV_CPU_SSE3)) {
        qWarning() << "SSE3 not supported";
    }
    qDebug() << "MMX :" << cv::checkHardwareSupport(CV_CPU_MMX);
    qDebug() << "SSE :" << cv::checkHardwareSupport(CV_CPU_SSE);
    qDebug() << "SSE2 :" << cv::checkHardwareSupport(CV_CPU_SSE2);
    qDebug() << "SSE3 :" << cv::checkHardwareSupport(CV_CPU_SSE3);
    qDebug() << "SSE4_1 :" << cv::checkHardwareSupport(CV_CPU_SSE4_1);
    qDebug() << "SSE4_2 :" << cv::checkHardwareSupport(CV_CPU_SSE4_2);
#endif
}
/*
 * Returns the runtime GStreamer version as a QString.
 * Initializes GStreamer on first use.
 */
QString gst::version(void)
{
    init(NULL, NULL);
    // gst_version_string() returns a newly allocated string (transfer full);
    // free it after copying into the QString, the old code leaked it
    gchar *version_string = gst_version_string();
    QString version(version_string);
    g_free(version_string);
    return version;
}

View File

@ -0,0 +1,39 @@
/**
******************************************************************************
*
* @file gst_util.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef GST_UTIL_H
#define GST_UTIL_H

#include "gst_global.h"

#include <QString>

namespace gst {
// One-shot, process-wide GStreamer initialization; safe to call repeatedly.
// argc/argv may be NULL when no command line arguments are available.
GST_LIB_EXPORT void init(int *argc, char * *argv[]);

// Runtime GStreamer version string; initializes GStreamer on first use.
GST_LIB_EXPORT QString version();
}

#endif // GST_UTIL_H

View File

@ -0,0 +1,3 @@
# Link against the GCSGStreamer support library and expose its headers.
LIBS *= -l$$qtLibraryName(GCSGStreamer)
INCLUDEPATH += $$GCS_SOURCE_TREE/src/libs/gstreamer

View File

@ -0,0 +1,29 @@
# GCSGStreamer support library: wraps the GStreamer C API (initialization,
# device monitoring, pipelines, video rendering) for use by GCS plugins.
TEMPLATE = lib
TARGET = GCSGStreamer
# build with export declarations (see gst_global.h)
DEFINES += GST_LIB_LIBRARY

QT += widgets

include(../../library.pri)
include(../utils/utils.pri)
include(gstreamer_dependencies.pri)

# optional LibrePilot GStreamer plugins (camera calibration, ...)
gstreamer_plugins:include(plugins/plugins.pro)

HEADERS += \
    gst_global.h \
    gst_util.h \
    devicemonitor.h \
    pipeline.h \
    pipelineevent.h \
    overlay.h \
    videowidget.h

SOURCES += \
    gst_util.cpp \
    devicemonitor.cpp \
    pipeline.cpp \
    videowidget.cpp

# copy the GStreamer runtime next to the application (Windows only)
equals(copydata, 1):include(copydata.pro)

View File

@ -0,0 +1,9 @@
# Compiler and linker settings shared by everything using GStreamer.
DEFINES += USE_GSTREAMER
# OpenCV is optional (enables the camera calibration/undistort elements)
opencv:DEFINES += USE_OPENCV

linux|win32 {
    CONFIG += link_pkgconfig
    PKGCONFIG += glib-2.0 gobject-2.0
    PKGCONFIG += gstreamer-1.0 gstreamer-video-1.0
    opencv:PKGCONFIG += opencv
}

View File

@ -0,0 +1,155 @@
Tips:
- Measuring video latency : display time on video + film video output -> the time between two frames is the latency
Limitations:
- It is not possible to view a web cam in two different gadgets (same is *not* true for DirectSound sources)
but it is not really an issue, as it is possible to tee a video source in the pipeline itself
Issues:
- bad: libgstchromaprint - libchromaprint needs avcodec-56 (vs 57) and avutil-54 (vs 55)
- bad: libgstfragmented - needs libnettle-6-1 (vs 6-2) - was renamed to hls
- bad: libgstx265 - needs rebuild
- need to rebuild libgstpluginsbad and libchromaprint
Todo:
- should use openglvideosink for PFD
- save config as QR code and ...
- split cameraconfiguration -> cameraundistort
- exclude gst plugins from uncrustify
- fix crash on unsupported formats:
- undistort should be passthrough when not enabled
gst-launch-1.0.exe -v -m autovideosrc ! video/x-raw,format=BGRA,width=800,height=600 ! videoconvert ! queue ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
gst-launch-1.0.exe -v -m udpsrc port=5000 ! "application/x-rtp, payload=127" ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! autovideosink
autovideosrc ! videoconvert ! queue ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
autovideosrc ! queue ! videoscale ! video/x-raw,width=320,height=200 ! videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
autovideosrc ! queue ! videoscale ! videorate ! video/x-raw,width=320,height=240,frame-rate=30/1 ! videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
udpsrc port=5000 ! application/x-rtp,payload=96,clock-rate=90000 ! rtpjitterbuffer latency=30 ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! fpsdisplaysink
RTSP
server : ./test-launch.exe "( videotestsrc ! x264enc tune=zerolatency ! rtph264pay name=pay0 pt=96 )"
client : gst-launch-1.0.exe -v -m rtspsrc location=rtsp://127.0.0.1:8554/test latency=30 ! decodebin ! timeoverlay ! autovideosink
Qt:
Line 250058: 0:02:34.185436460 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:02:34.081238026, base 0:00:00.050268441, recv_diff 0:02:34.030969585, slope 8
Line 250059: 0:02:34.185499451 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta -2352638, new min: -2417925
Line 250060: 0:02:34.185552513 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2398275, out 0:02:34.081192389
RTP
server : gst-launch-1.0.exe -v -m videotestsrc ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
client : gst-launch-1.0.exe -v -m udpsrc port=5000 ! application/x-rtp,payload=96,clock-rate=90000 ! rtpjitterbuffer ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! autovideosink
WIFI CAM RTSP
client : gst-launch-1.0.exe -v -m rtspsrc location=rtsp://192.168.42.1/AmbaStreamTest latency=30 ! decodebin ! timeoverlay ! autovideosink
Qt:
Line 14594: 0:00:28.489562097 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.812089201, base 0:00:04.161093352, recv_diff 0:00:15.650995849, slope 7
Line 14595: 0:00:28.489625088 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 2029182, new min: -2061750
Line 14596: 0:00:28.489677219 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2246751, out 0:00:19.807813268
Line 14612: 0:00:28.527222391 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.849735841, base 0:00:04.161093352, recv_diff 0:00:15.688642489, slope 7
Line 14613: 0:00:28.527285692 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 6309156, new min: -2061750
Line 14614: 0:00:28.527339685 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2245270, out 0:00:19.841181415
Line 14630: 0:00:28.564027806 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.886522948, base 0:00:04.161093352, recv_diff 0:00:15.725429596, slope 7
Line 14631: 0:00:28.564091728 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 9729596, new min: -2061750
Line 14632: 0:00:28.564145410 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2243801, out 0:00:19.874549551
Line 14654: 0:00:31.712747597 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.035042595, base 0:00:04.161093352, recv_diff 0:00:18.873949243, slope 6
Line 14655: 0:00:31.712811519 9388 d5b9518 WARN rtpjitterbuffer rtpjitterbuffer.c:570:calculate_skew: delta - skew: 0:00:03.127126377 too big, reset skew
Line 14656: 0:00:31.712867063 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 0, delta 0
Line 14657: 0:00:31.712919194 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew 0, out 0:00:23.035042595
Line 14759: 0:00:31.720619622 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.043193270, base 0:00:23.035042595, recv_diff 0:00:00.008150675, slope 32
Line 14760: 0:00:31.720681061 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 1, delta -25215991
Line 14761: 0:00:31.720734123 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2521, out 0:00:23.068406740
Line 14774: 0:00:31.721641753 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.044219745, base 0:00:23.035042595, recv_diff 0:00:00.009177150, slope 58
Line 14775: 0:00:31.721703813 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 2, delta -57556183
Line 14776: 0:00:31.721755633 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -54319, out 0:00:23.101721609
Line 14789: 0:00:31.722667608 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.045249324, base 0:00:23.035042595, recv_diff 0:00:00.010206729, slope 78
Line 14790: 0:00:31.722730288 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 3, delta -89893271
Line 14791: 0:00:31.722782729 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -278916, out 0:00:23.134863679
Line 14804: 0:00:31.723697497 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.046273007, base 0:00:23.035042595, recv_diff 0:00:00.011230412, slope 95
Line 14805: 0:00:31.723759557 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 4, delta -122236254
Line 14806: 0:00:31.723811687 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -717962, out 0:00:23.167791299
[...]
Line 14864: 0:00:31.727785401 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.050366186, base 0:00:23.035042595, recv_diff 0:00:00.015323591, slope 139
Line 14865: 0:00:31.727851185 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 8, delta -251609742
Line 14866: 0:00:31.727903626 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -10312755, out 0:00:23.291663173
Line 14964: 0:00:31.732567449 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.055371651, base 0:00:23.035042595, recv_diff 0:00:00.020329056, slope 118
Line 14965: 0:00:31.732595996 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 9, delta -279970944
Line 14966: 0:00:31.732620200 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -16380064, out 0:00:23.318962531
[...]
Line 15293: 0:00:31.746166387 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.068966555, base 0:00:23.035042595, recv_diff 0:00:00.033923960, slope 180
Line 15294: 0:00:31.746194935 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 23, delta -733509373
Line 15295: 0:00:31.746218828 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -430859680, out 0:00:23.371616248
Line 15310: 0:00:31.746791023 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.069478242, base 0:00:23.035042595, recv_diff 0:00:00.034435647, slope 186
Line 15311: 0:00:31.746820812 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 24, delta -766364353
Line 15312: 0:00:31.746845636 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -484540427, out 0:00:23.351302168
[...]
The high skew values can also be seen when using gst-launch but no pauses...
The pause duration is variable (~4s) but the pause is always on the beat every 10s (If the first pause is at 9s, then the next will be at 19s, then 29s, etc...).
Is it possible to disable the rtpjitterbuffer ?
Qos: element autovideosink1-actual-sink-d3dvideo sent qos event: live: 1; running time: 30719164049; stream time: 26558070697; timestamp: 30719164049; duration: 33366666 jitter: 3029708354; proportion: 0.15581; quality: 1000000; format: ; processed: 609; dropped: 2;
Wifi stall
0:04:39.600000124 8296 ee56ba0 LOG udpsrc gstudpsrc.c:882:gst_udpsrc_create:<udpsrc0> doing select, timeout -1
0:04:39.628925951 8296 ee56b10 DEBUG rtspsrc gstrtspsrc.c:2260:gst_rtspsrc_handle_src_event:<rtspsrc0> pad rtspsrc0:recv_rtp_src_0_275680090_96 received event qos
0:04:39.661793203 8296 ee56b10 DEBUG rtspsrc gstrtspsrc.c:2260:gst_rtspsrc_handle_src_event:<rtspsrc0> pad rtspsrc0:recv_rtp_src_0_275680090_96 received event qos
0:04:42.688912775 8296 ee56ba0 LOG udpsrc gstudpsrc.c:1014:gst_udpsrc_create:<udpsrc0> read packet of 26 bytes
0:04:42.689055513 8296 ee56ba0 WARN rtpjitterbuffer rtpjitterbuffer.c:570:calculate_skew: delta - skew: 0:00:03.058485393 too big, reset skew
Simply instantiating a QNetworkAccessManager will cause the active wifi network connection to stall for 3 seconds every 10s.
This affects, not just the Qt app, but also all other processes using the wifi connection.
From what I have gathered this is due to bearer management polling all interfaces every 10s (can be changed with the QT_BEARER_POLL_TIMEOUT environment variable).
On windows polling the wifi interface will trigger an ssid scan. That scan will stall the active connection. This might not happen with all wifi devices but does with mine.
In my case, setting QT_BEARER_POLL_TIMEOUT to less than 4 seconds results in a DoS ;)
https://msdn.microsoft.com/fr-fr/library/windows/desktop/ms706783(v=vs.85).aspx
quote: "Since it becomes more difficult for a wireless interface to send and receive data packets while a scan is occurring, the WlanScan function may increase latency until the network scan is complete."
# transmit gstreamer buffers over network
tcpserversrc host=0.0.0.0 port=50002 ! gdpdepay ! autovideoconvert ! autovideosink
v4l2src num-buffers=1 ! gdppay ! tcpclientsink host=0.0.0.0 port=50002
# play a rtsp stream
rtspsrc location=rtsp://192.168.42.1/AmbaStreamTest latency=30 ! decodebin ! timeoverlay ! autovideosink
# play video and sound
ksvideosrc ! queue ! mix.
directsoundsrc ! tee name=split ! queue ! directsoundsink
split. ! queue ! wavescope ! queue ! mix.
videomixer name=mix ! queue ! timeoverlay ! autovideosink
ksvideosrc ! queue ! timeoverlay ! autovideosink
directsoundsrc ! queue ! directsoundsink
directsoundsrc ! tee name=split ! queue ! directsoundsink
split. ! queue ! wavescope ! autovideosink
filesrc location=C:/Users/Utilisateur/Desktop/hst_2.mpg ! decodebin ! autovideosink
dx9screencapsrc ! queue ! videoconvert ! x264enc bitrate=498 ! avimux ! filesink location=capture.avi
compositor name=mixer background=black sink_0::offset=0 sink_1::offset=0 ! videoconvert ! autovideosink
ksvideosrc device_index=0 ! decodebin ! identity drop-probability=0 ! queue max-size-buffers=0 max-size-bytes=0 max-size-time=10000000000 ! mixer.
udpsrc port=9000 ! identity drop-probability=0 dump=false ! <something> ! video/x-raw, width=640, height=480 ! videorate drop-only=true ! video/x-raw, framerate=10/1 ! queue max-size-buffers=0 max-size-bytes=0 max-size-time=10000000000 ! mixer.
compositor name=mixer sink_1::ypos=50 ! videoconvert ! timeoverlay shaded-background=true auto-resize=false ! autovideosink sync=true
ksvideosrc ! video/x-raw ! decodebin ! queue ! mixer.
udpsrc port=9000 ! identity dump=false ! textrender halignment=left line-alignment=left ! video/x-raw, width=320, height=120 ! videorate drop-only=true ! video/x-raw, framerate=10/1 ! queue ! mixer.

View File

@ -0,0 +1,39 @@
/**
******************************************************************************
*
* @file overlay.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef OVERLAY_H_
#define OVERLAY_H_
/**
 * Abstract interface for an overlay drawn on top of the video output.
 * expose() is presumably called when the overlay must be (re)drawn —
 * confirm against the implementations in videowidget.cpp.
 */
class Overlay {
public:
    Overlay()
    {}
    virtual ~Overlay()
    {}

    // called when the overlay needs to be (re)drawn
    virtual void expose() = 0;
};
#endif /* OVERLAY_H_ */

View File

@ -0,0 +1,38 @@
/**
******************************************************************************
*
* @file pipeline.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "pipeline.h"
#include "gst_util.h"
/*
 * Constructing any Pipeline guarantees GStreamer itself is initialized
 * (gst::init is idempotent, so repeated construction is cheap).
 */
Pipeline::Pipeline()
{
    // initialize gstreamer
    gst::init(NULL, NULL);
}
// Nothing to release: the base class owns no GStreamer resources.
Pipeline::~Pipeline()
{}

View File

@ -0,0 +1,41 @@
/**
******************************************************************************
*
* @file pipeline.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef PIPELINE_H_
#define PIPELINE_H_
#include "gst_global.h"
/**
 * Minimal base class for GStreamer pipelines.
 * Construction ensures GStreamer itself is initialized (see pipeline.cpp).
 */
class GST_LIB_EXPORT Pipeline {
public:
    // pipeline states, matching the GStreamer GST_STATE_* names
    enum State {
        VoidPending, Null, Ready, Paused, Playing
    };
    Pipeline();
    virtual ~Pipeline();
};
#endif /* PIPELINE_H_ */

View File

@ -0,0 +1,413 @@
/**
******************************************************************************
*
* @file pipelineevent.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef PIPELINEEVENT_H_
#define PIPELINEEVENT_H_
#include <QEvent>
#include <QString>
#include "pipeline.h"
#include "overlay.h"
/**
 * Base class for all custom QEvents posted by the pipeline glue code.
 * Each concrete event kind gets its own QEvent type id, registered once at
 * load time (see the definitions below this class), and carries a source
 * identifier string.
 */
class PipelineEvent : public QEvent {
public:
// event types
static const QEvent::Type PrepareWindowId;
static const QEvent::Type StateChange;
static const QEvent::Type StreamStatus;
static const QEvent::Type NewClock;
static const QEvent::Type ClockProvide;
static const QEvent::Type ClockLost;
static const QEvent::Type Progress;
static const QEvent::Type Latency;
static const QEvent::Type Qos;
static const QEvent::Type Eos;
static const QEvent::Type Error;
static const QEvent::Type Warning;
static const QEvent::Type Info;
PipelineEvent(QEvent::Type type, QString src) :
QEvent(type), src(src)
{}
virtual ~PipelineEvent()
{}
public:
// source identifier — presumably the name of the emitting GStreamer
// object; confirm at the call sites that construct these events
QString src;
};
// Allocate a unique QEvent type id for each pipeline event kind at load time.
// NOTE(review): these are non-inline definitions in a header file; if this
// header is ever included from more than one translation unit this violates
// the ODR (duplicate symbols / multiple registrations). Confirm the header is
// included exactly once, or move these definitions into a .cpp file.
const QEvent::Type PipelineEvent::PrepareWindowId = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::StateChange = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::StreamStatus = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::NewClock = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::ClockProvide = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::ClockLost = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Progress = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Latency = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Qos = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Eos = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Error = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Warning = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Info = static_cast<QEvent::Type>(QEvent::registerEventType());
/**
 * Event posted when the video sink needs a native window to render into.
 * Carries the Overlay the sink should attach to.
 */
class PrepareWindowIdEvent : public PipelineEvent {
public:
    PrepareWindowIdEvent(QString src, Overlay *overlay) :
        PipelineEvent(PrepareWindowId, src), overlay(overlay)
    {}

    static QEvent::Type type()
    {
        return PrepareWindowId;
    }

    Overlay *getOverlay()
    {
        return overlay;
    }

private:
    Overlay *overlay;
};
/**
 * Event reporting a pipeline state transition (old -> new, plus the state
 * still pending, if any).
 */
class StateChangedEvent : public PipelineEvent {
public:
    StateChangedEvent(QString src, Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState) :
        PipelineEvent(StateChange, src), oldState(oldState), newState(newState), pendingState(pendingState)
    {}

    static QEvent::Type type()
    {
        return StateChange;
    }

    // Human readable name for a pipeline state.
    static const char *stateName(Pipeline::State state)
    {
        switch (state) {
        case Pipeline::VoidPending: return "VoidPending";
        case Pipeline::Null:        return "Null";
        case Pipeline::Ready:       return "Ready";
        case Pipeline::Paused:      return "Paused";
        case Pipeline::Playing:     return "Playing";
        }
        return "<unknown>";
    }

    Pipeline::State getOldState()
    {
        return oldState;
    }

    Pipeline::State getNewState()
    {
        return newState;
    }

    Pipeline::State getPendingState()
    {
        return pendingState;
    }

private:
    Pipeline::State oldState;
    Pipeline::State newState;
    Pipeline::State pendingState;
};
/**
 * Event describing a change in the status of a streaming thread,
 * together with the name of the thread's owner.
 */
class StreamStatusEvent : public PipelineEvent {
public:
    enum StreamStatusType {
        Create, Enter, Leave, Destroy, Start, Pause, Stop, Null
    };

    StreamStatusEvent(QString src, StreamStatusType status, QString owner) :
        PipelineEvent(StreamStatus, src), status(status), owner(owner)
    {}

    static QEvent::Type type()
    {
        return StreamStatus;
    }

    // Human readable name for a stream status value.
    static const char *statusName(StreamStatusType status)
    {
        switch (status) {
        case Create:  return "Create";
        case Enter:   return "Enter";
        case Leave:   return "Leave";
        case Destroy: return "Destroy";
        case Start:   return "Start";
        case Pause:   return "Pause";
        case Stop:    return "Stop";
        case Null:    return "Null";
        }
        return "<unknown>";
    }

    StreamStatusType getStatus()
    {
        return status;
    }

    const char *getStatusName()
    {
        return statusName(status);
    }

    QString getOwner()
    {
        return owner;
    }

private:
    StreamStatusType status;
    QString owner;
};
/**
 * Common base for clock related events; carries the clock's name.
 */
class ClockEvent : public PipelineEvent {
public:
    ClockEvent(QEvent::Type type, QString src, QString name) :
        PipelineEvent(type, src), name(name)
    {}

    QString getName()
    {
        return name;
    }

private:
    QString name;
};
/**
 * Event posted when a new clock was selected.
 */
class NewClockEvent : public ClockEvent {
public:
    NewClockEvent(QString src, QString name) :
        ClockEvent(NewClock, src, name)
    {}

    static QEvent::Type type()
    {
        return NewClock;
    }
};
/**
 * Event posted when an element can — or no longer can — provide a clock;
 * the readiness flag distinguishes the two cases.
 */
class ClockProvideEvent : public ClockEvent {
public:
    ClockProvideEvent(QString src, QString name, bool ready) :
        ClockEvent(ClockProvide, src, name), ready(ready)
    {}

    static QEvent::Type type()
    {
        return ClockProvide;
    }

    bool isReady()
    {
        return ready;
    }

private:
    bool ready;
};
/**
 * Event posted when the current clock became unusable.
 */
class ClockLostEvent : public ClockEvent {
public:
    ClockLostEvent(QString src, QString name) :
        ClockEvent(ClockLost, src, name)
    {}

    static QEvent::Type type()
    {
        return ClockLost;
    }
};
/**
 * Progress event carrying a phase (start/continue/complete/...), a
 * machine readable code and a human readable text.
 */
class ProgressEvent : public PipelineEvent {
public:
    enum ProgressType {
        Start, Continue, Complete, Cancelled, Error
    };

    ProgressEvent(QString src, ProgressType progressType, QString code, QString text) :
        PipelineEvent(Progress, src), progressType(progressType), code(code), text(text)
    {}

    static QEvent::Type type()
    {
        return Progress;
    }

    ProgressType getProgressType()
    {
        return progressType;
    }

    QString getCode()
    {
        return code;
    }

    QString getText()
    {
        return text;
    }

private:
    ProgressType progressType;
    QString code;
    QString text;
};
/**
 * Latency message event; carries no payload beyond the source.
 */
class LatencyEvent : public PipelineEvent {
public:
    LatencyEvent(QString src) :
        PipelineEvent(Latency, src)
    {}

    static QEvent::Type type()
    {
        return Latency;
    }
};
/**
 * Plain data holder for the fields of a GStreamer QoS message.
 *
 * All members are zero-initialized so that a partially populated instance
 * never exposes indeterminate values (previously every field was left
 * uninitialized, so copying/printing an incomplete QosData read garbage).
 */
class QosData {
public:
    // timestamps and live status
    // If the message was generated by a live element
    bool live = false;
    // running_time, stream_time, timestamp and duration of the dropped buffer.
    // Values of GST_CLOCK_TIME_NONE mean unknown values.
    quint64 running_time = 0;
    quint64 stream_time = 0;
    quint64 timestamp = 0;
    quint64 duration = 0;

    // values
    // The difference of the running-time against the deadline.
    qint64 jitter = 0;
    // Long term prediction of the ideal rate relative to normal rate to get optimal quality.
    qreal proportion = 0.0; // won't work on ARM?
    // An element dependent integer value that specifies the current quality level of the element.
    // The default maximum quality is 1000000.
    qint32 quality = 0;

    // stats
    // QoS stats representing the history of the current continuous pipeline playback period.
    // When format is GST_FORMAT_UNDEFINED both dropped and processed are invalid.
    // Values of -1 for either processed or dropped mean unknown values.
    // Units of the 'processed' and 'dropped' fields.
    // Video sinks and video filters will use GST_FORMAT_BUFFERS (frames).
    // Audio sinks and audio filters will likely use GST_FORMAT_DEFAULT (samples)
    // GstFormat format;
    // Total number of units correctly processed since the last state change to READY or a flushing operation.
    quint64 processed = 0;
    // Total number of units dropped since the last state change to READY or a flushing operation.
    quint64 dropped = 0;

    // One-line summary of the timestamp related fields.
    QString timestamps()
    {
        return QString("live: %0; running time: %1; stream time: %2; timestamp: %3; duration: %4").arg(live).arg(
            running_time).arg(stream_time).arg(timestamp).arg(duration);
    }

    // One-line summary of the jitter/proportion/quality fields.
    QString values()
    {
        return QString("jitter: %0; proportion: %1; quality: %2;").arg(jitter).arg(proportion).arg(quality);
    }

    // One-line summary of the statistics fields (format is currently unused).
    QString stats()
    {
        return QString("format: %0; processed: %1; dropped: %2;").arg("").arg(processed).arg(dropped);
    }
};
/**
 * Quality-of-service event wrapping a QosData snapshot (stored by value).
 */
class QosEvent : public PipelineEvent {
public:
    QosEvent(QString src, QosData data) :
        PipelineEvent(Qos, src), data(data)
    {}

    static QEvent::Type type()
    {
        return Qos;
    }

    QosData getData()
    {
        return data;
    }

private:
    QosData data;
};
/**
 * End-of-stream event; carries no payload beyond the source.
 */
class EosEvent : public PipelineEvent {
public:
    EosEvent(QString src) :
        PipelineEvent(Eos, src)
    {}

    static QEvent::Type type()
    {
        return Eos;
    }
};
/**
 * Common base for error/warning/info events: a message string plus
 * optional debug details.
 */
class MessageEvent : public PipelineEvent {
public:
    MessageEvent(QEvent::Type type, QString src, QString message, QString debug) :
        PipelineEvent(type, src), message(message), debug(debug)
    {}

    QString getMessage()
    {
        return message;
    }

    QString getDebug()
    {
        return debug;
    }

private:
    QString message;
    QString debug;
};
/**
 * Error message event.
 */
class ErrorEvent : public MessageEvent {
public:
    ErrorEvent(QString src, QString message, QString debug) :
        MessageEvent(Error, src, message, debug)
    {}

    static QEvent::Type type()
    {
        return Error;
    }
};
/**
 * Warning message event.
 */
class WarningEvent : public MessageEvent {
public:
    WarningEvent(QString src, QString message, QString debug) :
        MessageEvent(Warning, src, message, debug)
    {}

    static QEvent::Type type()
    {
        return Warning;
    }
};
/**
 * Informational message event.
 */
class InfoEvent : public MessageEvent {
public:
    InfoEvent(QString src, QString message, QString debug) :
        MessageEvent(Info, src, message, debug)
    {}

    static QEvent::Type type()
    {
        return Info;
    }
};
#endif /* PIPELINEEVENT_H_ */

View File

@ -0,0 +1,96 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Library <2002> Ronald Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2007 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "cameraevent.hpp"
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <QString>
/**
 * gst_camera_event_new_calibrated:
 * @settings: serialized undistort settings to attach to the event
 *     (see camera_serialize_undistort_settings()).
 *
 * Creates a new custom "calibrated" event announcing that camera
 * calibration results are available.
 *
 * To parse an event created by gst_camera_event_new_calibrated() use
 * gst_camera_event_parse_calibrated().
 *
 * Returns: the new #GstEvent
 */
GstEvent *
gst_camera_event_new_calibrated (gchar * settings)
{
  GstEvent *calibrated_event;
  GstStructure *s;

  /* gst_structure_new() copies G_TYPE_STRING values, so @settings is passed
   * directly; the previous g_strdup() here leaked the duplicate string. */
  s = gst_structure_new (GST_CAMERA_EVENT_CALIBRATED_NAME,
      "undistort-settings", G_TYPE_STRING, settings, NULL);

  calibrated_event = gst_event_new_custom (GST_EVENT_CUSTOM_BOTH, s);

  return calibrated_event;
}
/**
 * gst_camera_event_parse_calibrated:
 * @event: a #GstEvent to parse
 * @settings: location to receive a newly allocated copy of the serialized
 *     undistort settings, or NULL to only check the event type.
 *     Free the returned string with g_free().
 *
 * Parses a #GstEvent and determines whether it is a "calibrated" event
 * created with gst_camera_event_new_calibrated(). If it is, the attached
 * undistort settings are copied into @settings.
 *
 * Returns: %TRUE if the event is a valid calibrated event, %FALSE if not
 */
gboolean
gst_camera_event_parse_calibrated (GstEvent * event, gchar ** settings)
{
  const GstStructure *s;
  const gchar *str;

  g_return_val_if_fail (event != NULL, FALSE);

  if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_BOTH)
    return FALSE;               /* Not a calibrated event */

  s = gst_event_get_structure (event);
  if (s == NULL
      || !gst_structure_has_name (s, GST_CAMERA_EVENT_CALIBRATED_NAME))
    return FALSE;               /* Not a calibrated event */

  str = gst_structure_get_string (s, "undistort-settings");
  if (!str)
    return FALSE;               /* Not a calibrated event */

  /* allow callers that only want to identify the event to pass NULL
   * (previously a NULL @settings crashed here) */
  if (settings)
    *settings = g_strdup (str);

  return TRUE;
}

View File

@ -0,0 +1,37 @@
/* GStreamer
* Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_EVENT_H__
#define __GST_CAMERA_EVENT_H__
#include <gst/gst.h>
G_BEGIN_DECLS
/* Name of the GstStructure carried by the custom "calibrated" event. */
#define GST_CAMERA_EVENT_CALIBRATED_NAME "GstEventCalibrated"
/* camera calibration event creation and parsing */
/* Creates a custom event carrying serialized undistort settings. */
GstEvent * gst_camera_event_new_calibrated (gchar * settings);
/* Returns TRUE and copies the settings (caller frees with g_free()) if
 * the event is a calibrated event. */
gboolean gst_camera_event_parse_calibrated (GstEvent * event, gchar ** settings);
G_END_DECLS
#endif /* __GST_CAMERA_EVENT_H__ */

View File

@ -0,0 +1,48 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Library <2002> Ronald Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2007 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "camerautils.hpp"
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <QString>
/*
 * Serializes the camera matrix and distortion coefficients to an in-memory
 * XML document.
 *
 * Returns a newly allocated string; free with g_free().
 */
gchar *
camera_serialize_undistort_settings (cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
  /* WRITE and MEMORY are bit flags: combine with | (the original used +,
   * which yields the same value here but obscures the intent). */
  cv::FileStorage fs (".xml", cv::FileStorage::WRITE | cv::FileStorage::MEMORY);

  fs << "cameraMatrix" << cameraMatrix;
  fs << "distCoeffs" << distCoeffs;

  std::string buf = fs.releaseAndGetString ();
  return g_strdup (buf.c_str ());
}
/*
 * Parses a settings string produced by camera_serialize_undistort_settings()
 * back into the camera matrix and distortion coefficients.
 *
 * Returns FALSE if the string cannot be opened as an in-memory document
 * (previously this always returned TRUE, even on garbage input).
 */
gboolean
camera_deserialize_undistort_settings (gchar * str, cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
  /* READ and MEMORY are bit flags: combine with | */
  cv::FileStorage fs (str, cv::FileStorage::READ | cv::FileStorage::MEMORY);

  if (!fs.isOpened ())
    return FALSE;

  fs["cameraMatrix"] >> cameraMatrix;
  fs["distCoeffs"] >> distCoeffs;
  return TRUE;
}

View File

@ -0,0 +1,34 @@
/* GStreamer
* Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_UTILS_H__
#define __GST_CAMERA_UTILS_H__
#include <gst/gst.h>
/* NOTE(review): <cv.h> is the legacy OpenCV 1.x C header; for OpenCV 2/3
 * builds <opencv2/core/core.hpp> is the usual include — confirm against the
 * OpenCV version used by the build. */
#include <cv.h>
G_BEGIN_DECLS
/* Serializes the matrices to a newly allocated string (free with g_free()). */
gchar *camera_serialize_undistort_settings (cv::Mat &cameraMatrix, cv::Mat &distCoeffs);
/* Parses a serialized settings string back into the two matrices. */
gboolean camera_deserialize_undistort_settings (gchar *str, cv::Mat &cameraMatrix, cv::Mat &distCoeffs);
G_END_DECLS
#endif /* __GST_CAMERA_UTILS_H__ */

View File

@ -0,0 +1,954 @@
/*
* GStreamer
* Copyright (C) 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright (C) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2008 Michael Sheldon <mike@mikeasoft.com>
* Copyright (C) 2011 Stefan Sauer <ensonic@users.sf.net>
* Copyright (C) 2014 Robert Jobbagy <jobbagy.robert@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-cameracalibration
*
* Performs face detection on videos and images.
* If you have high cpu load you need to use videoscale with capsfilter and reduce the video resolution.
*
* The image is scaled down multiple times using the GstCameraCalibration::scale-factor
* until the size is &lt;= GstCameraCalibration::min-size-width or
* GstCameraCalibration::min-size-height.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch-1.0 autovideosrc ! decodebin ! colorspace ! cameracalibration ! videoconvert ! xvimagesink
* ]| Detect and show faces
* |[
* gst-launch-1.0 autovideosrc ! video/x-raw,width=320,height=240 ! videoconvert ! cameracalibration min-size-width=60 min-size-height=60 ! colorspace ! xvimagesink
* ]| Detect large faces on a smaller image
*
* </refsect2>
*/
/* FIXME: development version of OpenCV has CV_HAAR_FIND_BIGGEST_OBJECT which
* we might want to use if available
* see https://code.ros.org/svn/opencv/trunk/opencv/modules/objdetect/src/haar.cpp
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "gstcameracalibration.h"
#if (CV_MAJOR_VERSION >= 3)
#include <opencv2/imgproc.hpp>
#endif
#include <opencv2/calib3d.hpp>
#include <gst/opencv/gstopencvutils.h>
#include "camerautils.hpp"
#include "cameraevent.hpp"
#include <vector>
#include <QDebug>
GST_DEBUG_CATEGORY_STATIC (gst_camera_calibration_debug);
#define GST_CAT_DEFAULT gst_camera_calibration_debug
#define DEFAULT_CALIBRATON_PATTERN GST_CAMERACALIBRATION_PATTERN_CHESSBOARD
#define DEFAULT_BOARD_WIDTH 9
#define DEFAULT_BOARD_HEIGHT 6
#define DEFAULT_SQUARE_SIZE 50
#define DEFAULT_ASPECT_RATIO 1.0
#define DEFAULT_CORNER_SUB_PIXEL true
#define DEFAULT_ZERO_TANGENT_DISTORTION false
#define DEFAULT_CENTER_PRINCIPAL_POINT false
#define DEFAULT_USE_FISHEYE false
#define DEFAULT_FRAME_COUNT 25
#define DEFAULT_DELAY 350
#define DEFAULT_SHOW_CORNERS true
///* Filter signals and args */
//enum
//{
// /* FILL ME */
// LAST_SIGNAL
//};
enum
{
PROP_0,
PROP_CALIBRATON_PATTERN,
PROP_BOARD_WIDTH,
PROP_BOARD_HEIGHT,
PROP_SQUARE_SIZE,
PROP_ASPECT_RATIO,
PROP_CORNER_SUB_PIXEL,
PROP_ZERO_TANGENT_DISTORTION,
PROP_CENTER_PRINCIPAL_POINT,
PROP_USE_FISHEYE,
PROP_FRAME_COUNT,
PROP_DELAY,
PROP_SHOW_CORNERS
};
enum {
DETECTION = 0,
CAPTURING = 1,
CALIBRATED = 2
};
#define GST_TYPE_CAMERA_CALIBRATION_PATTERN (cameracalibration_pattern_get_type ())
/*
 * Lazily registers (once) and returns the GEnum type describing the
 * supported calibration target patterns.
 */
static GType
cameracalibration_pattern_get_type (void)
{
  /* g_once_init_enter/leave make the one-time registration thread safe;
   * the previous plain "if (!type)" check could race if two threads
   * instantiated the element concurrently. */
  static gsize cameracalibration_pattern_type = 0;
  static const GEnumValue cameracalibration_pattern[] = {
    {GST_CAMERACALIBRATION_PATTERN_CHESSBOARD, "Chessboard", "chessboard"},
    {GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID, "Circle Grids", "circle_grids"},
    {GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID, "Asymmetric Circle Grids", "asymmetric_circle_grids"},
    {0, NULL, NULL},
  };

  if (g_once_init_enter (&cameracalibration_pattern_type)) {
    GType tmp =
        g_enum_register_static ("GstCameraCalibrationPattern", cameracalibration_pattern);
    g_once_init_leave (&cameracalibration_pattern_type, tmp);
  }
  return (GType) cameracalibration_pattern_type;
}
G_DEFINE_TYPE (GstCameraCalibration, gst_camera_calibration, GST_TYPE_OPENCV_VIDEO_FILTER);
/* forward declarations of the GObject property accessors and the in-place
 * transform implemented below */
static void gst_camera_calibration_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_camera_calibration_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
//static gboolean gst_camera_calibration_set_caps (GstOpencvVideoFilter * transform,
// gint in_width, gint in_height, gint in_depth, gint in_channels,
// gint out_width, gint out_height, gint out_depth, gint out_channels);
static GstFlowReturn gst_camera_calibration_transform_frame_ip (
GstOpencvVideoFilter * cvfilter, GstBuffer * frame, IplImage * img);
/* Clean up: no instance-owned resources to release here, so this only
 * chains up to the parent class finalize. */
static void
gst_camera_calibration_finalize (GObject * obj)
{
G_OBJECT_CLASS (gst_camera_calibration_parent_class)->finalize (obj);
}
/* initialize the cameracalibration's class: wire up the GObject vfuncs,
 * install all element properties, set the element metadata and add the
 * sink/src pad templates. */
static void
gst_camera_calibration_class_init (GstCameraCalibrationClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstOpencvVideoFilterClass *opencvfilter_class = GST_OPENCV_VIDEO_FILTER_CLASS (klass);

  GstCaps *caps;
  GstPadTemplate *templ;

  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_camera_calibration_finalize);
  gobject_class->set_property = gst_camera_calibration_set_property;
  gobject_class->get_property = gst_camera_calibration_get_property;

  /* frames are processed in place (see transform_frame_ip) */
  opencvfilter_class->cv_trans_ip_func =
      gst_camera_calibration_transform_frame_ip;

  g_object_class_install_property (gobject_class, PROP_CALIBRATON_PATTERN,
      g_param_spec_enum ("pattern", "Calibration Pattern",
          "One of the chessboard, circles, or asymmetric circle pattern",
          GST_TYPE_CAMERA_CALIBRATION_PATTERN, DEFAULT_CALIBRATON_PATTERN,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_BOARD_WIDTH,
      g_param_spec_int ("board-width", "Board Width",
          "The board width in number of items",
          1, G_MAXINT, DEFAULT_BOARD_WIDTH,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_BOARD_HEIGHT,
      g_param_spec_int ("board-height", "Board Height",
          "The board height in number of items",
          /* fixed: the default previously used DEFAULT_BOARD_WIDTH */
          1, G_MAXINT, DEFAULT_BOARD_HEIGHT,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_SQUARE_SIZE,
      g_param_spec_float ("square-size", "Square Size",
          "The size of a square in your defined unit (point, millimeter, etc.)",
          0.0, G_MAXFLOAT, DEFAULT_SQUARE_SIZE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_ASPECT_RATIO,
      g_param_spec_float ("aspect-ratio", "Aspect Ratio",
          "The aspect ratio",
          0.0, G_MAXFLOAT, DEFAULT_ASPECT_RATIO,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_CORNER_SUB_PIXEL,
      g_param_spec_boolean ("corner-sub-pixel", "Corner Sub Pixel",
          "Improve corner detection accuracy for chessboard",
          DEFAULT_CORNER_SUB_PIXEL, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  /* NOTE: "distorsion" is a typo, but it is the published property name;
   * renaming it would break existing pipelines, so it is kept as is. */
  g_object_class_install_property (gobject_class, PROP_ZERO_TANGENT_DISTORTION,
      g_param_spec_boolean ("zero-tangent-distorsion", "Zero Tangent Distorsion",
          "Assume zero tangential distortion",
          DEFAULT_ZERO_TANGENT_DISTORTION, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_CENTER_PRINCIPAL_POINT,
      g_param_spec_boolean ("center-principal-point", "Center Principal Point",
          "Fix the principal point at the center",
          DEFAULT_CENTER_PRINCIPAL_POINT, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_USE_FISHEYE,
      g_param_spec_boolean ("use-fisheye", "Use Fisheye",
          "Use fisheye camera model for calibration",
          DEFAULT_USE_FISHEYE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DELAY,
      g_param_spec_int ("delay", "Delay",
          "Sampling periodicity in ms", 0, G_MAXINT,
          DEFAULT_DELAY,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_FRAME_COUNT,
      g_param_spec_int ("frame-count", "Frame Count",
          "The number of frames to use from the input for calibration", 1, G_MAXINT,
          DEFAULT_FRAME_COUNT,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_SHOW_CORNERS,
      g_param_spec_boolean ("show-corners", "Show Corners",
          "Show corners",
          DEFAULT_SHOW_CORNERS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (element_class,
      "cameracalibration",
      "Filter/Effect/Video",
      "Performs camera calibration",
      "Philippe Renon <philippe_renon@yahoo.fr>");

  /* add sink and source pad templates: RGBA, RGB and gray frames accepted */
  caps = gst_opencv_caps_from_cv_image_type (CV_8UC4);
  gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC3));
  gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
  templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      gst_caps_ref (caps));
  gst_element_class_add_pad_template (element_class, templ);
  templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
  gst_element_class_add_pad_template (element_class, templ);
  // gst_element_class_add_static_pad_template (element_class, &src_factory);
  // gst_element_class_add_static_pad_template (element_class, &sink_factory);
}
/* initialize the new element
* initialize instance structure: set all properties to their defaults and
* derive the initial OpenCV calibration flag set from them
*/
static void
gst_camera_calibration_init (GstCameraCalibration * calib)
{
calib->calibrationPattern = DEFAULT_CALIBRATON_PATTERN;
calib->boardSize.width = DEFAULT_BOARD_WIDTH;
calib->boardSize.height = DEFAULT_BOARD_HEIGHT;
calib->squareSize = DEFAULT_SQUARE_SIZE;
calib->aspectRatio = DEFAULT_ASPECT_RATIO;
calib->cornerSubPix = DEFAULT_CORNER_SUB_PIXEL;
calib->calibZeroTangentDist = DEFAULT_ZERO_TANGENT_DISTORTION;
calib->calibFixPrincipalPoint = DEFAULT_CENTER_PRINCIPAL_POINT;
calib->useFisheye = DEFAULT_USE_FISHEYE;
calib->nrFrames = DEFAULT_FRAME_COUNT;
calib->delay = DEFAULT_DELAY;
calib->showCorners = DEFAULT_SHOW_CORNERS;
// base flag set; the boolean properties below add their flags on top.
// NOTE(review): flags are computed here from the defaults only — setting a
// property afterwards does not update them; confirm this is intended.
calib->flags = cv::CALIB_FIX_K4 | cv::CALIB_FIX_K5;
if (calib->calibFixPrincipalPoint) calib->flags |= cv::CALIB_FIX_PRINCIPAL_POINT;
if (calib->calibZeroTangentDist) calib->flags |= cv::CALIB_ZERO_TANGENT_DIST;
// non-zero aspect ratio (default 1.0) fixes the aspect ratio
if (calib->aspectRatio) calib->flags |= cv::CALIB_FIX_ASPECT_RATIO;
if (calib->useFisheye) {
// the fisheye model has its own enum, so overwrite the flags
calib->flags = cv::fisheye::CALIB_FIX_SKEW | cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC |
// cv::fisheye::CALIB_FIX_K1 |
cv::fisheye::CALIB_FIX_K2 | cv::fisheye::CALIB_FIX_K3 | cv::fisheye::CALIB_FIX_K4;
}
// start capturing immediately (DETECTION mode is currently bypassed)
calib->mode = CAPTURING; //DETECTION;
calib->prevTimestamp = 0;
calib->imagePoints.clear();
// assigning 0 to a cv::Mat sets all elements via the Scalar overload; on a
// freshly constructed (empty) Mat this is effectively a no-op
calib->cameraMatrix = 0;
calib->distCoeffs = 0;
// frames are modified in place rather than copied
gst_opencv_video_filter_set_in_place (
GST_OPENCV_VIDEO_FILTER_CAST (calib), TRUE);
}
/* standard GObject property setter: maps each property id onto the
 * corresponding instance field */
static void
gst_camera_calibration_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (object);
switch (prop_id) {
case PROP_CALIBRATON_PATTERN:
calib->calibrationPattern = g_value_get_enum (value);
break;
case PROP_BOARD_WIDTH:
calib->boardSize.width = g_value_get_int (value);
break;
case PROP_BOARD_HEIGHT:
calib->boardSize.height = g_value_get_int (value);
break;
case PROP_SQUARE_SIZE:
calib->squareSize = g_value_get_float (value);
break;
case PROP_ASPECT_RATIO:
calib->aspectRatio = g_value_get_float (value);
break;
case PROP_CORNER_SUB_PIXEL:
calib->cornerSubPix = g_value_get_boolean (value);
break;
case PROP_ZERO_TANGENT_DISTORTION:
calib->calibZeroTangentDist = g_value_get_boolean (value);
break;
case PROP_CENTER_PRINCIPAL_POINT:
calib->calibFixPrincipalPoint = g_value_get_boolean (value);
break;
case PROP_USE_FISHEYE:
calib->useFisheye = g_value_get_boolean (value);
break;
case PROP_FRAME_COUNT:
calib->nrFrames = g_value_get_int (value);
break;
case PROP_DELAY:
calib->delay = g_value_get_int (value);
break;
case PROP_SHOW_CORNERS:
calib->showCorners = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* standard GObject property getter: mirror of set_property above */
static void
gst_camera_calibration_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (object);
switch (prop_id) {
case PROP_CALIBRATON_PATTERN:
g_value_set_enum (value, calib->calibrationPattern);
break;
case PROP_BOARD_WIDTH:
g_value_set_int (value, calib->boardSize.width);
break;
case PROP_BOARD_HEIGHT:
g_value_set_int (value, calib->boardSize.height);
break;
case PROP_SQUARE_SIZE:
g_value_set_float (value, calib->squareSize);
break;
case PROP_ASPECT_RATIO:
g_value_set_float (value, calib->aspectRatio);
break;
case PROP_CORNER_SUB_PIXEL:
g_value_set_boolean (value, calib->cornerSubPix);
break;
case PROP_ZERO_TANGENT_DISTORTION:
g_value_set_boolean (value, calib->calibZeroTangentDist);
break;
case PROP_CENTER_PRINCIPAL_POINT:
g_value_set_boolean (value, calib->calibFixPrincipalPoint);
break;
case PROP_USE_FISHEYE:
g_value_set_boolean (value, calib->useFisheye);
break;
case PROP_FRAME_COUNT:
g_value_set_int (value, calib->nrFrames);
break;
case PROP_DELAY:
g_value_set_int (value, calib->delay);
break;
case PROP_SHOW_CORNERS:
g_value_set_boolean (value, calib->showCorners);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GstElement vmethod implementations */
/* this function handles the link with other elements */
//static gboolean
//gst_camera_calibration_set_caps (GstOpencvVideoFilter * transform, gint in_width,
// gint in_height, gint in_depth, gint in_channels,
// gint out_width, gint out_height, gint out_depth, gint out_channels)
//{
// GstCameraCalibration *calib;
//
// calib = GST_CAMERA_CALIBRATION (transform);
//
// if (calib->cvGray)
// cvReleaseImage (&calib->cvGray);
//
// calib->cvGray = cvCreateImage (cvSize (in_width, in_height), IPL_DEPTH_8U,
// 1);
//
// return TRUE;
//}
//static GstMessage *
//gst_camera_calibration_message_new (GstCameraCalibration * calib, GstBuffer * buf)
//{
// GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (calib);
// GstStructure *s;
// GstClockTime running_time, stream_time;
//
// running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
// stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
//
// s = gst_structure_new ("cameracalibration",
// "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (buf),
// "stream-time", G_TYPE_UINT64, stream_time,
// "running-time", G_TYPE_UINT64, running_time,
// "duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf), NULL);
//
// return gst_message_new_element (GST_OBJECT (calib), s);
//}
/* forward declaration: per-frame worker implemented further below */
void camera_calibration_run(GstCameraCalibration *calib, IplImage *img);
/*
 * In-place transform vmethod: feed every incoming frame to the
 * calibration state machine (the frame may be drawn on in place).
 */
static GstFlowReturn
gst_camera_calibration_transform_frame_ip (GstOpencvVideoFilter * cvfilter,
    G_GNUC_UNUSED GstBuffer * frame, IplImage * img)
{
    GstCameraCalibration *self = GST_CAMERA_CALIBRATION (cvfilter);

    camera_calibration_run (self, img);

    return GST_FLOW_OK;
}
/*
 * Plugin entry point: set up the debug category used by this file's
 * logging macros, then register the cameracalibration element.
 */
gboolean
gst_camera_calibration_plugin_init (GstPlugin * plugin)
{
    /* category name shows up as "cameracalibration" in GST_DEBUG output */
    GST_DEBUG_CATEGORY_INIT (gst_camera_calibration_debug,
        "cameracalibration", 0, "Performs camera calibration");

    return gst_element_register (plugin, "cameracalibration",
        GST_RANK_NONE, GST_TYPE_CAMERA_CALIBRATION);
}
// void validate()
// {
// goodInput = true;
// if (boardSize.width <= 0 || boardSize.height <= 0)
// {
// cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << endl;
// goodInput = false;
// }
// if (squareSize <= 10e-6)
// {
// cerr << "Invalid square size " << squareSize << endl;
// goodInput = false;
// }
// if (nrFrames <= 0)
// {
// cerr << "Invalid number of frames " << nrFrames << endl;
// goodInput = false;
// }
//
// if (input.empty()) // Check for valid input
// inputType = INVALID;
// else
// {
// if (input[0] >= '0' && input[0] <= '9')
// {
// stringstream ss(input);
// ss >> cameraID;
// inputType = CAMERA;
// }
// else
// {
// if (readStringList(input, imageList))
// {
// inputType = IMAGE_LIST;
// nrFrames = (nrFrames < (int)imageList.size()) ? nrFrames : (int)imageList.size();
// }
// else
// inputType = VIDEO_FILE;
// }
// if (inputType == CAMERA)
// inputCapture.open(cameraID);
// if (inputType == VIDEO_FILE)
// inputCapture.open(input);
// if (inputType != IMAGE_LIST && !inputCapture.isOpened())
// inputType = INVALID;
// }
// if (inputType == INVALID)
// {
// cerr << " Input does not exist: " << input;
// goodInput = false;
// }
//
// flag = CALIB_FIX_K4 | CALIB_FIX_K5;
// if(calibFixPrincipalPoint) flag |= CALIB_FIX_PRINCIPAL_POINT;
// if(calibZeroTangentDist) flag |= CALIB_ZERO_TANGENT_DIST;
// if(aspectRatio) flag |= CALIB_FIX_ASPECT_RATIO;
//
// if (useFisheye) {
// // the fisheye model has its own enum, so overwrite the flags
// flag = fisheye::CALIB_FIX_SKEW | fisheye::CALIB_RECOMPUTE_EXTRINSIC |
// // fisheye::CALIB_FIX_K1 |
// fisheye::CALIB_FIX_K2 | fisheye::CALIB_FIX_K3 | fisheye::CALIB_FIX_K4;
// }
//
// calibrationPattern = NOT_EXISTING;
// if (!patternToUse.compare("CHESSBOARD")) calibrationPattern = CHESSBOARD;
// if (!patternToUse.compare("CIRCLES_GRID")) calibrationPattern = CIRCLES_GRID;
// if (!patternToUse.compare("ASYMMETRIC_CIRCLES_GRID")) calibrationPattern = ASYMMETRIC_CIRCLES_GRID;
// if (calibrationPattern == NOT_EXISTING)
// {
// cerr << " Camera calibration mode does not exist: " << patternToUse << endl;
// goodInput = false;
// }
// atImageList = 0;
//
// }
/* forward declarations for the calibration helpers defined below */
bool runCalibration(GstCameraCalibration *calib, cv::Size imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
    std::vector<std::vector<cv::Point2f> > imagePoints );
void doCalibration(GstElement *element, gpointer user_data);
/*
 * Per-frame processing for the cameracalibration element.
 *
 * In CAPTURING mode: search the frame for the configured calibration
 * pattern, accumulate detected point sets (rate-limited by calib->delay,
 * in milliseconds), and once calib->nrFrames sets have been collected run
 * the calibration and push a custom "calibrated" event upstream carrying
 * the serialized camera matrix and distortion coefficients.
 * A status string is always drawn onto the frame (modified in place).
 */
void camera_calibration_run(GstCameraCalibration *calib, IplImage *img)
{
    /* wrap the IplImage without copying; drawing below writes into the buffer */
    cv::Mat view = cv::cvarrToMat(img);

    // For camera only take new samples after delay time
    if (calib->mode == CAPTURING) {
        // get_input
        cv::Size imageSize = view.size();

        // find_pattern
        // FIXME find ways to reduce CPU usage
        // don't do it on all frames ? will it help ? corner display will be affected.
        // in a separate frame?
        // in a separate element that gets composited back into the main stream (video is tee-d into it and can then be decimated, scaled, etc..)
        std::vector<cv::Point2f> pointBuf;
        bool found;

        int chessBoardFlags = cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;
        if (!calib->useFisheye) {
            // fast check erroneously fails with high distortions like fisheye
            chessBoardFlags |= cv::CALIB_CB_FAST_CHECK;
        }

        // Find feature points on the input format
        switch(calib->calibrationPattern) {
        case GST_CAMERACALIBRATION_PATTERN_CHESSBOARD:
            found = cv::findChessboardCorners(view, calib->boardSize, pointBuf, chessBoardFlags);
            break;
        case GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID:
            found = cv::findCirclesGrid(view, calib->boardSize, pointBuf);
            break;
        case GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
            found = cv::findCirclesGrid(view, calib->boardSize, pointBuf, cv::CALIB_CB_ASYMMETRIC_GRID );
            break;
        default:
            /* unknown pattern value: treat as not found */
            found = false;
            break;
        }

        /* set when a sample is accepted below; flashes the frame as feedback */
        bool blinkOutput = false;
        if (found) {
            // improve the found corners' coordinate accuracy for chessboard
            if (calib->calibrationPattern == GST_CAMERACALIBRATION_PATTERN_CHESSBOARD && calib->cornerSubPix) {
                // FIXME findChessboardCorners and alike do a cv::COLOR_BGR2GRAY (and a histogram balance)
                // the color convert should be done once (if needed) and shared
                // FIXME keep viewGray around to avoid reallocating it each time...
                cv::Mat viewGray;
                cv::cvtColor(view, viewGray, cv::COLOR_BGR2GRAY);
                cv::cornerSubPix(viewGray, pointBuf, cv::Size(11, 11),
                    cv::Size(-1, -1), cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));
            }

            // For camera only take new samples after delay time
            /* calib->delay is milliseconds, compared against clock() ticks */
            if ((calib->mode == CAPTURING) && ((clock() - calib->prevTimestamp) > calib->delay * 1e-3 * CLOCKS_PER_SEC)) {
                calib->imagePoints.push_back(pointBuf);
                calib->prevTimestamp = clock();
                blinkOutput = true;
            }

            // Draw the cornerfilter
            if (calib->showCorners) {
                cv::drawChessboardCorners(view, calib->boardSize, cv::Mat(pointBuf), found);
            }
        }

        // If got enough frames then stop calibration and show result
        if (calib->mode == CAPTURING && calib->imagePoints.size() >= (size_t)calib->nrFrames) {
            //GstElementCallAsyncFunc func;
            //gst_element_call_async (GST_ELEMENT (calib), /*GstElementCallAsyncFunc*/ doCalibration, NULL, NULL);
            /* NOTE(review): calibration runs synchronously in the streaming
             * thread and can be slow — see the commented call_async above */
            if (runCalibration(calib, imageSize, calib->cameraMatrix, calib->distCoeffs, calib->imagePoints)) {
                calib->mode = CALIBRATED;

                GstPad *sinkPad = GST_BASE_TRANSFORM_SINK_PAD (calib);
                //GstPad *srcPad = GST_BASE_TRANSFORM_SRC_PAD (calib);
                GstEvent *event;
                //gboolean result;

                // create calibrated event and send upstream and downstream
                // FIXME should keep settings around for answering queries
                gchar *settings = camera_serialize_undistort_settings(calib->cameraMatrix, calib->distCoeffs);
                event = gst_camera_event_new_calibrated(settings);
                g_free (settings);

                //gst_event_ref(event);

                GST_LOG_OBJECT (sinkPad, "Sending upstream event %s.", GST_EVENT_TYPE_NAME (event));
                if (!gst_pad_push_event (sinkPad, event)) {
                    GST_WARNING_OBJECT (sinkPad, "Sending upstream event %p (%s) failed.",
                        event, GST_EVENT_TYPE_NAME (event));
                }

                // GST_LOG_OBJECT (srcPad, "Sending downstream event %s.", GST_EVENT_TYPE_NAME (event));
                // if (!gst_pad_push_event (srcPad, event)) {
                // GST_WARNING_OBJECT (srcPad, "Sending downstream event %p (%s) failed.",
                // event, GST_EVENT_TYPE_NAME (event));
                // }
            } else {
                /* calibration failed: fall back to DETECTION mode */
                calib->mode = DETECTION;
            }
        }

        if (calib->mode == CAPTURING && blinkOutput) {
            /* invert the frame to signal that a sample was just taken */
            bitwise_not(view, view);
        }
    }

    // Output Text
    // FIXME all additional rendering (text, corners, ...) should be done with cairo or another gst framework.
    // this will relax the conditions on the input format (RBG only at the moment).
    // the calibration itself accepts more formats...
    /* "100/100" is a sizing placeholder, overwritten below while CAPTURING */
    std::string msg = (calib->mode == CAPTURING) ? "100/100" :
        (calib->mode == CALIBRATED) ? "Calibrated" : "Press 'g' to start";
    int baseLine = 0;

    cv::Size textSize = cv::getTextSize(msg, 1, 1, 1, &baseLine);
    cv::Point textOrigin(view.cols - 2 * textSize.width - 10, view.rows - 2 * baseLine - 10);

    if (calib->mode == CAPTURING) {
        msg = cv::format( "%d/%d", (int)calib->imagePoints.size(), calib->nrFrames );
    }

    const cv::Scalar RED(0,0,255);
    const cv::Scalar GREEN(0,255,0);
    cv::putText(view, msg, textOrigin, 1, 1, calib->mode == CALIBRATED ? GREEN : RED);
}
/* Placeholder for running the calibration asynchronously via
 * gst_element_call_async() (see the commented call in
 * camera_calibration_run); currently a no-op. */
void doCalibration(__attribute__((unused)) GstElement *element, __attribute__((unused)) gpointer user_data)
{
    // GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (element);
}
/*
 * Compute the RMS re-projection error of the calibration result.
 * Each view's corners are re-projected with the estimated pose and
 * intrinsics and compared against the detected points; per-view RMS
 * errors are written to perViewErrors and the overall RMS is returned.
 */
static double computeReprojectionErrors( const std::vector<std::vector<cv::Point3f> >& objectPoints,
    const std::vector<std::vector<cv::Point2f> >& imagePoints,
    const std::vector<cv::Mat>& rvecs, const std::vector<cv::Mat>& tvecs,
    const cv::Mat& cameraMatrix , const cv::Mat& distCoeffs,
    std::vector<float>& perViewErrors, bool fisheye)
{
    std::vector<cv::Point2f> projected;
    double sumSq = 0.0;
    size_t pointCount = 0;

    perViewErrors.resize(objectPoints.size());

    for (size_t view = 0; view < objectPoints.size(); ++view) {
        /* re-project the board corners for this view */
        if (fisheye) {
            cv::fisheye::projectPoints(objectPoints[view], projected,
                rvecs[view], tvecs[view], cameraMatrix, distCoeffs);
        } else {
            cv::projectPoints(objectPoints[view],
                rvecs[view], tvecs[view], cameraMatrix, distCoeffs, projected);
        }

        const double viewErr = cv::norm(imagePoints[view], projected, cv::NORM_L2);
        const size_t n = objectPoints[view].size();

        perViewErrors[view] = (float) std::sqrt(viewErr * viewErr / n);
        sumSq += viewErr * viewErr;
        pointCount += n;
    }

    return std::sqrt(sumSq / pointCount);
}
/*
 * Fill 'corners' with the ideal 3D positions (z = 0 plane) of the
 * calibration pattern features, spaced by squareSize in the user's unit.
 * Unknown pattern types leave 'corners' empty.
 */
static void calcBoardCornerPositions(cv::Size boardSize, float squareSize, std::vector<cv::Point3f>& corners,
    gint patternType /*= CHESSBOARD*/)
{
    corners.clear();

    if (patternType == GST_CAMERACALIBRATION_PATTERN_CHESSBOARD
        || patternType == GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID) {
        /* regular grid: rows and columns are squareSize apart */
        for (int row = 0; row < boardSize.height; ++row) {
            for (int col = 0; col < boardSize.width; ++col) {
                corners.push_back(cv::Point3f(col * squareSize, row * squareSize, 0));
            }
        }
    } else if (patternType == GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID) {
        /* odd rows are shifted by one squareSize along x */
        for (int row = 0; row < boardSize.height; ++row) {
            for (int col = 0; col < boardSize.width; ++col) {
                corners.push_back(cv::Point3f((2 * col + row % 2) * squareSize, row * squareSize, 0));
            }
        }
    }
}
/*
 * Run the OpenCV calibration on the collected pattern detections.
 * Outputs the intrinsics (cameraMatrix/distCoeffs), per-view extrinsics
 * (rvecs/tvecs), per-view re-projection errors and the overall average
 * error. Returns true when the computed parameters contain no invalid
 * (NaN/Inf/out-of-range) values.
 */
static bool runCalibration(GstCameraCalibration *calib, cv::Size& imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
    std::vector<std::vector<cv::Point2f> > imagePoints, std::vector<cv::Mat>& rvecs, std::vector<cv::Mat>& tvecs,
    std::vector<float>& reprojErrs, double& totalAvgErr)
{
    //! [fixed_aspect]
    cameraMatrix = cv::Mat::eye(3, 3, CV_64F);
    if (calib->flags & cv::CALIB_FIX_ASPECT_RATIO) {
        /* seed fx with the requested aspect ratio; the solver keeps fx/fy fixed */
        cameraMatrix.at<double>(0,0) = calib->aspectRatio;
    }
    //! [fixed_aspect]

    /* fisheye model uses 4 distortion coefficients, standard model up to 8 */
    if (calib->useFisheye) {
        distCoeffs = cv::Mat::zeros(4, 1, CV_64F);
    } else {
        distCoeffs = cv::Mat::zeros(8, 1, CV_64F);
    }

    /* the board geometry is identical for every captured view */
    std::vector<std::vector<cv::Point3f> > objectPoints(1);
    calcBoardCornerPositions(calib->boardSize, calib->squareSize, objectPoints[0], calib->calibrationPattern);
    objectPoints.resize(imagePoints.size(), objectPoints[0]);

    // Find intrinsic and extrinsic camera parameters
    double rms;
    if (calib->useFisheye) {
        /* fisheye::calibrate returns extrinsics as matrix rows; split them
         * into one cv::Mat per view to match the standard-path output */
        cv::Mat _rvecs, _tvecs;
        rms = cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, _rvecs,
            _tvecs, calib->flags);
        rvecs.reserve(_rvecs.rows);
        tvecs.reserve(_tvecs.rows);
        for(int i = 0; i < int(objectPoints.size()); i++){
            rvecs.push_back(_rvecs.row(i));
            tvecs.push_back(_tvecs.row(i));
        }
    } else {
        rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs,
            calib->flags);
    }

    GST_LOG_OBJECT (calib,
        "Re-projection error reported by calibrateCamera: %f", rms);
    qDebug() << "Re-projection error reported by calibrateCamera:" << rms;

    /* reject results containing NaN/Inf or out-of-range values */
    bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);

    totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix,
        distCoeffs, reprojErrs, calib->useFisheye);

    return ok;
}
// Print camera parameters to the output file
//static void saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
// const vector<Mat>& rvecs, const vector<Mat>& tvecs,
// const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
// double totalAvgErr)
//{
// FileStorage fs( s.outputFileName, FileStorage::WRITE);
//
// time_t tm;
// time( &tm);
// struct tm *t2 = localtime( &tm);
// char buf[1024];
// strftime( buf, sizeof(buf), "%c", t2);
//
// fs << "calibration_time" << buf;
//
// if (!rvecs.empty() || !reprojErrs.empty())
// fs << "nr_of_frames" << (int)std::max(rvecs.size(), reprojErrs.size());
// fs << "image_width" << imageSize.width;
// fs << "image_height" << imageSize.height;
// fs << "board_width" << s.boardSize.width;
// fs << "board_height" << s.boardSize.height;
// fs << "square_size" << s.squareSize;
//
// if (s.flag & CALIB_FIX_ASPECT_RATIO)
// fs << "fix_aspect_ratio" << s.aspectRatio;
//
// if (s.flag)
// {
// if (s.useFisheye)
// {
// sprintf(buf, "flags:%s%s%s%s%s%s",
// s.flag & fisheye::CALIB_FIX_SKEW ? " +fix_skew" : "",
// s.flag & fisheye::CALIB_FIX_K1 ? " +fix_k1" : "",
// s.flag & fisheye::CALIB_FIX_K2 ? " +fix_k2" : "",
// s.flag & fisheye::CALIB_FIX_K3 ? " +fix_k3" : "",
// s.flag & fisheye::CALIB_FIX_K4 ? " +fix_k4" : "",
// s.flag & fisheye::CALIB_RECOMPUTE_EXTRINSIC ? " +recompute_extrinsic" : "");
// }
// else
// {
// sprintf(buf, "flags:%s%s%s%s",
// s.flag & CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "",
// s.flag & CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "",
// s.flag & CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "",
// s.flag & CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "");
// }
// cvWriteComment(*fs, buf, 0);
// }
//
// fs << "flags" << s.flag;
//
// fs << "fisheye_model" << s.useFisheye;
//
// fs << "camera_matrix" << cameraMatrix;
// fs << "distortion_coefficients" << distCoeffs;
//
// fs << "avg_reprojection_error" << totalAvgErr;
// if (s.writeExtrinsics && !reprojErrs.empty())
// fs << "per_view_reprojection_errors" << Mat(reprojErrs);
//
// if(s.writeExtrinsics && !rvecs.empty() && !tvecs.empty())
// {
// CV_Assert(rvecs[0].type() == tvecs[0].type());
// Mat bigmat((int)rvecs.size(), 6, rvecs[0].type());
// for( size_t i = 0; i < rvecs.size(); i++)
// {
// Mat r = bigmat(Range(int(i), int(i+1)), Range(0,3));
// Mat t = bigmat(Range(int(i), int(i+1)), Range(3,6));
//
// CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
// CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
// //*.t() is MatExpr (not Mat) so we can use assignment operator
// r = rvecs[i].t();
// t = tvecs[i].t();
// }
// //cvWriteComment( *fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
// fs << "extrinsic_parameters" << bigmat;
// }
//
// if(s.writePoints && !imagePoints.empty())
// {
// Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
// for( size_t i = 0; i < imagePoints.size(); i++)
// {
// Mat r = imagePtMat.row(int(i)).reshape(2, imagePtMat.cols);
// Mat imgpti(imagePoints[i]);
// imgpti.copyTo(r);
// }
// fs << "image_points" << imagePtMat;
// }
//}
//! [run_and_save]
//bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
// vector<vector<Point2f> > imagePoints)
//{
// vector<Mat> rvecs, tvecs;
// vector<float> reprojErrs;
// double totalAvgErr = 0;
//
// bool ok = runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
// totalAvgErr);
// cout << (ok ? "Calibration succeeded" : "Calibration failed")
// << ". avg re projection error = " << totalAvgErr << endl;
//
//// if (ok)
//// saveCameraParams(s, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, reprojErrs, imagePoints,
//// totalAvgErr);
// return ok;
//}
//! [run_and_save]
/*
 * Convenience wrapper around the full runCalibration(): runs the
 * calibration with throw-away extrinsics/error buffers, logs the outcome
 * and the average re-projection error, and returns whether the computed
 * parameters are valid.
 *
 * FIX: totalAvgErr was computed but never logged (the intent was visible
 * in a commented-out fragment), and the outcome string was passed directly
 * as a printf-style format; use an explicit "%s"/"%f" format instead.
 */
bool runCalibration(GstCameraCalibration *calib, cv::Size imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
    std::vector<std::vector<cv::Point2f> > imagePoints)
{
    std::vector<cv::Mat> rvecs, tvecs;
    std::vector<float> reprojErrs;
    double totalAvgErr = 0;

    bool ok = runCalibration(calib, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
        totalAvgErr);

    GST_LOG_OBJECT (calib, "%s. avg re projection error = %f",
        ok ? "Calibration succeeded" : "Calibration failed", totalAvgErr);

    return ok;
}

View File

@ -0,0 +1,114 @@
/*
* GStreamer
* Copyright (C) 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright (C) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2008 Michael Sheldon <mike@mikeasoft.com>
* Copyright (C) 2011 Stefan Sauer <ensonic@users.sf.net>
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_CALIBRATION_H__
#define __GST_CAMERA_CALIBRATION_H__
#include <gst/gst.h>
#include <gst/opencv/gstopencvvideofilter.h>
#include <opencv2/core.hpp>
G_BEGIN_DECLS
/* Standard GObject type-cast / type-check boilerplate for the
 * GstCameraCalibration type. */
#define GST_TYPE_CAMERA_CALIBRATION \
  (gst_camera_calibration_get_type())
#define GST_CAMERA_CALIBRATION(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_CALIBRATION,GstCameraCalibration))
#define GST_CAMERA_CALIBRATION_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_CALIBRATION,GstCameraCalibrationClass))
#define GST_IS_CAMERA_CALIBRATION(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_CALIBRATION))
#define GST_IS_CAMERA_CALIBRATION_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_CALIBRATION))

typedef struct _GstCameraCalibration GstCameraCalibration;
typedef struct _GstCameraCalibrationClass GstCameraCalibrationClass;
/* Supported calibration pattern types (values of the calibrationPattern
 * setting; see the switch in camera_calibration_run). */
enum _GstCameraCalibrationPattern {
    GST_CAMERACALIBRATION_PATTERN_CHESSBOARD,             /* chessboard corners */
    GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID,           /* symmetric circles grid */
    GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID /* asymmetric circles grid */
};
/* Instance structure for the cameracalibration element. */
struct _GstCameraCalibration
{
    GstOpencvVideoFilter cvfilter; /* parent instance */

    // settings
    gint calibrationPattern; // One of the chessboard, circles, or asymmetric circle pattern
    cv::Size boardSize; // The size of the board -> Number of items by width and height
    float squareSize; // The size of a square in your defined unit (point, millimeter,etc).
    float aspectRatio; // The aspect ratio
    bool cornerSubPix; // Refine detected chessboard corners with cv::cornerSubPix
    bool calibZeroTangentDist; // Assume zero tangential distortion
    bool calibFixPrincipalPoint; // Fix the principal point at the center
    bool useFisheye; // use fisheye camera model for calibration
    int nrFrames; // The number of frames to use from the input for calibration
    int delay; // In case of a video input: min delay (ms) between two accepted samples
    bool showUndistorsed; // Show undistorted images after calibration
    bool showCorners; // Show corners

    // state
    int flags; // OpenCV calibration flag bits derived from the settings above
    int mode; // capture state machine (values defined in the .cpp, e.g. CAPTURING/CALIBRATED)
    clock_t prevTimestamp; // clock() tick of the last accepted sample (rate limiting via 'delay')
    std::vector<std::vector<cv::Point2f> > imagePoints; // accumulated pattern detections
    cv::Mat cameraMatrix, distCoeffs; // calibration results (intrinsics + distortion)
};
/* Class structure: no class-level members beyond the parent's. */
struct _GstCameraCalibrationClass
{
    GstOpencvVideoFilterClass parent_class;
};

/* standard GType accessor and plugin registration entry point */
GType gst_camera_calibration_get_type (void);

gboolean gst_camera_calibration_plugin_init (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_CAMERA_CALIBRATION_H__ */

View File

@ -0,0 +1,515 @@
/*
* GStreamer
* Copyright (C) 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright (C) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2008 Michael Sheldon <mike@mikeasoft.com>
* Copyright (C) 2011 Stefan Sauer <ensonic@users.sf.net>
* Copyright (C) 2014 Robert Jobbagy <jobbagy.robert@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
 * SECTION:element-cameraundistort
 *
 * Performs camera lens distortion correction on video frames, using
 * undistort settings (camera matrix and distortion coefficients) that are
 * typically produced by the cameracalibration element and provided through
 * the #GstCameraUndistort:settings property or an in-band "calibrated"
 * event.
 *
 * <refsect2>
 * <title>Example launch line</title>
 * |[
 * gst-launch-1.0 autovideosrc ! videoconvert ! cameraundistort settings="..." ! videoconvert ! xvimagesink
 * ]| Undistort the live camera feed with previously obtained settings
 * </refsect2>
 */
/* FIXME: development version of OpenCV has CV_HAAR_FIND_BIGGEST_OBJECT which
* we might want to use if available
* see https://code.ros.org/svn/opencv/trunk/opencv/modules/objdetect/src/haar.cpp
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <vector>
#include "camerautils.hpp"
#include "cameraevent.hpp"
#include <QDebug>
#include <QElapsedTimer>
#include "gstcameraundistort.h"
#if (CV_MAJOR_VERSION >= 3)
#include <opencv2/imgproc.hpp>
#endif
#include <opencv2/calib3d.hpp>
#include <gst/opencv/gstopencvutils.h>
GST_DEBUG_CATEGORY_STATIC (gst_camera_undistort_debug);
#define GST_CAT_DEFAULT gst_camera_undistort_debug
#define DEFAULT_SHOW_UNDISTORTED true
#define DEFAULT_ALPHA 1.0
#define DEFAULT_CROP true
/* GObject property IDs for the cameraundistort element */
enum
{
  PROP_0,               /* reserved by GObject */
  PROP_SHOW_UNDISTORTED,
  PROP_ALPHA,
  PROP_CROP,
  PROP_SETTINGS
};
/*#define GST_CAMERA_UNDISTORT_GET_LOCK(playsink) (&((GstCameraUndistort *)undist)->lock)
#define GST_CAMERA_UNDISTORT_LOCK(undist) G_STMT_START { \
GST_LOG_OBJECT (playsink, "locking from thread %p", g_thread_self ()); \
g_rec_mutex_lock (GST_CAMERA_UNDISTORT_GET_LOCK (undist)); \
GST_LOG_OBJECT (playsink, "locked from thread %p", g_thread_self ()); \
} G_STMT_END
#define GST_CAMERA_UNDISTORT_UNLOCK(undist) G_STMT_START { \
GST_LOG_OBJECT (playsink, "unlocking from thread %p", g_thread_self ()); \
g_rec_mutex_unlock (GST_CAMERA_UNDISTORT_GET_LOCK (undist)); \
} G_STMT_END*/
/* register GstCameraUndistort as a subclass of GstOpencvVideoFilter */
G_DEFINE_TYPE (GstCameraUndistort, gst_camera_undistort, GST_TYPE_OPENCV_VIDEO_FILTER);

/* forward declarations of the vmethods and helpers implemented below */
static void gst_camera_undistort_dispose (GObject * object);
static void gst_camera_undistort_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_camera_undistort_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_camera_undistort_set_info (GstOpencvVideoFilter * cvfilter,
    gint in_width, gint in_height, gint in_depth, gint in_channels,
    gint out_width, gint out_height, gint out_depth, gint out_channels);
static GstFlowReturn gst_camera_undistort_transform_frame (
    GstOpencvVideoFilter * cvfilter,
    GstBuffer * frame, IplImage * img,
    GstBuffer * outframe, IplImage * outimg);

static gboolean gst_camera_undistort_sink_event (GstBaseTransform *trans, GstEvent *event);
static gboolean gst_camera_undistort_src_event (GstBaseTransform *trans, GstEvent *event);

static void camera_undistort_run(GstCameraUndistort *undist, IplImage *img, IplImage *outimg);
static gboolean camera_undistort_init_undistort_rectify_map(GstCameraUndistort *undist);
/* initialize the cameraundistort's class */
static void
gst_camera_undistort_class_init (GstCameraUndistortClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstOpencvVideoFilterClass *opencvfilter_class = GST_OPENCV_VIDEO_FILTER_CLASS (klass);

  GstCaps *caps;
  GstPadTemplate *templ;

  gobject_class->dispose = gst_camera_undistort_dispose;
  gobject_class->set_property = gst_camera_undistort_set_property;
  gobject_class->get_property = gst_camera_undistort_get_property;

  trans_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_camera_undistort_sink_event);
  trans_class->src_event =
      GST_DEBUG_FUNCPTR (gst_camera_undistort_src_event);

  opencvfilter_class->cv_set_caps = gst_camera_undistort_set_info;
  opencvfilter_class->cv_trans_func =
      gst_camera_undistort_transform_frame;

  g_object_class_install_property (gobject_class, PROP_SHOW_UNDISTORTED,
      g_param_spec_boolean ("show-undistorted", "Show Undistorted",
          "Show undistorted images",
          DEFAULT_SHOW_UNDISTORTED, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  /* alpha is the free scaling parameter used when building the rectify
   * maps (see cv::getOptimalNewCameraMatrix); the old blurb was a
   * "bla bla" placeholder */
  g_object_class_install_property (gobject_class, PROP_ALPHA,
      g_param_spec_float ("alpha", "Alpha",
          "Free scaling parameter: 0 keeps only valid pixels, 1 keeps all source pixels",
          0.0, 1.0, DEFAULT_ALPHA,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  /* FIX: PROP_CROP was handled in set_property/get_property but never
   * installed, making the "crop" property unusable from the GObject API */
  g_object_class_install_property (gobject_class, PROP_CROP,
      g_param_spec_boolean ("crop", "Crop",
          "Draw the region of valid pixels on the undistorted image",
          DEFAULT_CROP, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  g_object_class_install_property (gobject_class, PROP_SETTINGS,
      g_param_spec_string ("settings", "Settings",
          "Undistort settings (OpenCV serialized opaque string)",
          NULL, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (element_class,
      "cameraundistort",
      "Filter/Effect/Video",
      "Performs camera undistort",
      "Philippe Renon <philippe_renon@yahoo.fr>");

  /* add sink and source pad templates (same raw formats on both sides) */
  caps = gst_opencv_caps_from_cv_image_type (CV_16UC1);
  gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC4));
  gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC3));
  gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
  templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      gst_caps_ref (caps));
  gst_element_class_add_pad_template (element_class, templ);
  templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
  gst_element_class_add_pad_template (element_class, templ);
}
/* initialize the new element
* initialize instance structure
*/
static void
gst_camera_undistort_init (GstCameraUndistort * undist)
{
  /* property defaults */
  undist->showUndistorted = DEFAULT_SHOW_UNDISTORTED;
  undist->alpha = DEFAULT_ALPHA;
  undist->crop = DEFAULT_CROP;

  /* no usable calibration until settings arrive and the rectify maps
   * are built (see camera_undistort_run) */
  undist->doUndistort = false;
  undist->settingsChanged = false;

  /* assigning 0 clears these members — presumably cv::Mat fields given
   * their use with cv::remap below; TODO confirm against the header */
  undist->cameraMatrix = 0;
  undist->distCoeffs = 0;
  undist->map1 = 0;
  undist->map2 = 0;
  //undist->validPixROI = 0;

  undist->settings = NULL;
}
/* GObject dispose: release the serialized settings string. Dispose may
 * run more than once, so the pointer is left NULL afterwards. */
static void
gst_camera_undistort_dispose (GObject * object)
{
  GstCameraUndistort *self = GST_CAMERA_UNDISTORT (object);

  g_free (self->settings);
  self->settings = NULL;

  /* chain up to the parent class */
  G_OBJECT_CLASS (gst_camera_undistort_parent_class)->dispose (object);
}
/* GObject property setter. Changes that affect the rectify maps mark
 * settingsChanged so they are rebuilt on the next frame. */
static void
gst_camera_undistort_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCameraUndistort *self = GST_CAMERA_UNDISTORT (object);
  const char *str;

  switch (prop_id) {
    case PROP_SHOW_UNDISTORTED:
      self->showUndistorted = g_value_get_boolean (value);
      self->settingsChanged = true;
      break;
    case PROP_ALPHA:
      self->alpha = g_value_get_float (value);
      self->settingsChanged = true;
      break;
    case PROP_CROP:
      /* drawing-only toggle: no map rebuild needed */
      self->crop = g_value_get_boolean (value);
      break;
    case PROP_SETTINGS:
      /* replace any previous settings string (g_free on NULL is a no-op) */
      g_free (self->settings);
      self->settings = NULL;
      str = g_value_get_string (value);
      if (str != NULL)
        self->settings = g_strdup (str);
      self->settingsChanged = true;
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property getter: report the current value of each property. */
static void
gst_camera_undistort_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstCameraUndistort *self = GST_CAMERA_UNDISTORT (object);

  switch (prop_id) {
    case PROP_SHOW_UNDISTORTED:
      g_value_set_boolean (value, self->showUndistorted);
      break;
    case PROP_ALPHA:
      g_value_set_float (value, self->alpha);
      break;
    case PROP_CROP:
      g_value_set_boolean (value, self->crop);
      break;
    case PROP_SETTINGS:
      g_value_set_string (value, self->settings);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* cv_set_caps vmethod: remember the input frame geometry, which is
 * needed when (re)building the undistort rectify maps. */
gboolean
gst_camera_undistort_set_info (GstOpencvVideoFilter * cvfilter,
    gint in_width, gint in_height,
    __attribute__((unused)) gint in_depth, __attribute__((unused)) gint in_channels,
    __attribute__((unused)) gint out_width, __attribute__((unused)) gint out_height,
    __attribute__((unused)) gint out_depth, __attribute__((unused)) gint out_channels)
{
  GstCameraUndistort *self = GST_CAMERA_UNDISTORT (cvfilter);

  self->imageSize = cv::Size (in_width, in_height);

  return TRUE;
}
//static GstMessage *
//gst_camera_undistort_message_new (GstCameraUndistort * undist, GstBuffer * buf)
//{
// GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (undist);
// GstStructure *s;
// GstClockTime running_time, stream_time;
//
// running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
// stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
//
// s = gst_structure_new ("cameracalibration",
// "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (buf),
// "stream-time", G_TYPE_UINT64, stream_time,
// "running-time", G_TYPE_UINT64, running_time,
// "duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf), NULL);
//
// return gst_message_new_element (GST_OBJECT (undist), s);
//}
/*
 * Performs the camera undistortion on one frame
 */
/* Per-frame entry point: delegate the actual work to camera_undistort_run(). */
static GstFlowReturn
gst_camera_undistort_transform_frame (GstOpencvVideoFilter * cvfilter,
    G_GNUC_UNUSED GstBuffer * frame, IplImage * img,
    G_GNUC_UNUSED GstBuffer * outframe, IplImage * outimg)
{
  GstCameraUndistort *self = GST_CAMERA_UNDISTORT (cvfilter);

  camera_undistort_run (self, img, outimg);

  return GST_FLOW_OK;
}
/* entry point to initialize the plug-in
 * initialize the plug-in itself
 * register the element factories and other features
 */
gboolean
gst_camera_undistort_plugin_init (GstPlugin * plugin)
{
  /* debug category for filtering log messages */
  GST_DEBUG_CATEGORY_INIT (gst_camera_undistort_debug, "cameraundistort",
      0,
      "Performs camera undistortion");

  /* make the "cameraundistort" element instantiable by pipelines */
  return gst_element_register (plugin, "cameraundistort", GST_RANK_NONE,
      GST_TYPE_CAMERA_UNDISTORT);
}
/*
 * Process one frame: remap it through the cached undistort maps when
 * undistortion is enabled and ready, otherwise copy the pixels through
 * unchanged. Rebuilds the maps lazily when the settings string changed.
 */
static void
camera_undistort_run(GstCameraUndistort *undist, IplImage *img, IplImage *outimg)
{
    const cv::Mat src = cv::cvarrToMat(img);
    cv::Mat dst = cv::cvarrToMat(outimg);

    /* settings changed: try to deserialize them and rebuild the rectify maps */
    if (undist->settingsChanged) {
        undist->settingsChanged = false;
        undist->doUndistort = false;
        if (undist->showUndistorted && undist->settings
            && camera_deserialize_undistort_settings(
                   undist->settings, undist->cameraMatrix, undist->distCoeffs)) {
            undist->doUndistort = camera_undistort_init_undistort_rectify_map(undist);
        }
    }

    if (!(undist->showUndistorted && undist->doUndistort)) {
        // FIXME should use passthrough to avoid this copy...
        src.copyTo(dst);
        return;
    }

    QElapsedTimer timer;
    timer.start();
    cv::remap(src, dst, undist->map1, undist->map2, cv::INTER_LINEAR);
    qDebug() << "remap took" << timer.elapsed() << "ms";

    if (undist->crop) {
        /* outline the region of valid pixels on the undistorted image */
        const cv::Scalar CROP_COLOR(0, 255, 0);
        cv::rectangle(dst, undist->validPixROI, CROP_COLOR);
    }
}
// {
// Mat view, rview, map1, map2;
//
// if (undist->useFisheye)
// {
// Mat newCamMat;
// fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
// Matx33d::eye(), newCamMat, 1);
// fisheye::initUndistortRectifyMap(cameraMatrix, distCoeffs, Matx33d::eye(), newCamMat, imageSize,
// CV_16SC2, map1, map2);
// }
// else
// {
// initUndistortRectifyMap(
// cameraMatrix, distCoeffs, Mat(),
// getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0), imageSize,
// CV_16SC2, map1, map2);
// }
// }
/*
 * Compute the remap tables (map1/map2) and the valid-pixel ROI for the
 * current calibration (cameraMatrix/distCoeffs/alpha). Returns TRUE once
 * the maps are ready for use by cv::remap().
 */
static gboolean
camera_undistort_init_undistort_rectify_map(GstCameraUndistort *undist)
{
    QElapsedTimer timer;
    timer.start();

    cv::Size newSize;
    cv::Rect roi;
    const cv::Mat newCameraMatrix = cv::getOptimalNewCameraMatrix(
        undist->cameraMatrix, undist->distCoeffs, undist->imageSize,
        undist->alpha, newSize, &roi);
    undist->validPixROI = roi;

    cv::initUndistortRectifyMap(undist->cameraMatrix, undist->distCoeffs, cv::Mat(),
        newCameraMatrix, undist->imageSize, CV_16SC2, undist->map1, undist->map2);

    qDebug() << "init rectify took" << timer.elapsed() << "ms";
    return TRUE;
}
/*
qDebug() << "imageSize" << imageSize.width << imageSize.height;
qDebug() << "newImageSize" << newImageSize.width << newImageSize.height;
qDebug() << "alpha" << undist->alpha;
qDebug() << "roi" << undist->validPixROI.x << undist->validPixROI.y << undist->validPixROI.width << undist->validPixROI.height;
cv::FileStorage fs1(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs1 << "cameraMatrix" << undist->cameraMatrix;
const std::string buf1 = fs1.releaseAndGetString();
qDebug() << "cameraMatrix" << QString::fromStdString(buf1);
cv::FileStorage fs2(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs2 << "newCameraMatrix" << newCameraMatrix;
const std::string buf2 = fs2.releaseAndGetString();
qDebug() << "newCameraMatrix" << QString::fromStdString(buf2);
cv::FileStorage fs3(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs3 << "distCoeffs" << undist->distCoeffs;
const std::string buf3 = fs3.releaseAndGetString();
qDebug() << "distCoeffs" << QString::fromStdString(buf3);
*/
/*
 * Handle a calibration event: replace the stored settings string with the
 * one carried by the event and schedule a rebuild of the undistort maps
 * on the next frame. Returns FALSE if the event could not be parsed.
 */
static gboolean camera_undistort_calibration_event(GstCameraUndistort *undist, GstEvent *event)
{
    g_free (undist->settings);
    /* reset to NULL so a failed parse does not leave a dangling pointer
     * behind (get_property and the next g_free would touch freed memory) */
    undist->settings = NULL;

    if (!gst_camera_event_parse_calibrated(event, &(undist->settings))) {
        qDebug() << "Failed to parse";
        return FALSE;
    }

    undist->settingsChanged = true;

    return TRUE;
}
/*
 * Intercept custom calibration events arriving from upstream; every other
 * event is forwarded to the parent class.
 */
static gboolean
gst_camera_undistort_sink_event (GstBaseTransform *trans, GstEvent *event)
{
    GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (trans);
    const GstStructure *s = gst_event_get_structure (event);

    if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_BOTH && s
        && strcmp (gst_structure_get_name (s), GST_CAMERA_EVENT_CALIBRATED_NAME) == 0) {
        qDebug() << "GOT CALIBRATION EVENT FROM UPSTREAM";
        return camera_undistort_calibration_event(undist, event);
    }

    return GST_BASE_TRANSFORM_CLASS (gst_camera_undistort_parent_class)->sink_event (trans, event);
}
/*
 * Intercept custom calibration events arriving from downstream; every other
 * event is forwarded to the parent class.
 */
static gboolean
gst_camera_undistort_src_event (GstBaseTransform *trans, GstEvent *event)
{
    GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (trans);
    const GstStructure *s = gst_event_get_structure (event);

    if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_BOTH && s
        && strcmp (gst_structure_get_name (s), GST_CAMERA_EVENT_CALIBRATED_NAME) == 0) {
        qDebug() << "GOT CALIBRATION EVENT FROM DOWNSTREAM";
        return camera_undistort_calibration_event(undist, event);
    }

    return GST_BASE_TRANSFORM_CLASS (gst_camera_undistort_parent_class)->src_event (trans, event);
}

View File

@ -0,0 +1,109 @@
/*
* GStreamer
* Copyright (C) 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright (C) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright (C) 2008 Michael Sheldon <mike@mikeasoft.com>
* Copyright (C) 2011 Stefan Sauer <ensonic@users.sf.net>
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_UNDISTORT_H__
#define __GST_CAMERA_UNDISTORT_H__
#include <gst/gst.h>
#include <gst/opencv/gstopencvvideofilter.h>
//#include "gstopencvvideofilter.h"
#include <opencv2/core.hpp>
G_BEGIN_DECLS
#define GST_TYPE_CAMERA_UNDISTORT \
(gst_camera_undistort_get_type())
#define GST_CAMERA_UNDISTORT(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_UNDISTORT,GstCameraUndistort))
#define GST_CAMERA_UNDISTORT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_UNDISTORT,GstCameraUndistortClass))
#define GST_IS_CAMERA_UNDISTORT(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_UNDISTORT))
#define GST_IS_CAMERA_UNDISTORT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_UNDISTORT))
typedef struct _GstCameraUndistort GstCameraUndistort;
typedef struct _GstCameraUndistortClass GstCameraUndistortClass;
struct _GstCameraUndistort
{
  GstOpencvVideoFilter cvfilter;    /* parent instance */

  //GRecMutex stream_lock;

  /* settings (exposed as GObject properties) */
  bool showUndistorted;             /* when false, frames pass through unchanged */
  float alpha;                      /* free-scaling parameter fed to getOptimalNewCameraMatrix() */
  bool crop;                        /* draw the valid-pixel ROI rectangle on the output */

  // obscure string containing opencv calibration settings
  gchar *settings;

  /* opencv calibration settings, deserialized from `settings` */
  cv::Mat cameraMatrix, distCoeffs;

  /* state */
  bool doUndistort;                 /* true once valid rectify maps have been computed */
  bool settingsChanged;             /* set when `settings` changes; triggers a map rebuild */
  cv::Size imageSize;               /* negotiated input frame size */
  cv::Mat map1, map2;               /* remap tables from initUndistortRectifyMap() */
  cv::Rect validPixROI;             /* region of valid pixels after undistortion */
};
struct _GstCameraUndistortClass
{
GstOpencvVideoFilterClass parent_class;
};
GType gst_camera_undistort_get_type (void);
gboolean gst_camera_undistort_plugin_init (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_CAMERA_UNDISTORT_H__ */

View File

@ -0,0 +1,178 @@
/* GStreamer
* Copyright (C) <2010> Thiago Santos <thiago.sousa.santos@collabora.co.uk>
*
* gstopencvutils.c: miscellaneous utility functions
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstopencvutils.h"
#include <opencv2/core/core_c.h>
/*
The various opencv image containers or headers store the following information:
- number of channels (usually 1, 3 or 4)
- depth (8, 16, 32, 64...); all channels have the same depth.
The channel layout (BGR vs RGB) is not stored...
This gives us the following list of supported image formats:
CV_8UC1, CV_8UC2, CV_8UC3, CV_8UC4
CV_8SC1, CV_8SC2, CV_8SC3, CV_8SC4
CV_16UC1, CV_16UC2, CV_16UC3, CV_16UC4
CV_16SC1, CV_16SC2, CV_16SC3, CV_16SC4
CV_32SC1, CV_32SC2, CV_32SC3, CV_32SC4
CV_32FC1, CV_32FC2, CV_32FC3, CV_32FC4
CV_64FC1, CV_64FC2, CV_64FC3, CV_64FC4
Where the first part of the format name is the depth followed by a digit
representing the number of channels.
Note that opencv supports more than 4 channels.
The opencv algorithms don't all support all the image types.
For example findChessboardCorners() supports only 8 bits formats
(gray scale and color).
And, typically, this algorithm will convert the image to gray scale before
proceeding. It will do so with something like this:
cvtColor(srcImg, destImg, CV_BGR2GRAY);
The conversion will work on any BGR format (BGR, BGRA, BGRx).
The extra channel(s) will be ignored.
It will also produce a result for any RGB format.
The result will be "wrong" to the human eye and might affect some algorithms
(not findChessboardCorners() afaik...).
This is due to how RGB gets converted to gray where each color has a
different weight.
Another example is the 2D rendering API.
It works with RGB but the colors will be wrong.
Likewise other layouts like xBGR and ABGR formats will probably misbehave
with most algorithms.
The bad thing is that it is not possible to change the "default" BGR format.
Safest is to not assume that RGB will work and always convert to BGR.
That said, the current opencv gstreamer elements all accept BGR and RGB caps !
Some have restrictions but if a format is supported then both BGR and RGB
layouts will be supported.
*/
/*
 * Extract the IplImage parameters (size, IPL depth, channel count) matching
 * the raw video format described by @caps. On failure @err is set and
 * FALSE is returned.
 */
gboolean
gst_opencv_parse_iplimage_params_from_caps (GstCaps * caps, gint * width,
    gint * height, gint * ipldepth, gint * channels, GError ** err)
{
  GstVideoInfo info;
  int cv_type;

  if (!gst_video_info_from_caps (&info, caps)) {
    gchar *caps_str = gst_caps_to_string (caps);

    GST_ERROR ("Failed to get video info from caps %s", caps_str);
    g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
        "Failed to get video info from caps %s", caps_str);
    g_free (caps_str);
    return FALSE;
  }

  if (!gst_opencv_cv_image_type_from_video_format (GST_VIDEO_INFO_FORMAT (&info),
          &cv_type, err)) {
    return FALSE;
  }

  *width = GST_VIDEO_INFO_WIDTH (&info);
  *height = GST_VIDEO_INFO_HEIGHT (&info);
  *ipldepth = cvIplDepth (cv_type);
  *channels = CV_MAT_CN (cv_type);
  return TRUE;
}
/*
 * Map a GstVideoFormat to the matching OpenCV image type (CV_8UCn or
 * CV_16UC1). Unsupported formats set @err and return FALSE.
 */
gboolean
gst_opencv_cv_image_type_from_video_format (GstVideoFormat format,
    int * cv_type, GError ** err)
{
  switch (format) {
      /* 8-bit single channel */
    case GST_VIDEO_FORMAT_GRAY8:
      *cv_type = CV_8UC1;
      break;
      /* 16-bit single channel */
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      *cv_type = CV_16UC1;
      break;
      /* 8-bit, 3 channels */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      *cv_type = CV_8UC3;
      break;
      /* 8-bit, 4 channels (with or without alpha) */
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
      *cv_type = CV_8UC4;
      break;
    default:{
      const gchar *format_str = gst_video_format_to_string (format);

      g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
          "Unsupported video format %s", format_str);
      return FALSE;
    }
  }
  return TRUE;
}
/*
 * Build the caps listing every raw video format compatible with the given
 * OpenCV image type. Unknown types yield empty caps.
 */
GstCaps *
gst_opencv_caps_from_cv_image_type (int cv_type)
{
  GstCaps *caps = gst_caps_new_empty ();

  switch (cv_type) {
    case CV_8UC1:
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY8")));
      break;
    case CV_8UC3:
      /* channel layout (RGB vs BGR) is not encoded in the cv type, accept both */
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGB")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR")));
      break;
    case CV_8UC4:
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGBx")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("xRGB")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRx")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("xBGR")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGBA")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("ARGB")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRA")));
      gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("ABGR")));
      break;
    case CV_16UC1:
      gst_caps_append (caps,
          gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY16_LE")));
      gst_caps_append (caps,
          gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY16_BE")));
      break;
  }
  return caps;
}

View File

@ -0,0 +1,46 @@
/* GStreamer
* Copyright (C) <2010> Thiago Santos <thiago.sousa.santos@collabora.co.uk>
*
* gstopencvutils.h: miscellaneous utility functions
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_OPENCV_UTILS__
#define __GST_OPENCV_UTILS__

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>
#include <gst/video/video.h>

G_BEGIN_DECLS

/* Fill in the IplImage parameters (size, IPL depth, channel count) matching
 * the raw video format in @caps; sets @err and returns FALSE on failure. */
gboolean gst_opencv_parse_iplimage_params_from_caps
    (GstCaps * caps, gint * width, gint * height, gint * depth,
    gint * channels, GError ** err);

/* Map a GstVideoFormat to an OpenCV image type (CV_8UCn / CV_16UC1);
 * sets @err and returns FALSE for unsupported formats. */
gboolean
gst_opencv_cv_image_type_from_video_format (GstVideoFormat format,
    int * cv_type, GError ** err);

/* Build caps listing every raw video format compatible with @cv_type. */
GstCaps * gst_opencv_caps_from_cv_image_type (int cv_type);

G_END_DECLS

#endif /* __GST_OPENCV_UTILS__ */

View File

@ -0,0 +1,289 @@
/*
* GStreamer
* Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/* TODO opencv can do scaling for some cases */
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "gstopencvvideofilter.h"
#include "gstopencvutils.h"
#include <opencv2/core/core_c.h>
GST_DEBUG_CATEGORY_STATIC (gst_opencv_video_filter_debug);
#define GST_CAT_DEFAULT gst_opencv_video_filter_debug
/* Filter signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  PROP_0
};

/* parent class pointer, resolved in class_init and used to chain up */
static GstElementClass *parent_class = NULL;

/* forward declarations for the GType/vmethod plumbing defined below */
static void gst_opencv_video_filter_class_init (GstOpencvVideoFilterClass *
    klass);
static void gst_opencv_video_filter_init (GstOpencvVideoFilter * cv_filter,
    GstOpencvVideoFilterClass * klass);
static gboolean gst_opencv_video_filter_set_info (GstVideoFilter * vfilter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_opencv_video_filter_transform_frame_ip (
    GstVideoFilter * vfilter, GstVideoFrame * frame);
static GstFlowReturn gst_opencv_video_filter_transform_frame (
    GstVideoFilter * vfilter,
    GstVideoFrame * inframe, GstVideoFrame * outframe);
static void gst_opencv_video_filter_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_opencv_video_filter_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
/* Registers (once) and returns the GstOpencvVideoFilter GType. */
GType
gst_opencv_video_filter_get_type (void)
{
  static volatile gsize opencv_video_filter_type = 0;

  /* g_once_init_enter/leave guarantee the registration happens exactly once */
  if (g_once_init_enter (&opencv_video_filter_type)) {
    GType _type;
    static const GTypeInfo opencv_video_filter_info = {
      sizeof (GstOpencvVideoFilterClass),
      NULL,
      NULL,
      (GClassInitFunc) gst_opencv_video_filter_class_init,
      NULL,
      NULL,
      sizeof (GstOpencvVideoFilter),
      0,
      (GInstanceInitFunc) gst_opencv_video_filter_init,
    };
    /* abstract: subclasses must provide the cv_transform_frame hooks */
    _type = g_type_register_static (GST_TYPE_VIDEO_FILTER,
        "GstOpencvVideoFilter", &opencv_video_filter_info,
        G_TYPE_FLAG_ABSTRACT);
    g_once_init_leave (&opencv_video_filter_type, _type);
  }
  return opencv_video_filter_type;
}
/* Clean up: release the cached IplImage headers, then chain up. */
static void
gst_opencv_video_filter_finalize (GObject * obj)
{
  GstOpencvVideoFilter *filter = GST_OPENCV_VIDEO_FILTER (obj);

  if (filter->cvImage != NULL)
    cvReleaseImage (&filter->cvImage);
  if (filter->out_cvImage != NULL)
    cvReleaseImage (&filter->out_cvImage);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* Class init: wire GObject and GstVideoFilter vmethods to this base class. */
static void
gst_opencv_video_filter_class_init (GstOpencvVideoFilterClass * klass)
{
  GObjectClass *gobject_class;
  GstVideoFilterClass *filter_class;

  gobject_class = (GObjectClass *) klass;
  filter_class = (GstVideoFilterClass *) klass;
  /* keep the parent class around for chaining up in finalize() */
  parent_class = (GstElementClass *) g_type_class_peek_parent (klass);

  GST_DEBUG_CATEGORY_INIT (gst_opencv_video_filter_debug,
      "opencvvideofilter", 0, "opencvvideofilter element");

  gobject_class->finalize =
      GST_DEBUG_FUNCPTR (gst_opencv_video_filter_finalize);
  gobject_class->set_property = gst_opencv_video_filter_set_property;
  gobject_class->get_property = gst_opencv_video_filter_get_property;

  /* route the GstVideoFilter vmethods through the IplImage wrappers below */
  filter_class->transform_frame = gst_opencv_video_filter_transform_frame;
  filter_class->transform_frame_ip =
      gst_opencv_video_filter_transform_frame_ip;
  filter_class->set_info = gst_opencv_video_filter_set_info;
}
/* Instance init: nothing to set up; the IplImage headers are created in
 * set_info() once caps are negotiated. */
static void
gst_opencv_video_filter_init (GstOpencvVideoFilter * cv_filter,
    GstOpencvVideoFilterClass * klass)
{
}
/*
 * GstVideoFilter transform_frame wrapper: point the cached IplImage headers
 * at the in/out frame data and invoke the subclass' cv_transform_frame hook.
 */
static GstFlowReturn
gst_opencv_video_filter_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * inframe, GstVideoFrame * outframe)
{
  GstOpencvVideoFilter *filter = GST_OPENCV_VIDEO_FILTER (vfilter);
  GstOpencvVideoFilterClass *klass =
      GST_OPENCV_VIDEO_FILTER_GET_CLASS (vfilter);

  g_return_val_if_fail (klass->cv_transform_frame != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (filter->cvImage != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (filter->out_cvImage != NULL, GST_FLOW_ERROR);

  filter->cvImage->imageData = (char *) inframe->data;
  filter->out_cvImage->imageData = (char *) outframe->data;

  return klass->cv_transform_frame (filter, inframe, filter->cvImage,
      outframe, filter->out_cvImage);
}
/*
 * GstVideoFilter transform_frame_ip wrapper: point the cached IplImage
 * header at the frame data and invoke the subclass' in-place hook.
 */
static GstFlowReturn
gst_opencv_video_filter_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstOpencvVideoFilter *filter = GST_OPENCV_VIDEO_FILTER (vfilter);
  GstOpencvVideoFilterClass *klass =
      GST_OPENCV_VIDEO_FILTER_GET_CLASS (vfilter);

  g_return_val_if_fail (klass->cv_transform_frame_ip != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (filter->cvImage != NULL, GST_FLOW_ERROR);

  filter->cvImage->imageData = (char *) frame->data;

  return klass->cv_transform_frame_ip (filter, frame, filter->cvImage);
}
/*
 * GstVideoFilter set_info vmethod, called on caps negotiation.
 * Parses the IplImage geometry for both input and output caps, lets the
 * subclass react via cv_set_info, then (re)creates the cached IplImage
 * headers whose data pointers are patched per frame by the transform
 * wrappers above.
 */
static gboolean
gst_opencv_video_filter_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstOpencvVideoFilter *cv_filter = GST_OPENCV_VIDEO_FILTER (vfilter);
  GstOpencvVideoFilterClass *klass =
      GST_OPENCV_VIDEO_FILTER_GET_CLASS (cv_filter);
  gint in_width, in_height;
  gint in_depth, in_channels;
  gint out_width, out_height;
  gint out_depth, out_channels;
  GError *in_err = NULL;
  GError *out_err = NULL;

  if (!gst_opencv_parse_iplimage_params_from_caps (incaps, &in_width,
          &in_height, &in_depth, &in_channels, &in_err)) {
    GST_WARNING_OBJECT (cv_filter, "Failed to parse input caps: %s",
        in_err->message);
    g_error_free (in_err);
    return FALSE;
  }

  if (!gst_opencv_parse_iplimage_params_from_caps (outcaps, &out_width,
          &out_height, &out_depth, &out_channels, &out_err)) {
    GST_WARNING_OBJECT (cv_filter, "Failed to parse output caps: %s",
        out_err->message);
    g_error_free (out_err);
    return FALSE;
  }

  /* give the subclass a chance to record / veto the negotiated geometry */
  if (klass->cv_set_info) {
    if (!klass->cv_set_info (cv_filter, in_width, in_height, in_depth,
            in_channels, out_width, out_height, out_depth, out_channels))
      return FALSE;
  }

  /* drop headers from a previous negotiation before creating new ones */
  if (cv_filter->cvImage) {
    cvReleaseImage (&cv_filter->cvImage);
  }
  if (cv_filter->out_cvImage) {
    cvReleaseImage (&cv_filter->out_cvImage);
  }

  /* headers only: no pixel buffers are allocated here, imageData is set
   * per frame in the transform wrappers */
  cv_filter->cvImage =
      cvCreateImageHeader (cvSize (in_width, in_height), in_depth, in_channels);
  cv_filter->out_cvImage =
      cvCreateImageHeader (cvSize (out_width, out_height), out_depth,
      out_channels);

  /* re-apply the subclass' in-place preference after (re)negotiation */
  gst_base_transform_set_in_place (GST_BASE_TRANSFORM (cv_filter),
      cv_filter->in_place);

  return TRUE;
}
/* GObject property setter: this base class declares no properties of its
 * own (only PROP_0), so every id is reported as invalid. */
static void
gst_opencv_video_filter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property getter: this base class declares no properties of its
 * own (only PROP_0), so every id is reported as invalid. */
static void
gst_opencv_video_filter_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Record the in-place processing preference and apply it to the base
 * transform immediately; set_info() re-applies it after renegotiation. */
void
gst_opencv_video_filter_set_in_place (GstOpencvVideoFilter * cv_filter,
    gboolean ip)
{
  cv_filter->in_place = ip;
  gst_base_transform_set_in_place (GST_BASE_TRANSFORM (cv_filter), ip);
}

View File

@ -0,0 +1,110 @@
/*
* GStreamer
* Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_OPENCV_VIDEO_FILTER_H__
#define __GST_OPENCV_VIDEO_FILTER_H__
#include <gst/gst.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
/* forward declare opencv type to avoid exposing them in this API */
typedef struct _IplImage IplImage;
/* #defines don't like whitespacey bits */
#define GST_TYPE_OPENCV_VIDEO_FILTER \
(gst_opencv_video_filter_get_type())
#define GST_OPENCV_VIDEO_FILTER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPENCV_VIDEO_FILTER,GstOpencvVideoFilter))
#define GST_OPENCV_VIDEO_FILTER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPENCV_VIDEO_FILTER,GstOpencvVideoFilterClass))
#define GST_IS_OPENCV_VIDEO_FILTER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPENCV_VIDEO_FILTER))
#define GST_IS_OPENCV_VIDEO_FILTER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPENCV_VIDEO_FILTER))
#define GST_OPENCV_VIDEO_FILTER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_OPENCV_VIDEO_FILTER,GstOpencvVideoFilterClass))
#define GST_OPENCV_VIDEO_FILTER_CAST(obj) ((GstOpencvVideoFilter *) (obj))
typedef struct _GstOpencvVideoFilter GstOpencvVideoFilter;
typedef struct _GstOpencvVideoFilterClass GstOpencvVideoFilterClass;

/* in-place transform hook: subclass processes `img` (backed by `frame`) */
typedef GstFlowReturn (*GstOpencvVideoFilterTransformIPFunc)
    (GstOpencvVideoFilter * cvfilter, GstVideoFrame * frame, IplImage * img);
/* copy transform hook: subclass reads `img` and writes `outimg` */
typedef GstFlowReturn (*GstOpencvVideoFilterTransformFunc)
    (GstOpencvVideoFilter * cvfilter, GstVideoFrame * frame, IplImage * img,
    GstVideoFrame * outframe, IplImage * outimg);
/* caps negotiation hook: informs the subclass of the negotiated geometry */
typedef gboolean (*GstOpencvVideoFilterSetInfo)
    (GstOpencvVideoFilter * cv_filter, gint in_width, gint in_height,
    gint in_depth, gint in_channels, gint out_width, gint out_height,
    gint out_depth, gint out_channels);

struct _GstOpencvVideoFilter
{
  GstVideoFilter videofilter;   /* parent instance */

  gboolean in_place;            /* process frames in place when TRUE */
  IplImage *cvImage;            /* header wrapping the input frame data */
  IplImage *out_cvImage;        /* header wrapping the output frame data */
};

struct _GstOpencvVideoFilterClass
{
  GstVideoFilterClass parent_class;

  /* hooks a subclass fills in; see the typedefs above */
  GstOpencvVideoFilterTransformFunc cv_transform_frame;
  GstOpencvVideoFilterTransformIPFunc cv_transform_frame_ip;
  GstOpencvVideoFilterSetInfo cv_set_info;
};
GType gst_opencv_video_filter_get_type (void);
void gst_opencv_video_filter_set_in_place (GstOpencvVideoFilter * cv_filter,
gboolean ip);
G_END_DECLS
#endif /* __GST_OPENCV_VIDEO_FILTER_H__ */

View File

@ -0,0 +1,33 @@
DEFINES += GST_PLUGIN_BUILD_STATIC

#CONFIG += link_pkgconfig
PKGCONFIG += gstreamer-base-1.0

do_not_compile {

    HEADERS += \
        plugins/cameracalibration/gstopencvutils.h \
        plugins/cameracalibration/gstopencvvideofilter.hpp

    # note: no trailing "\" after the last entry, it would continue the
    # SOURCES value onto the closing brace and break this scope
    SOURCES += \
        plugins/cameracalibration/gstopencvutils.cpp \
        plugins/cameracalibration/gstopencvvideofilter.cpp

}

opencv {

    # there is no package for gst opencv yet...
    GSTREAMER_SDK_DIR = $$system(pkg-config --variable=exec_prefix gstreamer-1.0)
    # $${...} is qmake expansion; $(...) would be expanded by make from the
    # environment and GSTREAMER_SDK_DIR is a qmake variable
    LIBS += -L$${GSTREAMER_SDK_DIR}/lib/gstreamer-1.0/opencv
    LIBS += -lgstopencv-1.0

    HEADERS += \
        plugins/cameracalibration/camerautils.hpp \
        plugins/cameracalibration/cameraevent.hpp \
        plugins/cameracalibration/gstcameracalibration.h \
        plugins/cameracalibration/gstcameraundistort.h

    SOURCES += \
        plugins/cameracalibration/camerautils.cpp \
        plugins/cameracalibration/cameraevent.cpp \
        plugins/cameracalibration/gstcameracalibration.cpp \
        plugins/cameracalibration/gstcameraundistort.cpp

}

View File

@ -0,0 +1,67 @@
###############################################################################
# General
###############################################################################
Add the following line to your build config file:
GCS_EXTRA_CONF += gstreamer
or run this command
make config_append GCS_EXTRA_CONF+=gstreamer
The build config file is at the root of your source directory.
###############################################################################
# Windows (msys2)
###############################################################################
i686:
$ pacman -S mingw-w64-i686-gst-plugins-base mingw-w64-i686-gst-plugins-good mingw-w64-i686-gst-plugins-bad mingw-w64-i686-gst-plugins-ugly mingw-w64-i686-gst-libav
x86_64:
$ pacman -S mingw-w64-x86_64-gst-plugins-base mingw-w64-x86_64-gst-plugins-good mingw-w64-x86_64-gst-plugins-bad mingw-w64-x86_64-gst-plugins-ugly mingw-w64-x86_64-gst-libav
###############################################################################
# Linux
###############################################################################
Get all the gstreamer libraries.
This might work:
Add the repository ppa:gstreamer-developers/ppa using Synaptic Package Manager or CLI
> sudo add-apt-repository ppa:gstreamer-developers/ppa
> sudo apt-get update
Upgrade to latest version of the packages using Synaptic Package Manager or CLI
> sudo apt-get install gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav
> sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
###############################################################################
# Mac
###############################################################################
###############################################################################
# How to find required libraries (for copydata.pro)
###############################################################################
Use gst-inspect-1.0 with an element or plugin name and look at the "Filename" field in its output.
$ gst-inspect-1.0.exe ksvideosrc
Factory Details:
Rank none (0)
Long-name KsVideoSrc
Klass Source/Video
Description Stream data from a video capture device through Windows kernel streaming
Author Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
Haakon Sporsheim <hakon.sporsheim@tandberg.com>
Andres Colubri <andres.colubri@gmail.com>
Plugin Details:
Name winks
Description Windows kernel streaming plugin
Filename C:\msys64\mingw64\lib\gstreamer-1.0\libgstwinks.dll
Version 1.6.3
License LGPL
Source module gst-plugins-bad
Source release date 2016-01-20
Binary package GStreamer
Origin URL http://gstreamer.net/

View File

@ -0,0 +1,851 @@
/**
******************************************************************************
*
* @file videowidget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videowidget.h"
#include "gst_util.h"
#include "overlay.h"
#include "pipelineevent.h"
// #include "devicemonitor.h"
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QtCore>
#include <QPainter>
#include <QDebug>
#include <QRect>
#include <QTextDocument>
#include <string>
// TODO find a better way and move away from this file
static Pipeline::State cvt(GstState state);
static const char *name(Pipeline::State state);
static ProgressEvent::ProgressType cvt(GstProgressType type);
// Concrete Overlay backed by a GstVideoOverlay: forwards expose requests
// to the video sink so it redraws the last frame into our native window.
class GstOverlayImpl : public Overlay {
public:
    GstOverlayImpl(GstVideoOverlay *gst_overlay) :
        gst_overlay(gst_overlay)
    {}
    // Ask the sink to repaint; a null overlay is silently ignored.
    void expose()
    {
        if (!gst_overlay) {
            return;
        }
        gst_video_overlay_expose(gst_overlay);
    }
private:
    GstVideoOverlay *gst_overlay;
};
// Receives GStreamer bus messages synchronously (on streaming threads) and
// re-posts them to the widget as Qt events. Holds the native window id that
// is handed to the video sink for the prepare-window-handle message.
class BusSyncHandler {
public:
    BusSyncHandler(VideoWidget *widget, WId wid) :
        widget(widget), wId(wid)
    {}
    // Returns true if the message was handled (and should be dropped from the bus).
    bool handleMessage(GstMessage *msg);
private:
    VideoWidget *widget; // target for posted Qt events (not owned)
    WId wId;             // native window handle passed to the video overlay
};
static GstElement *createPipelineFromDesc(const char *, QString &lastError);
static GstBusSyncReply gst_bus_sync_handler(GstBus *, GstMessage *, BusSyncHandler *);
// Constructs the widget, initializes GStreamer and prepares a native window
// whose id will later be handed to the video sink.
// NOTE: winId() must be called here, on the GUI thread, because calling it
// later from a GStreamer streaming thread would not be safe (see comment below).
VideoWidget::VideoWidget(QWidget *parent) :
    QWidget(parent), pipeline(NULL), overlay(NULL)
{
    qDebug() << "VideoWidget::VideoWidget";
    // initialize gstreamer
    gst::init(NULL, NULL);
    // foreach(Device d, m.devices()) {
    // qDebug() << d.displayName();
    // }
    // make the widget native so it gets its own native window id that we will pass to gstreamer
    setAttribute(Qt::WA_NativeWindow);
    // setAttribute(Qt::WA_DontCreateNativeAncestors);
    // set black background
    QPalette pal(palette());
    pal.setColor(backgroundRole(), Qt::black);
    setPalette(pal);
    // calling winId() will realize the window if it is not yet realized
    // so we need to call winId() here and not later from a gstreamer thread...
    WId wid = winId();
    qDebug() << "VideoWidget::VideoWidget - video winId :" << (gulong)wid;
    handler = new BusSyncHandler(this, wid);
    // init widget state (see setOverlay() for more information)
    // setOverlay(NULL);
    setAutoFillBackground(true);
    setAttribute(Qt::WA_OpaquePaintEvent, false);
    setAttribute(Qt::WA_PaintOnScreen, false);
    // init state
    lastError = "";
}
// Tear down the pipeline (if any) first, then release the bus handler.
VideoWidget::~VideoWidget()
{
    if (pipeline) {
        dispose();
    }
    // deleting a null handler is harmless
    delete handler;
    handler = NULL;
}
// A widget is "playing" when it owns a pipeline that is in the PLAYING state.
bool VideoWidget::isPlaying()
{
    if (!pipeline) {
        return false;
    }
    return GST_STATE(pipeline) == GST_STATE_PLAYING;
}
// Returns the gst-launch style pipeline description last set with setPipelineDesc().
QString VideoWidget::pipelineDesc()
{
    return m_pipelineDesc;
}
// Stores a new pipeline description. Any running pipeline is stopped first,
// since it was built from the previous description.
void VideoWidget::setPipelineDesc(QString pipelineDesc)
{
    qDebug() << "VideoWidget::setPipelineDesc -" << pipelineDesc;
    stop();
    m_pipelineDesc = pipelineDesc;
}
// Creates the pipeline on demand and switches it to the PLAYING state.
void VideoWidget::start()
{
    qDebug() << "VideoWidget::start -" << m_pipelineDesc;
    init();
    update();
    if (!pipeline) {
        // init() failed and already reported the error state
        return;
    }
    gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
}
// Toggles the pipeline between PAUSED and PLAYING; other states are left alone.
void VideoWidget::pause()
{
    qDebug() << "VideoWidget::pause -" << m_pipelineDesc;
    init();
    update();
    if (!pipeline) {
        return;
    }
    switch (GST_STATE(pipeline)) {
    case GST_STATE_PAUSED:
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
        break;
    case GST_STATE_PLAYING:
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
        break;
    default:
        break;
    }
}
// Destroys the pipeline. When there is no pipeline a synthetic Null->Null
// state change is emitted so the UI still updates its controls.
void VideoWidget::stop()
{
    qDebug() << "VideoWidget::stop -" << m_pipelineDesc;
    if (!pipeline) {
        // emit fake state change event. this is needed by the UI...
        emit stateChanged(Pipeline::Null, Pipeline::Null, Pipeline::VoidPending);
    } else {
        dispose();
    }
    update();
}
// Lazily creates the pipeline from m_pipelineDesc and installs the
// synchronous bus handler. If a pipeline already exists only the error
// state is reset. On creation failure a synthetic state change is emitted
// so the UI reflects the Null state.
void VideoWidget::init()
{
    if (pipeline) {
        // if pipeline is already created, reset some state and return
        // (fixed typo in log message: "reseting" -> "resetting")
        qDebug() << "VideoWidget::init - resetting pipeline state :" << m_pipelineDesc;
        lastError = "";
        return;
    }
    // reset state
    lastError = "";
    // create pipeline
    qDebug() << "VideoWidget::init - initializing pipeline :" << m_pipelineDesc;
    pipeline = createPipelineFromDesc(m_pipelineDesc.toStdString().c_str(), lastError);
    if (pipeline) {
        // let the bus flush pending messages automatically when the pipeline goes to NULL
        gst_pipeline_set_auto_flush_bus(GST_PIPELINE(pipeline), true);
        // register bus synchronous handler; messages are forwarded to this
        // widget as Qt events (see BusSyncHandler)
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
        gst_bus_set_sync_handler(bus, (GstBusSyncHandler)gst_bus_sync_handler, handler, NULL);
        gst_object_unref(bus);
    } else {
        // emit fake state change event. this is needed by the UI...
        emit stateChanged(Pipeline::Null, Pipeline::Null, Pipeline::VoidPending);
    }
}
// Drops the overlay, brings the pipeline to the NULL state and releases it.
void VideoWidget::dispose()
{
    qDebug() << "VideoWidget::dispose -" << m_pipelineDesc;
    setOverlay(NULL);
    if (!pipeline) {
        return;
    }
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    pipeline = NULL;
}
// With an active overlay the video sink repaints; otherwise fall back to
// normal widget painting plus the textual status message.
void VideoWidget::paintEvent(QPaintEvent *event)
{
    if (!overlay) {
        QWidget::paintEvent(event);
        paintStatus(event);
        return;
    }
    overlay->expose();
}
// Draws the current status message as centered red HTML text in a band
// starting at the vertical middle third of the widget.
void VideoWidget::paintStatus(QPaintEvent *event)
{
    Q_UNUSED(event);
    QString html = "<p align=center><font size=+2>" + getStatusMessage() + "</font></p>";
    QTextDocument doc;
    doc.setDefaultStyleSheet("* { color:red; }");
    doc.setHtml(html);
    // position the text band: quarter-height block centered vertically,
    // extending to the bottom of the widget
    QRect widgetArea = QWidget::rect();
    int bandHeight = widgetArea.height() / 4;
    int top = (widgetArea.height() - bandHeight) / 2;
    QRect textRect(0, top, widgetArea.width(), widgetArea.height() - top);
    doc.setTextWidth(textRect.width());
    QPainter painter(this);
    painter.save();
    painter.translate(textRect.topLeft());
    doc.drawContents(&painter, textRect.translated(-textRect.topLeft()));
    painter.restore();
}
// Short machine-style status string: error wins, then "no pipeline", else empty.
QString VideoWidget::getStatus()
{
    if (!lastError.isEmpty()) {
        return "ERROR";
    }
    if (!pipeline && m_pipelineDesc.isEmpty()) {
        return "NO PIPELINE";
    }
    return "";
}
// Human-readable counterpart of getStatus(): the first recorded pipeline
// error, or a hint that no pipeline is configured.
QString VideoWidget::getStatusMessage()
{
    if (!lastError.isEmpty()) {
        return lastError;
    }
    if (!pipeline && m_pipelineDesc.isEmpty()) {
        return "No pipeline";
    }
    return "";
}
// Intentionally a no-op: double clicks are swallowed (a fullscreen toggle
// could be hooked in here later).
void VideoWidget::mouseDoubleClickEvent(QMouseEvent *event)
{
    Q_UNUSED(event);
}
// On resize, ask the video sink to redraw when an overlay is active;
// otherwise let Qt handle the event normally.
void VideoWidget::resizeEvent(QResizeEvent *event)
{
    if (!overlay) {
        QWidget::resizeEvent(event);
        return;
    }
    overlay->expose();
}
// Returning NULL while an overlay is active bypasses Qt's double buffering
// (see setOverlay() for the full explanation).
QPaintEngine *VideoWidget::paintEngine() const
{
    if (overlay) {
        return NULL;
    }
    return QWidget::paintEngine();
}
// Maps a GStreamer GstState to the toolkit-independent Pipeline::State.
static Pipeline::State cvt(GstState state)
{
    switch (state) {
    case GST_STATE_VOID_PENDING:
        return Pipeline::VoidPending;
    case GST_STATE_NULL:
        return Pipeline::Null;
    case GST_STATE_READY:
        return Pipeline::Ready;
    case GST_STATE_PAUSED:
        return Pipeline::Paused;
    case GST_STATE_PLAYING:
        return Pipeline::Playing;
    }
    // unreachable for valid enum values; defensive default
    return Pipeline::Null;
}
// Returns a human-readable name for a Pipeline::State (used in log/UI messages).
static const char *name(Pipeline::State state)
{
    switch (state) {
    case Pipeline::VoidPending:
        return "VoidPending";
    case Pipeline::Null:
        return "Null";
    case Pipeline::Ready:
        return "Ready";
    case Pipeline::Paused:
        return "Paused";
    case Pipeline::Playing:
        return "Playing";
    }
    // unreachable for valid enum values; defensive default
    return "<unknown>";
}
// static StreamStatusEvent::StreamStatusType cvt(GstStreamStatusType type)
// {
// switch (type) {
// case GST_STREAM_STATUS_TYPE_CREATE:
// return StreamStatusEvent::Create;
//
// case GST_STREAM_STATUS_TYPE_ENTER:
// return StreamStatusEvent::Enter;
//
// case GST_STREAM_STATUS_TYPE_LEAVE:
// return StreamStatusEvent::Leave;
//
// case GST_STREAM_STATUS_TYPE_DESTROY:
// return StreamStatusEvent::Destroy;
//
// case GST_STREAM_STATUS_TYPE_START:
// return StreamStatusEvent::Start;
//
// case GST_STREAM_STATUS_TYPE_PAUSE:
// return StreamStatusEvent::Pause;
//
// case GST_STREAM_STATUS_TYPE_STOP:
// return StreamStatusEvent::Stop;
// }
// return StreamStatusEvent::Null;
// }
// Maps a GStreamer GstProgressType to the widget's ProgressEvent::ProgressType.
static ProgressEvent::ProgressType cvt(GstProgressType type)
{
    switch (type) {
    case GST_PROGRESS_TYPE_START:
        return ProgressEvent::Start;
    case GST_PROGRESS_TYPE_CONTINUE:
        return ProgressEvent::Continue;
    case GST_PROGRESS_TYPE_COMPLETE:
        return ProgressEvent::Complete;
    case GST_PROGRESS_TYPE_CANCELED:
        return ProgressEvent::Cancelled;
    case GST_PROGRESS_TYPE_ERROR:
        return ProgressEvent::Error;
    }
    // unreachable for valid enum values; defensive default
    return ProgressEvent::Error;
}
/**
 * Handles the custom PipelineEvent subclasses posted by BusSyncHandler from
 * the GStreamer streaming threads. Each branch formats a log message,
 * forwards it via emitEventMessage() and performs any event-specific work
 * (overlay ownership, state-change signal, latency recalculation, error
 * capture). Unhandled event types fall through to QWidget::event().
 *
 * Fixes vs. original:
 *  - Eos branch never substituted the "%0" placeholder (missing .arg()).
 *  - Latency branch called gst_bin_recalculate_latency() without the
 *    null-pipeline guard used by every other branch.
 */
bool VideoWidget::event(QEvent *event)
{
    if (event->type() == PipelineEvent::PrepareWindowId) {
        PrepareWindowIdEvent *pe = static_cast<PrepareWindowIdEvent *>(event);
        // we take ownership of the overlay object
        setOverlay(pe->getOverlay());
        QString msg = QString("PrepareWindowId: element %0 prepare window id").arg(pe->src);
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::StateChange) {
        StateChangedEvent *sce = static_cast<StateChangedEvent *>(event);
        QString msg = QString("StateChange: element %0 changed state from %1 to %2")
                      .arg(sce->src).arg(name(sce->getOldState())).arg(name(sce->getNewState()));
        emitEventMessage(msg);
        emit stateChanged(sce->getOldState(), sce->getNewState(), sce->getPendingState());
        if (sce->getNewState() == Pipeline::Playing) {
            if (pipeline) {
                // dump the running pipeline graph for debugging
                toDotFile("pipeline");
            }
        }
        return true;
    } else if (event->type() == PipelineEvent::StreamStatus) {
        StreamStatusEvent *sse = static_cast<StreamStatusEvent *>(event);
        QString msg = QString("StreamStatus: %0 %1 (%2)").arg(sse->src).arg(sse->getStatusName()).arg(sse->getOwner());
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::NewClock) {
        NewClockEvent *nce = static_cast<NewClockEvent *>(event);
        QString msg = QString("NewClock : element %0 has new clock %1").arg(nce->src).arg(nce->getName());
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::ClockProvide) {
        ClockProvideEvent *cpe = static_cast<ClockProvideEvent *>(event);
        QString msg = QString("ClockProvide: element %0 clock provide %1 ready=%2").arg(cpe->src).arg(cpe->getName()).arg(cpe->isReady());
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::ClockLost) {
        ClockLostEvent *cle = static_cast<ClockLostEvent *>(event);
        QString msg = QString("ClockLost: element %0 lost clock %1").arg(cle->src).arg(cle->getName());
        emitEventMessage(msg);
        // PRINT ("Clock lost, selecting a new one\n");
        // gst_element_set_state (pipeline, GST_STATE_PAUSED);
        // gst_element_set_state (pipeline, GST_STATE_PLAYING);
        return true;
    } else if (event->type() == PipelineEvent::Progress) {
        ProgressEvent *pe = static_cast<ProgressEvent *>(event);
        QString msg = QString("Progress: element %0 sent progress event: %1 %2 (%3)").arg(pe->src).arg(pe->getProgressType()).arg(
            pe->getCode()).arg(pe->getText());
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::Latency) {
        LatencyEvent *le = static_cast<LatencyEvent *>(event);
        QString msg = QString("Latency: element %0 sent latency event").arg(le->src);
        emitEventMessage(msg);
        // guard against a latency event arriving after the pipeline was disposed
        if (pipeline) {
            bool success = gst_bin_recalculate_latency(GST_BIN(pipeline));
            if (!success) {
                qWarning() << "Failed to recalculate latency";
            }
        }
        return true;
    } else if (event->type() == PipelineEvent::Qos) {
        QosEvent *qe = static_cast<QosEvent *>(event);
        QString msg = QString("Qos: element %0 sent QOS event: %1 %2 %3").arg(qe->src).arg(qe->getData().timestamps()).arg(
            qe->getData().values()).arg(qe->getData().stats());
        emitEventMessage(msg);
        if (pipeline) {
            toDotFile("pipeline_qos");
        }
        return true;
    } else if (event->type() == PipelineEvent::Eos) {
        EosEvent *ee = static_cast<EosEvent *>(event);
        // fix: the %0 placeholder was never substituted in the original code
        QString msg = QString("Eos: element %0 sent EOS event").arg(ee->src);
        emitEventMessage(msg);
        if (pipeline) {
            toDotFile("pipeline_eos");
        }
        return true;
    } else if (event->type() == PipelineEvent::Error) {
        ErrorEvent *ee = static_cast<ErrorEvent *>(event);
        QString msg = QString("Error: element %0 sent error event: %1 (%2)").arg(ee->src).arg(ee->getMessage()).arg(
            ee->getDebug());
        emitEventMessage(msg);
        if (lastError.isEmpty()) {
            // remember first error only (usually the most useful)
            lastError = QString("Pipeline error: %0").arg(ee->getMessage());
            // stop pipeline...
            stop();
        } else {
            // TODO record subsequent errors separately
        }
        return true;
    } else if (event->type() == PipelineEvent::Warning) {
        WarningEvent *we = static_cast<WarningEvent *>(event);
        QString msg = QString("Warning: element %0 sent warning event: %1 (%2)").arg(we->src).arg(we->getMessage()).arg(
            we->getDebug());
        emitEventMessage(msg);
        return true;
    } else if (event->type() == PipelineEvent::Info) {
        InfoEvent *ie = static_cast<InfoEvent *>(event);
        QString msg = QString("Info: element %0 sent info event: %1 (%2)").arg(ie->src).arg(ie->getMessage()).arg(
            ie->getDebug());
        emitEventMessage(msg);
        return true;
    }
    return QWidget::event(event);
}
// Forwards a formatted pipeline event description to listeners (e.g. the
// gadget's message log) via the message() signal.
void VideoWidget::emitEventMessage(QString msg)
{
    // qDebug() << "VideoWidget::event -" << msg;
    emit message(msg);
}
// Takes ownership of the given overlay (may be NULL) and reconfigures the
// widget's painting attributes accordingly.
void VideoWidget::setOverlay(Overlay *overlay)
{
    if (this->overlay != overlay) {
        // replace and destroy the previous overlay (deleting NULL is harmless)
        Overlay *previous = this->overlay;
        this->overlay = overlay;
        delete previous;
    }
    const bool hasOverlay = (overlay != NULL);
    setAutoFillBackground(!hasOverlay);
    // disable background painting to avoid flickering when resizing
    setAttribute(Qt::WA_OpaquePaintEvent, hasOverlay);
    // setAttribute(Qt::WA_NoSystemBackground, hasOverlay); // not sure it is needed
    // disable double buffering to avoid flickering when resizing
    // for this to work we also need to override paintEngine() and make it return NULL.
    // see http://qt-project.org/faq/answer/how_does_qtwa_paintonscreen_relate_to_the_backing_store_widget_composition_
    // drawback is that this widget won't participate in composition...
    setAttribute(Qt::WA_PaintOnScreen, hasOverlay);
}
// Dumps the current pipeline graph to a timestamped GraphViz .dot file
// (requires GST_DEBUG_DUMP_DOT_DIR to be set). No-op without a pipeline.
void VideoWidget::toDotFile(QString name)
{
    if (pipeline) {
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_VERBOSE, name.toStdString().c_str());
    }
}
static GstElement *createPipelineFromDesc(const char *desc, QString &lastError)
{
qDebug() << "VideoWidget::createPipelineFromDesc - creating pipeline :" << desc;
GError *error = NULL;
GstElement *pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &error);
if (!pipeline) {
if (error) {
// no pipeline and error...
// report error to user
QString msg = QString("Failed to create pipeline: %0").arg(error->message);
qCritical() << "VideoWidget::createPipelineFromDesc -" << msg;
lastError = msg;
} else {
// no pipeline and no error...
// report generic error
QString msg = QString("Failed to create pipeline (no error reported!)");
qCritical() << "VideoWidget::createPipelineFromDesc -" << msg;
lastError = msg;
}
} else if (error) {
// pipeline and error...
// report error to user?
// warning?
QString msg = QString("Created pipeline with error: %0").arg(error->message);
qWarning() << "VideoWidget::createPipelineFromDesc -" << msg;
} else {
// qDebug() << gst_bin_get_by_name(GST_BIN(pipeline), "videotestsrc0");
}
if (error) {
g_error_free(error);
}
return pipeline;
}
// Synchronous bus callback: inspects each GStreamer message and re-posts it
// to the widget as a Qt event so it is processed on the GUI thread.
// Only the prepare-window-handle message is truly handled here (returns
// true), because the window handle must be given to the sink synchronously;
// everything else is forwarded asynchronously and left on the bus.
bool BusSyncHandler::handleMessage(GstMessage *message)
{
    // this method is called by gstreamer as a callback
    // and as such is not necessarily called on the QT event handling thread
    bool handled = false;
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ELEMENT:
    {
        if (gst_is_video_overlay_prepare_window_handle_message(message)) {
            qDebug().noquote() << QString("VideoWidget::handleMessage - element %0 prepare window with id #%1").arg(GST_OBJECT_NAME(message->src)).arg((gulong)wId);
            // prepare-xwindow-id must be handled synchronously in order to have gstreamer use our window
            GstVideoOverlay *gst_video_overlay = GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message));
            gst_video_overlay_set_window_handle(gst_video_overlay, (gulong)wId);
            // and now post event asynchronously
            Overlay *overlay = new GstOverlayImpl(gst_video_overlay);
            QString src(GST_OBJECT_NAME(message->src));
            QCoreApplication::postEvent(widget, new PrepareWindowIdEvent(src, overlay));
            // notify that the message was handled
            handled = true;
        }
        break;
    }
    case GST_MESSAGE_STATE_CHANGED:
    {
        // only pipeline-level state changes are of interest to the UI
        if (GST_IS_PIPELINE(message->src)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed(message, &old_state, &new_state, &pending_state);
            QString src(GST_OBJECT_NAME(message->src));
            QCoreApplication::postEvent(widget, new StateChangedEvent(src, cvt(old_state), cvt(new_state), cvt(pending_state)));
        }
        break;
    }
    case GST_MESSAGE_STREAM_STATUS:
    {
        // parsed but currently not forwarded (see commented code)
        GstStreamStatusType type;
        GstElement *owner;
        gst_message_parse_stream_status(message, &type, &owner);
        // QString src(GST_OBJECT_NAME(message->src));
        // QString name(GST_OBJECT_NAME(owner));
        // QCoreApplication::postEvent(widget, new StreamStatusEvent(src, cvt(type), name));
        break;
    }
    case GST_MESSAGE_NEW_CLOCK:
    {
        if (GST_IS_PIPELINE(message->src)) {
            GstClock *clock;
            gst_message_parse_new_clock(message, &clock);
            QString src(GST_OBJECT_NAME(message->src));
            QString name(GST_OBJECT_NAME(clock));
            QCoreApplication::postEvent(widget, new NewClockEvent(src, name));
        }
        break;
    }
    case GST_MESSAGE_CLOCK_PROVIDE:
    {
        if (GST_IS_PIPELINE(message->src)) {
            GstClock *clock;
            gboolean ready;
            gst_message_parse_clock_provide(message, &clock, &ready);
            QString src(GST_OBJECT_NAME(message->src));
            QString name(GST_OBJECT_NAME(clock));
            QCoreApplication::postEvent(widget, new ClockProvideEvent(src, name, ready));
        }
        break;
    }
    case GST_MESSAGE_CLOCK_LOST:
    {
        if (GST_IS_PIPELINE(message->src)) {
            GstClock *clock;
            gst_message_parse_clock_lost(message, &clock);
            QString src(GST_OBJECT_NAME(message->src));
            QString name(GST_OBJECT_NAME(clock));
            QCoreApplication::postEvent(widget, new ClockLostEvent(src, name));
        }
        break;
    }
    case GST_MESSAGE_PROGRESS:
    {
        GstProgressType type;
        gchar *code;
        gchar *text;
        gst_message_parse_progress(message, &type, &code, &text);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new ProgressEvent(src, cvt(type), QString(code), QString(text)));
        // parse_progress returns owned copies; release them after conversion to QString
        g_free(code);
        g_free(text);
        break;
    }
    case GST_MESSAGE_SEGMENT_START:
    {
        // parsed but currently not forwarded (see commented code)
        GstFormat format;
        gint64 position;
        gst_message_parse_segment_start(message, &format, &position);
        // QString src(GST_OBJECT_NAME(message->src));
        // QCoreApplication::postEvent(widget, new InfoEvent(src, QString("Segment start %0").arg(position), ""));
        break;
    }
    case GST_MESSAGE_SEGMENT_DONE:
    {
        GstFormat format;
        gint64 position;
        gst_message_parse_segment_done(message, &format, &position);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new InfoEvent(src, QString("Segment done %0").arg(position), ""));
        break;
    }
    case GST_MESSAGE_LATENCY:
    {
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new LatencyEvent(src));
        break;
    }
    case GST_MESSAGE_BUFFERING:
    {
        gint percent;
        gst_message_parse_buffering(message, &percent);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new InfoEvent(src, QString("%0%").arg(percent), ""));
        break;
    }
    case GST_MESSAGE_QOS:
    {
        // QOS info arrives in three parts; collect all of them into one QosData
        QosData data;
        gboolean live;
        guint64 running_time;
        guint64 stream_time;
        guint64 timestamp;
        guint64 duration;
        gst_message_parse_qos(message, &live, &running_time, &stream_time, &timestamp, &duration);
        data.live = (live == true);
        data.running_time = running_time;
        data.stream_time = stream_time;
        data.timestamp = timestamp;
        data.duration = duration;
        gint64 jitter;
        gdouble proportion;
        gint quality;
        gst_message_parse_qos_values(message, &jitter, &proportion, &quality);
        data.jitter = jitter;
        data.proportion = proportion;
        data.quality = quality;
        guint64 processed;
        guint64 dropped;
        gst_message_parse_qos_stats(message, NULL, &processed, &dropped);
        data.processed = processed;
        data.dropped = dropped;
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new QosEvent(src, data));
        break;
    }
    case GST_MESSAGE_EOS:
    {
        /* end-of-stream */
        // g_main_loop_quit (loop);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new EosEvent(src));
        break;
    }
    case GST_MESSAGE_ERROR:
    {
        GError *err;
        gchar *debug;
        gst_message_parse_error(message, &err, &debug);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new ErrorEvent(src, QString(err->message), QString(debug)));
        // parse_error returns owned copies; release after conversion to QString
        g_error_free(err);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_WARNING:
    {
        GError *err;
        gchar *debug;
        gst_message_parse_warning(message, &err, &debug);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new WarningEvent(src, QString(err->message), QString(debug)));
        g_error_free(err);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_INFO:
    {
        GError *err;
        gchar *debug;
        gst_message_parse_info(message, &err, &debug);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new InfoEvent(src, QString(err->message), QString(debug)));
        g_error_free(err);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_TAG:
    {
        // tags are parsed (and must be unreffed) but not forwarded for now
        GstTagList *tags = NULL;
        gst_message_parse_tag(message, &tags);
        // QString src(GST_OBJECT_NAME(message->src));
        // QCoreApplication::postEvent(widget, new InfoEvent(src, QString(err->message), QString(debug)));
        gst_tag_list_unref(tags);
        break;
    }
    default:
    {
        // unknown message types are surfaced to the UI for diagnostics
        // const GstStructure *s;
        // const gchar *name;
        //
        // s = gst_message_get_structure (message);
        //
        // name = gst_structure_get_name(s);
        QString src(GST_OBJECT_NAME(message->src));
        QCoreApplication::postEvent(widget, new InfoEvent(src, "Unhandled message", QString("%0").arg(GST_MESSAGE_TYPE_NAME(message))));
        break;
    }
    }
    return handled;
}
// GStreamer bus sync callback: delegates to the BusSyncHandler. Messages
// handled synchronously are unreffed and dropped; the rest stay on the bus.
static GstBusSyncReply gst_bus_sync_handler(GstBus *bus, GstMessage *message, BusSyncHandler *handler)
{
    Q_UNUSED(bus);
    if (!handler->handleMessage(message)) {
        return GST_BUS_PASS;
    }
    gst_message_unref(message);
    return GST_BUS_DROP;
}

View File

@ -0,0 +1,94 @@
/**
******************************************************************************
*
* @file videogadgetwidget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOWIDGET_H_
#define VIDEOWIDGET_H_
#include "gst_global.h"
#include "pipeline.h"
#include "overlay.h"
#include <QWidget>
#include <QResizeEvent>
#include <QPaintEvent>
#include <QMouseEvent>
typedef struct _GstElement GstElement;
class BusSyncHandler;
// Widget that renders a GStreamer pipeline (described by a gst-launch style
// string) into its own native window via the GstVideoOverlay interface.
class GST_LIB_EXPORT VideoWidget : public QWidget {
    Q_OBJECT
public:
    VideoWidget(QWidget *parent = 0);
    ~VideoWidget();
    // current gst-launch style pipeline description
    QString pipelineDesc();
    void setPipelineDesc(QString pipelineDesc);
    // true when a pipeline exists and is in the PLAYING state
    bool isPlaying();
public slots:
    void start();
    void pause();
    void stop();
signals:
    // formatted pipeline event descriptions (for logging/UI)
    void message(QString);
    // forwarded pipeline state transitions
    void stateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState);
protected:
    QString getStatus();
    QString getStatusMessage();
    void paintStatus(QPaintEvent *);
    // QWidget overrides
    void paintEvent(QPaintEvent *);
    void resizeEvent(QResizeEvent *);
    void mouseDoubleClickEvent(QMouseEvent *);
private:
    void init();
    void dispose();
    void setOverlay(Overlay *);
    void emitEventMessage(QString msg);
    void toDotFile(QString name);
    // QWidget overrides
    bool event(QEvent *);
    QPaintEngine *paintEngine() const;
    QString m_pipelineDesc;     // gst-launch style description used to build the pipeline
    QString lastError;          // first pipeline error message, empty if none
    GstElement *pipeline;       // owned pipeline, NULL when stopped
    Overlay *overlay;           // owned video overlay, NULL when no sink attached
    BusSyncHandler *handler;    // owned bus message handler
    // DeviceMonitor m;
};
#endif /* VIDEOWIDGET_H_ */

View File

@ -11,6 +11,6 @@ SUBDIRS = \
qwt \
sdlgamepad
osg {
SUBDIRS += osgearth
}
gstreamer:SUBDIRS += gstreamer
osg:SUBDIRS += osgearth

View File

@ -231,6 +231,13 @@ plugin_flightlog.depends += plugin_uavobjects
plugin_flightlog.depends += plugin_uavtalk
SUBDIRS += plugin_flightlog
# Video plugin
gstreamer {
plugin_video.subdir = video
plugin_video.depends = plugin_coreplugin
SUBDIRS += plugin_video
}
# Usage Tracker plugin
plugin_usagetracker.subdir = usagetracker
plugin_usagetracker.depends = plugin_coreplugin

View File

@ -0,0 +1,10 @@
<plugin name="VideoGadget" version="1.0.0" compatVersion="1.0.0">
<vendor>The LibrePilot Project</vendor>
<copyright>(C) 2017 The LibrePilot Project</copyright>
<license>The GNU Public License (GPL) Version 3</license>
<description>A video gadget</description>
<url>http://www.librepilot.org</url>
<dependencyList>
<dependency name="Core" version="1.0.0"/>
</dependencyList>
</plugin>

View File

@ -0,0 +1,86 @@
/**
******************************************************************************
*
* @file helpdialog.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "helpdialog.h"
#include "ui_helpdialog.h"
#include <QDebug>
#include <QPushButton>
// Builds the GStreamer help dialog from its .ui form and wires up the
// Close button and the element list selection.
// NOTE(review): m_windowWidth/m_windowHeight are initialized to 0 just above,
// so the resize() branch below can never trigger — presumably intended to be
// restored from saved settings; confirm before relying on it.
HelpDialog::HelpDialog(QWidget *parent, const QString &elementId)
    : QDialog(parent),
    m_windowWidth(0),
    m_windowHeight(0)
{
    Q_UNUSED(elementId)
    m_helpDialog = new Ui_HelpDialog();
    m_helpDialog->setupUi(this);
    setWindowTitle(tr("GStreamer Help"));
    if (m_windowWidth > 0 && m_windowHeight > 0) {
        resize(m_windowWidth, m_windowHeight);
    }
    m_helpDialog->buttonBox->button(QDialogButtonBox::Close)->setDefault(true);
    connect(m_helpDialog->buttonBox->button(QDialogButtonBox::Close), SIGNAL(clicked()), this, SLOT(close()));
    // keep both panes of the splitter visible at all times
    m_helpDialog->splitter->setCollapsible(0, false);
    m_helpDialog->splitter->setCollapsible(1, false);
    connect(m_helpDialog->elementListWidget, SIGNAL(currentItemChanged(QListWidgetItem *, QListWidgetItem *)),
            this, SLOT(pageSelected()));
    // plugin listing is not implemented yet (see commented code)
    QList<QString> plugins; // = gst::pluginList();
    // foreach(QString pluginName, plugins) {
    // new QListWidgetItem(pluginName, m_helpDialog->elementListWidget);
    // }
}
// Destructor; category-item cleanup is stubbed out (see commented code).
// NOTE(review): m_helpDialog (allocated in the constructor) is not deleted
// here — confirm whether Qt parent ownership covers it or this is a leak.
HelpDialog::~HelpDialog()
{
    // foreach(QString category, m_categoryItemsMap.keys()) {
    // QList<QTreeWidgetItem *> *categoryItemList = m_categoryItemsMap.value(category);
    // delete categoryItemList;
    // }
}
// Slot for list selection changes; not implemented yet.
void HelpDialog::itemSelected()
{}
void HelpDialog::close()
{}
// Runs the dialog modally. Always reports true since the dialog only has a
// Close button (no Ok/Apply distinction yet).
bool HelpDialog::execDialog()
{
    exec();
    return true;
}

View File

@ -0,0 +1,60 @@
/**
******************************************************************************
*
* @file helpdialog.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef HELPDIALOG_H
#define HELPDIALOG_H
#include <QDialog>
#include <QList>
class Ui_HelpDialog;
// Modal dialog listing GStreamer plugins/elements with a help text pane.
class HelpDialog : public QDialog {
    Q_OBJECT
public:
    HelpDialog(QWidget *parent, const QString &initialElement = QString());
    ~HelpDialog();
    // Run the dialog and return true if 'Ok' was chosen or 'Apply' was invoked
    // at least once
    bool execDialog();
private slots:
    void itemSelected();
    // NOTE: shadows the QWidget::close() slot; connected to the Close button
    void close();
private:
    Ui_HelpDialog *m_helpDialog; // generated UI form (owned)
    QList<QString> m_elements;   // element names shown in the list
    int m_windowWidth;           // restored window width (0 = use default)
    int m_windowHeight;          // restored window height (0 = use default)
};
#endif // HELPDIALOG_H

View File

@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>HelpDialog</class>
<widget class="QDialog" name="HelpDialog">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>697</width>
<height>476</height>
</rect>
</property>
<property name="windowTitle">
<string>Options</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout">
<item>
<widget class="QSplitter" name="splitter">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<widget class="QWidget" name="layoutWidget">
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QListWidget" name="pluginListWidget"/>
</item>
<item>
<widget class="QListWidget" name="elementListWidget"/>
</item>
</layout>
</widget>
<widget class="QTextBrowser" name="textBrowser"/>
</widget>
</item>
<item>
<widget class="QDialogButtonBox" name="buttonBox">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="standardButtons">
<set>QDialogButtonBox::Close</set>
</property>
</widget>
</item>
</layout>
</widget>
<resources/>
<connections>
<connection>
<sender>buttonBox</sender>
<signal>accepted()</signal>
<receiver>HelpDialog</receiver>
<slot>accept()</slot>
<hints>
<hint type="sourcelabel">
<x>297</x>
<y>361</y>
</hint>
<hint type="destinationlabel">
<x>297</x>
<y>193</y>
</hint>
</hints>
</connection>
<connection>
<sender>buttonBox</sender>
<signal>rejected()</signal>
<receiver>HelpDialog</receiver>
<slot>reject()</slot>
<hints>
<hint type="sourcelabel">
<x>297</x>
<y>361</y>
</hint>
<hint type="destinationlabel">
<x>297</x>
<y>193</y>
</hint>
</hints>
</connection>
</connections>
</ui>

Binary file not shown.

After

Width:  |  Height:  |  Size: 729 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 655 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 961 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 513 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1003 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 990 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 987 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 481 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 571 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -0,0 +1,37 @@
# qmake project for the Video gadget GCS plugin (shared library).
TEMPLATE = lib
TARGET = VideoGadget
QT += widgets
# Plugin framework, core plugin API and the gstreamer wrapper library.
include(../../plugin.pri)
include(../../plugins/coreplugin/coreplugin.pri)
include(../../libs/gstreamer/gstreamer.pri)
HEADERS += \
    helpdialog.h \
    videoplugin.h \
    videogadgetconfiguration.h \
    videogadget.h \
    videogadgetwidget.h \
    videogadgetfactory.h \
    videogadgetoptionspage.h
SOURCES += \
    helpdialog.cpp \
    videoplugin.cpp \
    videogadgetconfiguration.cpp \
    videogadget.cpp \
    videogadgetfactory.cpp \
    videogadgetwidget.cpp \
    videogadgetoptionspage.cpp
OTHER_FILES += \
    VideoGadget.pluginspec
FORMS += \
    helpdialog.ui \
    video.ui \
    videooptionspage.ui
# Media player icons (22x22 / 32x32) used by the control buttons.
RESOURCES += \
    video.qrc

View File

@ -0,0 +1,33 @@
<RCC>
<!-- Media-player icons (22x22 and 32x32 variants) referenced as :/video/... -->
<qresource prefix="/video" >
<file>resources/22x22/media-eject.png</file>
<file>resources/32x32/media-eject.png</file>
<file>resources/22x22/media-playback-pause.png</file>
<file>resources/32x32/media-playback-pause.png</file>
<file>resources/22x22/media-playback-start.png</file>
<file>resources/32x32/media-playback-start.png</file>
<file>resources/22x22/media-playback-stop.png</file>
<file>resources/32x32/media-playback-stop.png</file>
<file>resources/22x22/media-record.png</file>
<file>resources/32x32/media-record.png</file>
<file>resources/22x22/media-seek-backward.png</file>
<file>resources/32x32/media-seek-backward.png</file>
<file>resources/22x22/media-seek-forward.png</file>
<file>resources/32x32/media-seek-forward.png</file>
<file>resources/22x22/media-skip-backward.png</file>
<file>resources/32x32/media-skip-backward.png</file>
<file>resources/22x22/media-skip-forward.png</file>
<file>resources/32x32/media-skip-forward.png</file>
<file>resources/22x22/utilities-terminal.png</file>
<file>resources/32x32/utilities-terminal.png</file>
</qresource>
</RCC>

View File

@ -0,0 +1,204 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>Form</class>
<widget class="QWidget" name="Form">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>400</width>
<height>572</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<property name="autoFillBackground">
<bool>false</bool>
</property>
<layout class="QVBoxLayout">
<property name="spacing">
<number>0</number>
</property>
<property name="leftMargin">
<number>0</number>
</property>
<property name="topMargin">
<number>0</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>0</number>
</property>
<item>
<widget class="VideoWidget" name="video" native="true">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Expanding">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
</widget>
</item>
<item>
<widget class="QTextBrowser" name="consoleTextBrowser">
<property name="sizePolicy">
<sizepolicy hsizetype="Expanding" vsizetype="Minimum">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="lineWrapMode">
<enum>QTextEdit::NoWrap</enum>
</property>
</widget>
</item>
<item>
<layout class="QVBoxLayout" name="verticalLayout">
<property name="spacing">
<number>0</number>
</property>
<property name="leftMargin">
<number>0</number>
</property>
<property name="topMargin">
<number>0</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>0</number>
</property>
<item>
<layout class="QHBoxLayout" name="horizontalLayout">
<property name="leftMargin">
<number>6</number>
</property>
<property name="topMargin">
<number>6</number>
</property>
<property name="rightMargin">
<number>6</number>
</property>
<property name="bottomMargin">
<number>6</number>
</property>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_3">
<item>
<widget class="QPushButton" name="startButton">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-start.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-start.png</normalon>:/video/resources/22x22/media-playback-start.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pauseButton">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-pause.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-pause.png</normalon>:/video/resources/22x22/media-playback-pause.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="stopButton">
<property name="mouseTracking">
<bool>true</bool>
</property>
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-stop.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-stop.png</normalon>:/video/resources/22x22/media-playback-stop.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
<item>
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
<widget class="QPushButton" name="consoleButton">
<property name="mouseTracking">
<bool>true</bool>
</property>
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/utilities-terminal.png</normaloff>
<normalon>:/video/resources/22x22/utilities-terminal.png</normalon>:/video/resources/22x22/utilities-terminal.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</item>
</layout>
</item>
</layout>
</widget>
<customwidgets>
<customwidget>
<class>VideoWidget</class>
<extends>QWidget</extends>
<header>videowidget.h</header>
<container>1</container>
</customwidget>
</customwidgets>
<resources/>
<connections/>
</ui>

View File

@ -0,0 +1,46 @@
/**
******************************************************************************
*
* @file videogadget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
#include "videogadgetwidget.h"
#include "videogadget.h"
// Constructs the gadget around an existing widget; the gadget takes
// ownership of the widget (deleted in the destructor).
VideoGadget::VideoGadget(QString classId, VideoGadgetWidget *widget, QWidget *parent) :
    IUAVGadget(classId, parent),
    m_widget(widget)
{}
// Destroys the owned widget.
VideoGadget::~VideoGadget()
{
    delete m_widget;
}
// Applies a gadget configuration to the embedded widget.
// qobject_cast returns NULL when a configuration of another type is passed
// in; guard against handing a null pointer to the widget (which dereferences
// it unconditionally).
void VideoGadget::loadConfiguration(IUAVGadgetConfiguration *config)
{
    VideoGadgetConfiguration *m = qobject_cast<VideoGadgetConfiguration *>(config);

    if (m) {
        m_widget->setConfiguration(m);
    }
}

View File

@ -0,0 +1,70 @@
/**
******************************************************************************
*
* @file videogadget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGET_H_
#define VIDEOGADGET_H_
#include <coreplugin/iuavgadget.h>
#include "videogadgetwidget.h"
namespace Core {
class IUAVGadget;
}
class IUAVGadget;
class QWidget;
class QString;
class VideoGadgetWidget;
using namespace Core;
// IUAVGadget implementation hosting a VideoGadgetWidget.
class VideoGadget : public Core::IUAVGadget {
    Q_OBJECT
public:
    // Takes ownership of widget.
    VideoGadget(QString classId, VideoGadgetWidget *widget, QWidget *parent = 0);
    ~VideoGadget();
    QList<int> context() const
    {
        return m_context;
    }
    QWidget *widget()
    {
        return m_widget;
    }
    // Forwards the configuration to the embedded widget.
    void loadConfiguration(IUAVGadgetConfiguration *config);
    QString contextHelpId() const
    {
        return QString();
    }
private:
    VideoGadgetWidget *m_widget; // owned
    QList<int> m_context;        // NOTE(review): never populated in this file — context() returns an empty list
};
#endif // VIDEOGADGET_H_

View File

@ -0,0 +1,73 @@
/**
******************************************************************************
*
* @file videogadgetconfiguration.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
// Restores a configuration from persisted settings. Members are listed in
// their declaration order (video options, control options, pipeline strings).
VideoGadgetConfiguration::VideoGadgetConfiguration(QString classId, QSettings &settings, QObject *parent) :
    IUAVGadgetConfiguration(classId, parent),
    m_displayVideo(settings.value("displayVideo").toBool()),
    m_respectAspectRatio(settings.value("respectAspectRatio").toBool()),
    m_displayControls(settings.value("displayControls").toBool()),
    m_autoStart(settings.value("autoStart").toBool()),
    m_pipelineDesc(settings.value("pipelineDesc").toString()),
    m_pipelineInfo(settings.value("pipelineInfo").toString())
{}
// Copy constructor used by clone(); copies every persisted option.
VideoGadgetConfiguration::VideoGadgetConfiguration(const VideoGadgetConfiguration &obj) :
    IUAVGadgetConfiguration(obj.classId(), obj.parent()),
    m_displayVideo(obj.m_displayVideo),
    m_respectAspectRatio(obj.m_respectAspectRatio),
    m_displayControls(obj.m_displayControls),
    m_autoStart(obj.m_autoStart),
    m_pipelineDesc(obj.m_pipelineDesc),
    m_pipelineInfo(obj.m_pipelineInfo)
{}
/**
 * Clones a configuration.
 *
 * Called by the gadget framework when a configuration is duplicated.
 */
IUAVGadgetConfiguration *VideoGadgetConfiguration::clone() const
{
    return new VideoGadgetConfiguration(*this);
}
/**
 * Saves a configuration.
 *
 * Persists every option under the keys read back by the QSettings constructor.
 */
void VideoGadgetConfiguration::saveConfig(QSettings &settings) const
{
    settings.setValue("displayVideo", m_displayVideo);
    settings.setValue("autoStart", m_autoStart);
    settings.setValue("displayControls", m_displayControls);
    settings.setValue("respectAspectRatio", m_respectAspectRatio);
    settings.setValue("pipelineDesc", m_pipelineDesc);
    settings.setValue("pipelineInfo", m_pipelineInfo);
}

View File

@ -0,0 +1,104 @@
/**
******************************************************************************
*
* @file videogadgetconfiguration.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETCONFIGURATION_H
#define VIDEOGADGETCONFIGURATION_H
#include <coreplugin/iuavgadgetconfiguration.h>
using namespace Core;
/**
 * Holds the video gadget settings: whether the video area and the control bar
 * are shown, auto-start behaviour, and the GStreamer pipeline
 * description/info strings. Persisted through QSettings.
 */
class VideoGadgetConfiguration : public IUAVGadgetConfiguration {
    Q_OBJECT
public:
    // Restores the configuration from persisted settings.
    // (parameter renamed from "Settings" to "settings" to match the definition
    // in videogadgetconfiguration.cpp and the project's naming convention)
    explicit VideoGadgetConfiguration(QString classId, QSettings &settings, QObject *parent = 0);
    explicit VideoGadgetConfiguration(const VideoGadgetConfiguration &obj);
    IUAVGadgetConfiguration *clone() const;
    void saveConfig(QSettings &settings) const;
    bool displayVideo() const
    {
        return m_displayVideo;
    }
    void setDisplayVideo(bool displayVideo)
    {
        m_displayVideo = displayVideo;
    }
    bool displayControls() const
    {
        return m_displayControls;
    }
    void setDisplayControls(bool displayControls)
    {
        m_displayControls = displayControls;
    }
    bool autoStart() const
    {
        return m_autoStart;
    }
    void setAutoStart(bool autoStart)
    {
        m_autoStart = autoStart;
    }
    bool respectAspectRatio() const
    {
        return m_respectAspectRatio;
    }
    void setRespectAspectRatio(bool respectAspectRatio)
    {
        m_respectAspectRatio = respectAspectRatio;
    }
    QString pipelineDesc() const
    {
        return m_pipelineDesc;
    }
    void setPipelineDesc(QString pipelineDesc)
    {
        m_pipelineDesc = pipelineDesc;
    }
    QString pipelineInfo() const
    {
        return m_pipelineInfo;
    }
    void setPipelineInfo(QString pipelineInfo)
    {
        m_pipelineInfo = pipelineInfo;
    }
private:
    // video
    bool m_displayVideo;       // show/hide the video area
    bool m_respectAspectRatio; // keep the source aspect ratio (not implemented yet in the UI)
    // controls
    bool m_displayControls;    // show/hide the start/pause/stop control bar
    bool m_autoStart;          // start the pipeline as soon as the configuration is applied
    QString m_pipelineDesc;    // gst-launch style pipeline description
    QString m_pipelineInfo;    // free-form info text shown in the options page
};
#endif // VIDEOGADGETCONFIGURATION_H

View File

@ -0,0 +1,57 @@
/**
******************************************************************************
*
* @file videogadgetfactory.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetfactory.h"
#include "videogadgetwidget.h"
#include "videogadget.h"
#include "videogadgetconfiguration.h"
#include "videogadgetoptionspage.h"
#include <coreplugin/uavgadgetoptionspagedecorator.h>
#include <coreplugin/iuavgadget.h>
// Registers the factory under class id "VideoGadget" with display name "Video".
VideoGadgetFactory::VideoGadgetFactory(QObject *parent) :
    IUAVGadgetFactory(QString("VideoGadget"), tr("Video"), parent)
{}
// Nothing to clean up; gadgets and configurations are owned by their creators.
VideoGadgetFactory::~VideoGadgetFactory()
{}
// Creates a gadget instance wrapping a freshly built VideoGadgetWidget;
// the VideoGadget takes ownership of the widget.
Core::IUAVGadget *VideoGadgetFactory::createGadget(QWidget *parent)
{
    VideoGadgetWidget *gadgetWidget = new VideoGadgetWidget(parent);

    return new VideoGadget(QString("VideoGadget"), gadgetWidget, parent);
}
// Restores a gadget configuration from persisted settings.
IUAVGadgetConfiguration *VideoGadgetFactory::createConfiguration(QSettings &settings)
{
    return new VideoGadgetConfiguration(QString("VideoGadget"), settings);
}
// Creates the options page for a configuration.
// NOTE(review): qobject_cast yields NULL for a foreign configuration type;
// the framework is presumably expected to only pass VideoGadgetConfiguration
// here — confirm.
IOptionsPage *VideoGadgetFactory::createOptionsPage(IUAVGadgetConfiguration *config)
{
    return new VideoGadgetOptionsPage(qobject_cast<VideoGadgetConfiguration *>(config));
}

View File

@ -0,0 +1,51 @@
/**
******************************************************************************
*
* @file videogadgetfactory.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETFACTORY_H_
#define VIDEOGADGETFACTORY_H_
#include <coreplugin/iuavgadgetfactory.h>
namespace Core {
class IUAVGadget;
class IUAVGadgetFactory;
}
using namespace Core;
// Factory creating video gadgets, their configurations and options pages.
class VideoGadgetFactory : public IUAVGadgetFactory {
    Q_OBJECT
public:
    VideoGadgetFactory(QObject *parent = 0);
    ~VideoGadgetFactory();
    IUAVGadget *createGadget(QWidget *parent);
    IUAVGadgetConfiguration *createConfiguration(QSettings &settings);
    IOptionsPage *createOptionsPage(IUAVGadgetConfiguration *config);
};
#endif // VIDEOGADGETFACTORY_H_

View File

@ -0,0 +1,82 @@
/**
******************************************************************************
*
* @file videogadgetoptionspage.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetoptionspage.h"
#include "videogadgetconfiguration.h"
#include "helpdialog.h"
#include "ui_videooptionspage.h"
// Stores the configuration to edit; the UI object is created lazily in
// createPage() and is null until then.
VideoGadgetOptionsPage::VideoGadgetOptionsPage(VideoGadgetConfiguration *config, QObject *parent) :
    IOptionsPage(parent), m_config(config), m_page(0)
{}
// Builds the options page UI and populates the widgets from the current
// configuration.
QWidget *VideoGadgetOptionsPage::createPage(QWidget *parent)
{
    m_page = new Ui::VideoOptionsPage();
    QWidget *w = new QWidget(parent);
    m_page->setupUi(w);
    // TODO these options are not functional yet, so keep them hidden
    m_page->respectAspectRatioCheckBox->setVisible(false);
    m_page->helpButton->setVisible(false);
    // reflect the current configuration in the widgets
    m_page->displayVideoCheckBox->setChecked(m_config->displayVideo());
    m_page->displayControlsCheckBox->setChecked(m_config->displayControls());
    m_page->autoStartCheckBox->setChecked(m_config->autoStart());
    m_page->respectAspectRatioCheckBox->setChecked(m_config->respectAspectRatio());
    m_page->descPlainTextEdit->setPlainText(m_config->pipelineDesc());
    m_page->infoPlainTextEdit->setPlainText(m_config->pipelineInfo());
    // use the compile-time-checked pointer-to-member connect syntax,
    // consistent with the connects in videogadgetwidget.cpp
    connect(m_page->helpButton, &QPushButton::clicked, this, &VideoGadgetOptionsPage::openHelpDialog);
    return w;
}
// Copies the widget state back into the configuration object.
void VideoGadgetOptionsPage::apply()
{
    m_config->setDisplayVideo(m_page->displayVideoCheckBox->isChecked());
    m_config->setDisplayControls(m_page->displayControlsCheckBox->isChecked());
    m_config->setAutoStart(m_page->autoStartCheckBox->isChecked());
    m_config->setRespectAspectRatio(m_page->respectAspectRatioCheckBox->isChecked());
    m_config->setPipelineDesc(m_page->descPlainTextEdit->toPlainText());
    m_config->setPipelineInfo(m_page->infoPlainTextEdit->toPlainText());
}
// Releases the UI object built by createPage().
// Reset the pointer after deletion so a repeated finish() (or a later
// createPage()/finish() cycle) cannot double-delete it.
void VideoGadgetOptionsPage::finish()
{
    delete m_page;
    m_page = 0;
}
// Opens the pipeline help dialog modally (currently unreachable: the help
// button is hidden in createPage()).
void VideoGadgetOptionsPage::openHelpDialog()
{
    HelpDialog dlg(0);

    dlg.execDialog();
}

View File

@ -0,0 +1,80 @@
/**
******************************************************************************
*
* @file videogadgetoptionspage.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETOPTIONSPAGE_H
#define VIDEOGADGETOPTIONSPAGE_H
#include "coreplugin/dialogs/ioptionspage.h"
#include <QLabel>
#include <QPlainTextEdit>
class VideoGadgetConfiguration;
namespace Ui {
class VideoOptionsPage;
}
using namespace Core;
// Options page editing a VideoGadgetConfiguration (display flags, auto-start,
// pipeline description/info).
class VideoGadgetOptionsPage : public IOptionsPage {
    Q_OBJECT
public:
    // config is not owned by the page.
    explicit VideoGadgetOptionsPage(VideoGadgetConfiguration *config, QObject *parent = 0);
    QString id() const
    {
        return "";
    }
    QString trName() const
    {
        return "";
    }
    QString category() const
    {
        return "";
    }
    QString trCategory() const
    {
        return "";
    }
    QWidget *createPage(QWidget *parent);
    void apply();
    void finish();
    // private signals:
    // public slots:
private slots:
    void openHelpDialog();
private:
    VideoGadgetConfiguration *m_config; // configuration being edited (not owned)
    Ui::VideoOptionsPage *m_page;       // generated UI, created in createPage(), freed in finish()
};
#endif // VIDEOGADGETOPTIONSPAGE_H

View File

@ -0,0 +1,170 @@
/**
******************************************************************************
*
* @file videogadgetwidget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
#include "videogadgetwidget.h"
#include "pipeline.h"
#include <QtCore>
#include <QDebug>
#include <QStringList>
#include <QTextEdit>
#include <QPushButton>
#include <QWidget>
// Builds the gadget UI, wires the control buttons and the video widget's
// signals, and puts the buttons into their initial state.
VideoGadgetWidget::VideoGadgetWidget(QWidget *parent) :
    QFrame(parent)
{
    m_ui = new Ui_Form();
    m_ui->setupUi(this);
    // the console/log area stays hidden until toggled via the console button
    m_ui->consoleTextBrowser->setVisible(false);
    connect(videoWidget(), &VideoWidget::stateChanged, this, &VideoGadgetWidget::onStateChanged);
    connect(videoWidget(), &VideoWidget::message, this, &VideoGadgetWidget::msg);
    connect(m_ui->startButton, &QPushButton::clicked, this, &VideoGadgetWidget::start);
    connect(m_ui->pauseButton, &QPushButton::clicked, this, &VideoGadgetWidget::pause);
    connect(m_ui->stopButton, &QPushButton::clicked, this, &VideoGadgetWidget::stop);
    connect(m_ui->consoleButton, &QPushButton::clicked, this, &VideoGadgetWidget::console);
    // initialize the button visibility/enabled state as for a Null pipeline
    onStateChanged(Pipeline::Null, Pipeline::Null, Pipeline::Null);
}
// Frees the generated UI object.
// The Ui_Form is allocated with new in the constructor; the original code only
// reset the pointer, leaking the object. Delete it first, then clear the
// pointer so any late msg() call remains a safe no-op.
VideoGadgetWidget::~VideoGadgetWidget()
{
    delete m_ui;
    m_ui = 0;
}
// Applies a gadget configuration: shows/hides the video area and, when the
// pipeline description changed, swaps it in (stopping a running pipeline
// first and restarting it afterwards). Also honors autoStart for an idle
// pipeline.
void VideoGadgetWidget::setConfiguration(VideoGadgetConfiguration *config)
{
    videoWidget()->setVisible(config->displayVideo());
    // m_ui->control->setEnabled(config->displayControls());
    bool restart = false;
    if (videoWidget()->pipelineDesc() != config->pipelineDesc()) {
        // new pipeline description: a running pipeline must be stopped first
        if (videoWidget()->isPlaying()) {
            restart = true;
            stop();
        }
        msg(QString("setting pipeline %0").arg(config->pipelineDesc()));
        videoWidget()->setPipelineDesc(config->pipelineDesc());
    }
    // restart if we stopped above, or auto-start an idle pipeline if requested
    if (restart || (!videoWidget()->isPlaying() && config->autoStart())) {
        start();
    }
}
// Starts the pipeline; the start button is disabled immediately and button
// state is restored by onStateChanged() once the widget reports the change.
void VideoGadgetWidget::start()
{
    msg(QString("starting..."));
    m_ui->startButton->setEnabled(false);
    videoWidget()->start();
}
// Pauses the pipeline; button state is restored by onStateChanged().
void VideoGadgetWidget::pause()
{
    msg(QString("pausing..."));
    m_ui->pauseButton->setEnabled(false);
    videoWidget()->pause();
}
// Stops the pipeline; unlike start()/pause(), the stop button is left
// enabled and is updated by onStateChanged() on the resulting transition.
void VideoGadgetWidget::stop()
{
    msg(QString("stopping..."));
    videoWidget()->stop();
}
// Toggles the visibility of the console/log area.
void VideoGadgetWidget::console()
{
    auto *browser = m_ui->consoleTextBrowser;

    browser->setVisible(!browser->isVisible());
}
// Updates the visibility/enabled state of the start/pause/stop buttons to
// reflect a pipeline state transition reported by the video widget.
// Defaults: everything enabled, only the stop button visible; each case then
// overrides as annotated ("start & !stop" = start visible, stop disabled, …).
void VideoGadgetWidget::onStateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState)
{
    Q_UNUSED(oldState);
    // msg(QString("state changed: ") + VideoWidget::name(oldState) + " -> " + VideoWidget::name(newState) + " / " + VideoWidget::name(pendingState));
    bool startEnabled = true;
    bool pauseEnabled = true;
    bool stopEnabled = true;
    bool startVisible = false;
    bool pauseVisible = false;
    bool stopVisible = true;
    switch (newState) {
    case Pipeline::Ready:
        // start & !stop
        startVisible = true;
        stopEnabled = false;
        break;
    case Pipeline::Paused:
        // the pending state disambiguates whether we are pausing, stopping or resuming
        if (pendingState == Pipeline::Playing) {
            // !pause & stop
            pauseVisible = true;
            pauseEnabled = false;
        } else if (pendingState == Pipeline::Ready) {
            // start & !stop
            startVisible = true;
            stopEnabled = false;
        } else {
            // start & stop
            startVisible = true;
        }
        break;
    case Pipeline::Playing:
        // pause & stop
        pauseVisible = true;
        break;
    default:
        // start & !stop
        startVisible = true;
        stopEnabled = false;
        break;
    }
    m_ui->startButton->setVisible(startVisible);
    m_ui->startButton->setEnabled(startEnabled);
    m_ui->pauseButton->setVisible(pauseVisible);
    m_ui->pauseButton->setEnabled(pauseEnabled);
    m_ui->stopButton->setVisible(stopVisible);
    m_ui->stopButton->setEnabled(stopEnabled);
}
// Appends a line to the console/log area; silently ignored once the UI has
// been torn down.
void VideoGadgetWidget::msg(const QString &str)
{
    if (!m_ui) {
        return;
    }
    m_ui->consoleTextBrowser->append(str);
}
// Accessor for the embedded VideoWidget from the generated UI.
VideoWidget *VideoGadgetWidget::videoWidget()
{
    return m_ui->video;
}

View File

@ -0,0 +1,68 @@
/**
******************************************************************************
*
* @file videogadgetwidget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETWIDGET_H_
#define VIDEOGADGETWIDGET_H_
#include "pipeline.h"
#include "ui_video.h"
#include <QFrame>
#include <QtCore/QEvent>
#include <QtGui/QResizeEvent>
#include <QtGui/QPaintEvent>
class VideoWidget;
class VideoGadgetConfiguration;
// Widget embedded in the video gadget: hosts the rendered video (VideoWidget
// from ui_video.ui) together with transport controls and a console browser
// for pipeline messages.
class VideoGadgetWidget : public QFrame {
    Q_OBJECT
public:
    VideoGadgetWidget(QWidget *parent = 0);
    ~VideoGadgetWidget();
    // Apply a gadget configuration (pipeline description, display flags, ...).
    void setConfiguration(VideoGadgetConfiguration *config);
private slots:
    void start();   // slot wired to the start control — presumably starts the pipeline (confirm in .cpp)
    void pause();   // slot wired to the pause control
    void stop();    // slot wired to the stop control
    void console(); // slot wired to the console control
    // Updates button visibility/enabled state to reflect the pipeline state
    // transition (old -> new, with a possibly pending target state).
    void onStateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState);
private:
    Ui_Form *m_ui;  // generated UI from ui_video.h; may be null before setup
    VideoGadgetConfiguration *config;
    void msg(const QString &str);  // append a message to the console browser
    VideoWidget *videoWidget();    // accessor for the embedded video widget
};
#endif /* VIDEOGADGETWIDGET_H_ */

View File

@ -0,0 +1,96 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>VideoOptionsPage</class>
<widget class="QWidget" name="VideoOptionsPage">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>378</width>
<height>300</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<layout class="QGridLayout" name="gridLayout">
<property name="margin">
<number>0</number>
</property>
<item row="6" column="0">
<widget class="QLabel" name="descLabel">
<property name="text">
<string>Pipeline:</string>
</property>
</widget>
</item>
<item row="6" column="1">
<widget class="QPlainTextEdit" name="descPlainTextEdit"/>
</item>
<item row="1" column="1">
<widget class="QCheckBox" name="displayVideoCheckBox">
<property name="text">
<string>Display video</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QCheckBox" name="displayControlsCheckBox">
<property name="text">
<string>Display controls</string>
</property>
</widget>
</item>
<item row="7" column="1">
<widget class="QPlainTextEdit" name="infoPlainTextEdit"/>
</item>
<item row="7" column="0">
<widget class="QLabel" name="infoLabel">
<property name="text">
<string>Info:</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QCheckBox" name="autoStartCheckBox">
<property name="text">
<string>Auto Start</string>
</property>
</widget>
</item>
<item row="4" column="1">
<widget class="QCheckBox" name="respectAspectRatioCheckBox">
<property name="text">
<string>Respect aspect ratio</string>
</property>
</widget>
</item>
<item row="8" column="1">
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QPushButton" name="helpButton">
<property name="text">
<string>Help</string>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</widget>
<resources/>
<connections/>
</ui>

View File

@ -0,0 +1,64 @@
/**
******************************************************************************
*
* @file videoplugin.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videoplugin.h"
#include "videogadgetfactory.h"
#include <extensionsystem/pluginmanager.h>
#include <QDebug>
#include <QtPlugin>
#include <QStringList>
// Construction is intentionally empty: all real setup happens in initialize().
VideoPlugin::VideoPlugin()
{}
// No explicit teardown: the factory is released by the plugin manager
// (it was registered via addAutoReleasedObject in initialize()).
VideoPlugin::~VideoPlugin()
{}
/**
 * Plugin initialization: create the video gadget factory and hand it to the
 * plugin manager for automatic release on shutdown.
 *
 * Parameter names and signature spacing aligned with the declaration in
 * videoplugin.h (arguments / errorString).
 *
 * @param arguments   command line arguments (unused)
 * @param errorString out parameter for a failure description (unused, never set)
 * @return always true — initialization cannot fail here
 */
bool VideoPlugin::initialize(const QStringList &arguments, QString *errorString)
{
    Q_UNUSED(arguments);
    Q_UNUSED(errorString);
    mf = new VideoGadgetFactory(this);
    addAutoReleasedObject(mf);
    return true;
}
// Nothing to do once all plugin extensions are initialized.
void VideoPlugin::extensionsInitialized()
{}
// No explicit shutdown work: auto-released objects are cleaned up by the
// plugin manager.
void VideoPlugin::shutdown()
{}

View File

@ -0,0 +1,51 @@
/**
******************************************************************************
*
* @file videoplugin.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOPLUGIN_H_
#define VIDEOPLUGIN_H_
#include <extensionsystem/iplugin.h>
class VideoGadgetFactory;
// GCS plugin entry point for the video gadget: on initialize() it creates a
// VideoGadgetFactory and registers it with the plugin manager.
class VideoPlugin : public ExtensionSystem::IPlugin {
    Q_OBJECT
    Q_PLUGIN_METADATA(IID "OpenPilot.Video")
public:
    VideoPlugin();
    ~VideoPlugin();
    // ExtensionSystem::IPlugin lifecycle hooks.
    void extensionsInitialized();
    bool initialize(const QStringList &arguments, QString *errorString);
    void shutdown();
private:
    VideoGadgetFactory *mf; // gadget factory; released by the plugin manager (addAutoReleasedObject)
};
#endif /* VIDEOPLUGIN_H_ */

View File

@ -2785,6 +2785,128 @@
</data>
</default>
</UAVObjectBrowser>
<VideoGadget>
<Default>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>true</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>videotestsrc ! autovideosink</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>false</respectAspectRatio>
</data>
</Default>
<Screen__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file__PCT__20__PCT__28fast__PCT__20__PCT__26__PCT__20big__PCT__29>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>false</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>dx9screencapsrc monitor=0 cursor=true ! tee name=t
t. ! queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 ! timeoverlay ! autovideosink
t. ! queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 ! videoconvert ! x264enc interlaced=true pass=quant quantizer=0 speed-preset=ultrafast byte-stream=true ! mpegpsmux ! filesink location=capture_fast.mpg</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>true</respectAspectRatio>
</data>
</Screen__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file__PCT__20__PCT__28fast__PCT__20__PCT__26__PCT__20big__PCT__29>
<Screen__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>false</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>dx9screencapsrc monitor=0 cursor=true ! tee name=t
t. ! queue ! timeoverlay ! autovideosink
t. ! queue ! videoconvert ! x264enc tune=zerolatency bitrate=498 ! mpegpsmux ! filesink location=capture.mpg</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>true</respectAspectRatio>
</data>
</Screen__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file>
<Screen__PCT__20-__PCT__20Play__PCT__20__PCT__28640x480__PCT__29>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>false</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>dx9screencapsrc monitor=0 cursor=true x=0 y=0 width=640 height=480 ! autovideosink</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>false</respectAspectRatio>
</data>
</Screen__PCT__20-__PCT__20Play__PCT__20__PCT__28640x480__PCT__29>
<Screen__PCT__20-__PCT__20Play>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>true</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>dx9screencapsrc monitor=0 cursor=true ! autovideosink</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>false</respectAspectRatio>
</data>
</Screen__PCT__20-__PCT__20Play>
<USB__PCT__20Camera__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file__PCT__20__PCT__28fast__PCT__20__PCT__26__PCT__20big__PCT__29>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>false</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>ksvideosrc device-index=0 ! tee name=t
t. ! queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 ! timeoverlay ! autovideosink
t. ! queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 ! videoconvert ! x264enc interlaced=true pass=quant quantizer=0 speed-preset=ultrafast byte-stream=true ! mpegpsmux ! filesink location=capture_fast.mpg</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>true</respectAspectRatio>
</data>
</USB__PCT__20Camera__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file__PCT__20__PCT__28fast__PCT__20__PCT__26__PCT__20big__PCT__29>
<USB__PCT__20Camera__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>false</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>ksvideosrc device-index=0 ! tee name=t
t. ! queue ! timeoverlay ! autovideosink
t. ! queue ! videoconvert ! x264enc tune=zerolatency bitrate=498 ! mpegpsmux ! filesink location=capture.mpg</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>true</respectAspectRatio>
</data>
</USB__PCT__20Camera__PCT__20-__PCT__20Capture__PCT__20to__PCT__20file>
<USB__PCT__20Camera__PCT__20-__PCT__20Play>
<configInfo>
<locked>false</locked>
<version>0.0.0</version>
</configInfo>
<data>
<autoStart>true</autoStart>
<displayControls>false</displayControls>
<displayVideo>true</displayVideo>
<pipelineDesc>ksvideosrc device-index=0 ! autovideosink</pipelineDesc>
<pipelineInfo></pipelineInfo>
<respectAspectRatio>false</respectAspectRatio>
</data>
</USB__PCT__20Camera__PCT__20-__PCT__20Play>
</VideoGadget>
<configInfo>
<locked>false</locked>
<version>1.2.0</version>