
Merged in filnet/librepilot/LP-109_video_gadget (pull request #454)

LP-109 video gadget

Approved-by: Philippe Renon <philippe_renon@yahoo.fr>
Approved-by: Lalanne Laurent <f5soh@free.fr>
Approved-by: Jan NIJS <dr.oblivium@gmail.com>
Approved-by: Brian Webb <webbbn@gmail.com>
Authored by Philippe Renon 2017-09-18 19:58:07 +00:00, committed by Lalanne Laurent
commit dbe95ae493
108 changed files with 7224 additions and 85 deletions


@ -9,7 +9,9 @@ build:
- if [ $$arch = 32 ]; then target=i686; fi
- if [ $$arch = 64 ]; then target=x86_64; fi
- echo -e "[librepilot-mingw]\nSigLevel = Optional TrustAll\nServer = http://download.librepilot.org/repo/mingw" >> /etc/pacman.conf
- pacman -Syu --noconfirm --noprogressbar --needed git unzip tar mingw-w64-${target}-toolchain mingw-w64-${target}-ccache mingw-w64-${target}-ntldd mingw-w64-${target}-qt5 mingw-w64-${target}-SDL mingw-w64-${target}-mesa mingw-w64-${target}-openssl mingw-w64-${target}-gdal-minimal mingw-w64-${target}-OpenSceneGraph mingw-w64-${target}-osgearth
- pacman -Syu --noconfirm --noprogressbar --needed git unzip tar mingw-w64-${target}-toolchain mingw-w64-${target}-ccache mingw-w64-${target}-ntldd mingw-w64-${target}-qt5 mingw-w64-${target}-SDL mingw-w64-${target}-mesa mingw-w64-${target}-openssl
- pacman -Syu --noconfirm --noprogressbar --needed mingw-w64-${target}-gdal-minimal mingw-w64-${target}-OpenSceneGraph mingw-w64-${target}-osgearth
- pacman -Syu --noconfirm --noprogressbar --needed mingw-w64-${target}-gstreamer mingw-w64-${target}-gst-plugins-base mingw-w64-${target}-gst-plugins-good mingw-w64-${target}-gst-plugins-bad
- mingw32-make all_sdk_install
- git config core.filemode false
- mingw32-make build-info && cat build/build-info.txt


@ -131,16 +131,22 @@ ifeq ($(UNAME), Linux)
GCS_WITH_OSG := 1
GCS_WITH_OSGEARTH := 1
GCS_COPY_OSG := 0
GCS_WITH_GSTREAMER := 0
GCS_COPY_GSTREAMER := 0
else ifeq ($(UNAME), Darwin)
UAVOBJGENERATOR := $(BUILD_DIR)/uavobjgenerator/uavobjgenerator
GCS_WITH_OSG := 1
GCS_WITH_OSGEARTH := 0
GCS_COPY_OSG := 1
GCS_WITH_GSTREAMER := 0
GCS_COPY_GSTREAMER := 0
else ifeq ($(UNAME), Windows)
UAVOBJGENERATOR := $(BUILD_DIR)/uavobjgenerator/uavobjgenerator.exe
GCS_WITH_OSG := 1
GCS_WITH_OSGEARTH := 1
GCS_COPY_OSG := 1
GCS_WITH_GSTREAMER := 1
GCS_COPY_GSTREAMER := 1
endif
export UAVOBJGENERATOR
@ -159,6 +165,13 @@ ifeq ($(GCS_WITH_OSG), 1)
endif
endif
ifeq ($(GCS_WITH_GSTREAMER), 1)
GCS_EXTRA_CONF += gstreamer
ifeq ($(GCS_COPY_GSTREAMER), 1)
GCS_EXTRA_CONF += copy_gstreamer
endif
endif
##############################
#
# All targets
@ -590,6 +603,10 @@ config_help:
@$(ECHO) " (Needed unless using system versions)"
@$(ECHO) " Options: 0 or 1"
@$(ECHO)
@$(ECHO) " GCS_WITH_GSTREAMER=$(GCS_WITH_GSTREAMER)"
@$(ECHO) " Build the GCS with GStreamer support, this enables the video gadget and extra PFD video views"
@$(ECHO) " Options: 0 or 1"
@$(ECHO)
@$(ECHO) " CCACHE=$(CCACHE)"
@$(ECHO) " A prefix to compiler invocations, usually 'ccache' or 'path/to/ccache'"
@$(ECHO)


@ -545,6 +545,7 @@ void PIOS_BOARD_IO_Configure_GCS_RCVR()
void PIOS_BOARD_IO_Configure_OPLink_RCVR()
{
uint32_t pios_oplinkrcvr_id;
OPLinkReceiverInitialize();
#if defined(PIOS_INCLUDE_RFM22B)
PIOS_OPLinkRCVR_Init(&pios_oplinkrcvr_id, pios_rfm22b_id);


@ -34,12 +34,12 @@
#define POW2(x) (1 << x)
// Command addresses
#define MS56XX_RESET 0x1E
#define MS56XX_CALIB_ADDR 0xA2 /* First sample is factory stuff */
#define MS56XX_CALIB_LEN 16
#define MS56XX_ADC_READ 0x00
#define MS56XX_PRES_ADDR 0x40
#define MS56XX_TEMP_ADDR 0x50
// Option to change the interleave between Temp and Pressure conversions
// Undef for normal operation
@ -128,7 +128,7 @@ const PIOS_SENSORS_Driver PIOS_MS56xx_Driver = {
*/
void PIOS_MS56xx_Init(const struct pios_ms56xx_cfg *cfg, int32_t i2c_device)
{
i2c_id = i2c_device;
ms56xx_address = cfg->address;
version = cfg->version;
@ -262,14 +262,14 @@ int32_t PIOS_MS56xx_ReadADC(void)
// Offset and sensitivity at actual temperature
if (version == MS56XX_VERSION_5611) {
// OFF = OFFT1 + TCO * dT = C2 * 2^16 + (C4 * dT) / 2^7
Offset = ((int64_t)CalibData.C[1]) * POW2(16) + (((int64_t)CalibData.C[3]) * deltaTemp) / POW2(7) - Offset2;
// SENS = SENST1 + TCS * dT = C1 * 2^15 + (C3 * dT) / 2^8
Sens = ((int64_t)CalibData.C[0]) * POW2(15) + (((int64_t)CalibData.C[2]) * deltaTemp) / POW2(8) - Sens2;
} else {
// OFF = OFFT1 + TCO * dT = C2 * 2^17 + (C4 * dT) / 2^6
Offset = ((int64_t)CalibData.C[1]) * POW2(17) + (((int64_t)CalibData.C[3]) * deltaTemp) / POW2(6) - Offset2;
// SENS = SENST1 + TCS * dT = C1 * 2^16 + (C3 * dT) / 2^7
Sens = ((int64_t)CalibData.C[0]) * POW2(16) + (((int64_t)CalibData.C[2]) * deltaTemp) / POW2(7) - Sens2;
}
// Temperature compensated pressure (10…1200mbar with 0.01mbar resolution)
@ -516,10 +516,11 @@ bool PIOS_MS56xx_driver_poll(__attribute__((unused)) uintptr_t context)
}
/* Poll the pressure sensor and return the temperature and pressure. */
bool PIOS_MS56xx_Read(float *temperature, float *pressure) {
bool PIOS_MS56xx_Read(float *temperature, float *pressure)
{
if (PIOS_MS56xx_driver_poll(0)) {
*temperature = results.temperature;
*pressure = results.sample;
return true;
}
return false;


@ -38,7 +38,7 @@
#include <pios_oplinkrcvr_priv.h>
// Put receiver in failsafe if not updated within timeout
#define PIOS_OPLINK_RCVR_TIMEOUT_MS 100
/* Provide a RCVR driver */
static int32_t PIOS_OPLinkRCVR_Get(uint32_t rcvr_id, uint8_t channel);


@ -1,4 +1,3 @@
/**
******************************************************************************
* @addtogroup OpenPilotSystem OpenPilot System


@ -1,4 +1,3 @@
/**
******************************************************************************
* @addtogroup OpenPilotSystem OpenPilot System


@ -91,7 +91,7 @@ win32 {
for(dll, QT_DLLS) {
addCopyFileTarget($${dll},$$[QT_INSTALL_BINS],$${GCS_APP_PATH})
win32:addCopyDependenciesTarget($${dll},$$[QT_INSTALL_BINS],$${GCS_APP_PATH})
addCopyDependenciesTarget($${dll},$$[QT_INSTALL_BINS],$${GCS_APP_PATH})
}
# copy OpenSSL DLLs


@ -280,6 +280,10 @@ void systemInit()
QSurfaceFormat format = QSurfaceFormat::defaultFormat();
format.setSwapInterval(0);
QSurfaceFormat::setDefaultFormat(format);
// see https://bugreports.qt.io/browse/QTBUG-40332
int timeout = std::numeric_limits<int>::max();
qputenv("QT_BEARER_POLL_TIMEOUT", QString::number(timeout).toLatin1());
}
static FileLogger *logger = NULL;


@ -102,9 +102,11 @@ void PluginErrorView::update(PluginSpec *spec)
case PluginSpec::Stopped:
text = tr("Stopped");
tooltip = tr("Plugin was shut down");
break;
case PluginSpec::Deleted:
text = tr("Deleted");
tooltip = tr("Plugin ended its life cycle and was deleted");
break;
}
m_ui->state->setText(text);
m_ui->state->setToolTip(tooltip);


@ -0,0 +1,83 @@
win32:gstreamer {
GST_BIN_DIR = $$system(pkg-config --variable=exec_prefix gstreamer-1.0)/bin
GST_PLUGINS_DIR = $$system(pkg-config --variable=pluginsdir gstreamer-1.0)
# gstreamer libraries
GST_LIBS = \
libgstreamer-1.0-0.dll
gstreamer_utilities:GST_LIBS += \
gst-inspect-1.0.exe \
gst-launch-1.0.exe
for(lib, GST_LIBS) {
addCopyFileTarget($${lib},$${GST_BIN_DIR},$${GCS_APP_PATH})
addCopyDependenciesTarget($${lib},$${GST_BIN_DIR},$${GCS_APP_PATH})
}
# gstreamer core
GST_PLUGINS = \
libgstcoreelements.dll
# gst-plugins-base
GST_PLUGINS += \
libgstapp.dll \
libgstaudiotestsrc.dll \
libgstpango.dll \
libgstplayback.dll \
libgsttcp.dll \
libgsttypefindfunctions.dll \
libgstvideoconvert.dll \
libgstvideorate.dll \
libgstvideoscale.dll \
libgstvideotestsrc.dll
# gst-plugins-good
GST_PLUGINS += \
libgstautodetect.dll \
libgstavi.dll \
libgstdeinterlace.dll \
libgstdirectsoundsink.dll \
libgstimagefreeze.dll \
libgstjpeg.dll \
libgstrawparse.dll \
libgstrtp.dll \
libgstrtpmanager.dll \
libgstrtsp.dll \
libgstudp.dll \
libgstvideomixer.dll
# gst-plugins-bad
GST_PLUGINS += \
libgstaudiovisualizers.dll \
libgstautoconvert.dll \
libgstcompositor.dll \
libgstd3dvideosink.dll \
libgstdebugutilsbad.dll \
libgstdirectsoundsrc.dll \
libgstopengl.dll \
libgstinter.dll \
libgstmpegpsdemux.dll \
libgstmpegpsmux.dll \
libgstmpegtsdemux.dll \
libgstmpegtsmux.dll \
libgstvideoparsersbad.dll \
libgstwinks.dll \
libgstwinscreencap.dll
# gst-plugins-ugly
GST_PLUGINS += \
libgstmpeg2dec.dll \
libgstx264.dll
# gst-libav
GST_PLUGINS += \
libgstlibav.dll
for(lib, GST_PLUGINS) {
addCopyFileTarget($${lib},$${GST_PLUGINS_DIR},$${GCS_LIBRARY_PATH}/gstreamer-1.0)
addCopyDependenciesTarget($${lib},$${GST_PLUGINS_DIR},$${GCS_APP_PATH})
}
}


@ -0,0 +1,136 @@
/**
******************************************************************************
*
* @file devicemonitor.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "devicemonitor.h"
#include "gst_util.h"
#include <gst/gst.h>
#include <QDebug>
static GstBusSyncReply my_bus_sync_func(GstBus *bus, GstMessage *message, gpointer user_data)
{
Q_UNUSED(bus)
DeviceMonitor * dm;
GstDevice *device;
gchar *name;
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_DEVICE_ADDED:
gst_message_parse_device_added(message, &device);
name = gst_device_get_display_name(device);
dm = (DeviceMonitor *)user_data;
QMetaObject::invokeMethod(dm, "device_added", Qt::QueuedConnection,
Q_ARG(QString, QString(name)));
g_free(name);
break;
case GST_MESSAGE_DEVICE_REMOVED:
gst_message_parse_device_removed(message, &device);
name = gst_device_get_display_name(device);
dm = (DeviceMonitor *)user_data;
QMetaObject::invokeMethod(dm, "device_removed", Qt::QueuedConnection,
Q_ARG(QString, QString(name)));
g_free(name);
break;
default:
break;
}
// no need to pass it to the async queue, there is none...
return GST_BUS_DROP;
}
DeviceMonitor::DeviceMonitor(QObject *parent) : QObject(parent)
{
// initialize gstreamer
gst::init(NULL, NULL);
monitor = gst_device_monitor_new();
GstBus *bus = gst_device_monitor_get_bus(monitor);
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)my_bus_sync_func, this, NULL);
gst_object_unref(bus);
GstCaps *caps = NULL; // gst_caps_new_empty_simple("video/x-raw");
const gchar *classes = "Video/Source";
gst_device_monitor_add_filter(monitor, classes, caps);
if (caps) {
gst_caps_unref(caps);
}
if (!gst_device_monitor_start(monitor)) {
qWarning() << "Failed to start device monitor";
}
}
DeviceMonitor::~DeviceMonitor()
{
gst_device_monitor_stop(monitor);
gst_object_unref(monitor);
}
QList<Device> DeviceMonitor::devices() const
{
QList<Device> devices;
GList *list = gst_device_monitor_get_devices(monitor);
while (list != NULL) {
gchar *name;
gchar *device_class;
GstDevice *device = (GstDevice *)list->data;
name = gst_device_get_display_name(device);
device_class = gst_device_get_device_class(device);
devices << Device(name, device_class);
g_free(name);
g_free(device_class);
gst_object_unref(device);
list = g_list_remove_link(list, list);
}
return devices;
}
void DeviceMonitor::device_added(QString name)
{
// qDebug() << "**** ADDED:" << name;
emit deviceAdded(name);
}
void DeviceMonitor::device_removed(QString name)
{
// qDebug() << "**** REMOVED:" << name;
emit deviceRemoved(name);
}


@ -0,0 +1,73 @@
/**
******************************************************************************
*
* @file devicemonitor.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef DEVICEMONITOR_H_
#define DEVICEMONITOR_H_
#include "gst_global.h"
#include <QObject>
typedef struct _GstDeviceMonitor GstDeviceMonitor;
class Device {
public:
Device(QString displayName, QString deviceClass) : m_displayName(displayName), m_deviceClass(deviceClass)
{}
QString displayName() const
{
return m_displayName;
}
QString deviceClass() const
{
return m_deviceClass;
}
private:
QString m_displayName;
QString m_deviceClass;
};
class GST_LIB_EXPORT DeviceMonitor : public QObject {
Q_OBJECT
public:
DeviceMonitor(QObject *parent = NULL);
virtual ~DeviceMonitor();
QList<Device> devices() const;
signals:
void deviceAdded(QString name);
void deviceRemoved(QString name);
private:
GstDeviceMonitor *monitor;
private slots:
void device_added(QString name);
void device_removed(QString name);
};
#endif /* DEVICEMONITOR_H_ */
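
A minimal usage sketch (not part of this commit; the wiring to the video gadget is assumed) showing how a client could list the capture devices currently seen by the monitor and log hot-plug notifications through the Qt signals declared above:

#include "devicemonitor.h"
#include <QDebug>
#include <QObject>

static void watchVideoSources(DeviceMonitor *monitor)
{
    // enumerate the devices the GStreamer device monitor already knows about
    foreach(const Device &d, monitor->devices()) {
        qDebug() << d.displayName() << "-" << d.deviceClass();
    }
    // log hot-plug events as they are forwarded from the GStreamer bus thread
    QObject::connect(monitor, &DeviceMonitor::deviceAdded,
                     [](QString name) { qDebug() << "device added:" << name; });
    QObject::connect(monitor, &DeviceMonitor::deviceRemoved,
                     [](QString name) { qDebug() << "device removed:" << name; });
}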


@ -0,0 +1,38 @@
/**
******************************************************************************
*
* @file gst_global.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef GST_GLOBAL_H
#define GST_GLOBAL_H
#include <QtCore/qglobal.h>
#if defined(GST_LIB_LIBRARY)
# define GST_LIB_EXPORT Q_DECL_EXPORT
#else
# define GST_LIB_EXPORT Q_DECL_IMPORT
#endif
#endif // GST_GLOBAL_H


@ -0,0 +1,136 @@
/**
******************************************************************************
*
* @file gst_util.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "gst_util.h"
#include <gst/gst.h>
#ifdef USE_OPENCV
#include "plugins/cameracalibration/gstcameracalibration.h"
#include "plugins/cameracalibration/gstcameraundistort.h"
#endif
#include "utils/pathutils.h"
#include <QDebug>
static bool initialized = false;
gboolean gst_plugin_librepilot_register(GstPlugin *plugin)
{
#ifdef USE_OPENCV
if (!gst_camera_calibration_plugin_init(plugin)) {
return FALSE;
}
if (!gst_camera_undistort_plugin_init(plugin)) {
return FALSE;
}
#else
Q_UNUSED(plugin)
#endif
return TRUE;
}
void gst_plugin_librepilot_register()
{
gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR, "librepilot",
"LibrePilot plugin", gst_plugin_librepilot_register, "1.10.0", "GPL",
"librepilot", "LibrePilot", "http://librepilot.org/");
}
// see http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gst-running.html
void gst::init(int *argc, char * *argv[])
{
// TODO Not thread safe. Does it need to be?
if (initialized) {
return;
}
initialized = true;
// qputenv("GST_DEBUG", "3");
// qputenv("GST_DEBUG", "3,rtspsrc:6,udpsrc:6");
// qputenv("GST_DEBUG", "3,bin:6");
// qputenv("GST_DEBUG", "3,rtpjitterbuffer:6");
// qputenv("GST_DEBUG_FILE", "gst.log");
// qputenv("GST_DEBUG_DUMP_DOT_DIR", ".");
#ifdef Q_OS_WIN
qputenv("GST_PLUGIN_PATH_1_0", (Utils::GetLibraryPath() + "gstreamer-1.0").toLatin1());
#endif
qDebug() << "gstreamer - initializing";
GError *error = NULL;
if (!gst_init_check(argc, argv, &error)) {
qCritical() << "failed to initialize gstreamer";
return;
}
qDebug() << "gstreamer - version:" << gst_version_string();
qDebug() << "gstreamer - plugin system path:" << qgetenv("GST_PLUGIN_SYSTEM_PATH_1_0");
qDebug() << "gstreamer - plugin path:" << qgetenv("GST_PLUGIN_PATH_1_0");
qDebug() << "gstreamer - registering plugins";
// GST_PLUGIN_STATIC_REGISTER(librepilot);
gst_plugin_librepilot_register();
#ifdef Q_OS_MAC
GstRegistry *reg = gst_registry_get();
GstPluginFeature *feature = gst_registry_lookup_feature(reg, "osxvideosink");
if (feature) {
// raise rank of osxvideosink so it gets selected by autovideosink
// if not doing that then autovideosink selects the glimagesink which fails in Qt
gst_plugin_feature_set_rank(feature, GST_RANK_PRIMARY);
gst_object_unref(feature);
}
#endif
#ifdef USE_OPENCV
// see http://stackoverflow.com/questions/32477403/how-to-know-if-sse2-is-activated-in-opencv
// see http://answers.opencv.org/question/696/how-to-enable-vectorization-in-opencv/
if (!cv::checkHardwareSupport(CV_CPU_SSE)) {
qWarning() << "SSE not supported";
}
if (!cv::checkHardwareSupport(CV_CPU_SSE2)) {
qWarning() << "SSE2 not supported";
}
if (!cv::checkHardwareSupport(CV_CPU_SSE3)) {
qWarning() << "SSE3 not supported";
}
qDebug() << "MMX :" << cv::checkHardwareSupport(CV_CPU_MMX);
qDebug() << "SSE :" << cv::checkHardwareSupport(CV_CPU_SSE);
qDebug() << "SSE2 :" << cv::checkHardwareSupport(CV_CPU_SSE2);
qDebug() << "SSE3 :" << cv::checkHardwareSupport(CV_CPU_SSE3);
qDebug() << "SSE4_1 :" << cv::checkHardwareSupport(CV_CPU_SSE4_1);
qDebug() << "SSE4_2 :" << cv::checkHardwareSupport(CV_CPU_SSE4_2);
#endif
}
QString gst::version(void)
{
init(NULL, NULL);
return QString(gst_version_string());
}
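
For reference, a small call-sequence sketch (assumed to run somewhere inside the GCS, e.g. at plugin start-up; the function name is illustrative). gst::init() is guarded by the 'initialized' flag above, so calling it more than once is harmless:

#include "gst_util.h"
#include <QDebug>

void logGstVersion()
{
    gst::init(NULL, NULL);                  // idempotent, see the guard in gst::init()
    qDebug() << "using" << gst::version();  // e.g. "GStreamer 1.10.x"
}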


@ -0,0 +1,39 @@
/**
******************************************************************************
*
* @file gst_util.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef GST_UTIL_H
#define GST_UTIL_H
#include "gst_global.h"
#include <QString>
namespace gst {
GST_LIB_EXPORT void init(int *argc, char * *argv[]);
GST_LIB_EXPORT QString version();
}
#endif // GST_UTIL_H


@ -0,0 +1,3 @@
LIBS *= -l$$qtLibraryName(GCSGStreamer)
INCLUDEPATH += $$GCS_SOURCE_TREE/src/libs/gstreamer


@ -0,0 +1,29 @@
TEMPLATE = lib
TARGET = GCSGStreamer
DEFINES += GST_LIB_LIBRARY
QT += widgets
include(../../library.pri)
include(../utils/utils.pri)
include(gstreamer_dependencies.pri)
include(plugins/plugins.pro)
HEADERS += \
gst_global.h \
gst_util.h \
devicemonitor.h \
pipeline.h \
pipelineevent.h \
overlay.h \
videowidget.h
SOURCES += \
gst_util.cpp \
devicemonitor.cpp \
pipeline.cpp \
videowidget.cpp
copy_gstreamer:include(copydata.pro)


@ -0,0 +1,32 @@
DEFINES += USE_GSTREAMER
opencv:DEFINES += USE_OPENCV
macx {
GLIB_DIR = $$system(brew --prefix glib)
GSTREAMER_DIR = $$system(brew --prefix gstreamer)
GST_BASE_DIR = $$system(brew --prefix gst-plugins-base)
message(Using glib from here: $$GLIB_DIR)
message(Using gstreamer from here: $$GSTREAMER_DIR)
message(Using gst base from here: $$GST_BASE_DIR)
INCLUDEPATH += $$GLIB_DIR/include/glib-2.0
INCLUDEPATH += $$GLIB_DIR/lib/glib-2.0/include
INCLUDEPATH += $$GST_BASE_DIR/include/gstreamer-1.0/
INCLUDEPATH += $$GSTREAMER_DIR/include/gstreamer-1.0
INCLUDEPATH += $$GSTREAMER_DIR/lib/gstreamer-1.0/include
LIBS +=-L$$GLIB_DIR/lib
LIBS += -L$$GSTREAMER_DIR/lib -L$$GST_BASE_DIR/lib
LIBS += -lglib-2.0 -lgobject-2.0
LIBS += -lgstreamer-1.0 -lgstapp-1.0 -lgstpbutils-1.0 -lgstvideo-1.0
}
linux|win32 {
CONFIG += link_pkgconfig
PKGCONFIG += glib-2.0 gobject-2.0
PKGCONFIG += gstreamer-1.0 gstreamer-app-1.0 gstreamer-video-1.0
opencv:PKGCONFIG += opencv
}


@ -0,0 +1,155 @@
Tips:
- Measuring video latency: display the time on the video and film the video output -> the time difference between the two displayed frames is the latency
Limitations:
- It is not possible to view a webcam in two different gadgets (the same is *not* true for DirectSound sources),
but it is not really an issue, as it is possible to tee a video source in the pipeline itself
Issues:
- bad: libgstchromaprint - libchromaprint needs avcodec-56 (vs 57) and avutil-54 (vs 55)
- bad: libgstfragmented - needs libnettle-6-1 (vs 6-2) - was renamed to hls
- bad: libgstx265 - needs rebuild
- need to rebuild libgstpluginsbad and libchromaprint
Todo:
- should use openglvideosink for PFD
- save config as QR code and ...
- split cameraconfiguration -> cameraundistort
- exclude gst plugins from uncrustify
- fix crash on unsupported formats:
- undistort should be passthrough when not enabled
gst-launch-1.0.exe -v -m autovideosrc ! video/x-raw,format=BGRA,width=800,height=600 ! videoconvert ! queue ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
gst-launch-1.0.exe -v -m udpsrc port=5000 ! "application/x-rtp, payload=127" ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! autovideosink
autovideosrc ! videoconvert ! queue ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
autovideosrc ! queue ! videoscale ! video/x-raw,width=320,height=200 ! videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
autovideosrc ! queue ! videoscale ! videorate ! video/x-raw,width=320,height=240,framerate=30/1 ! videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
udpsrc port=5000 ! application/x-rtp,payload=96,clock-rate=90000 ! rtpjitterbuffer latency=30 ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! fpsdisplaysink
RTSP
server : ./test-launch.exe "( videotestsrc ! x264enc tune=zerolatency ! rtph264pay name=pay0 pt=96 )"
client : gst-launch-1.0.exe -v -m rtspsrc location=rtsp://127.0.0.1:8554/test latency=30 ! decodebin ! timeoverlay ! autovideosink
Qt:
Line 250058: 0:02:34.185436460 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:02:34.081238026, base 0:00:00.050268441, recv_diff 0:02:34.030969585, slope 8
Line 250059: 0:02:34.185499451 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta -2352638, new min: -2417925
Line 250060: 0:02:34.185552513 5988 d5a0480 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2398275, out 0:02:34.081192389
RTP
server : gst-launch-1.0.exe -v -m videotestsrc ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
client : gst-launch-1.0.exe -v -m udpsrc port=5000 ! application/x-rtp,payload=96,clock-rate=90000 ! rtpjitterbuffer ! rtph264depay ! decodebin ! videoconvert ! timeoverlay ! autovideosink
WIFI CAM RTSP
client : gst-launch-1.0.exe -v -m rtspsrc location=rtsp://192.168.42.1/AmbaStreamTest latency=30 ! decodebin ! timeoverlay ! autovideosink
Qt:
Line 14594: 0:00:28.489562097 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.812089201, base 0:00:04.161093352, recv_diff 0:00:15.650995849, slope 7
Line 14595: 0:00:28.489625088 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 2029182, new min: -2061750
Line 14596: 0:00:28.489677219 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2246751, out 0:00:19.807813268
Line 14612: 0:00:28.527222391 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.849735841, base 0:00:04.161093352, recv_diff 0:00:15.688642489, slope 7
Line 14613: 0:00:28.527285692 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 6309156, new min: -2061750
Line 14614: 0:00:28.527339685 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2245270, out 0:00:19.841181415
Line 14630: 0:00:28.564027806 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:19.886522948, base 0:00:04.161093352, recv_diff 0:00:15.725429596, slope 7
Line 14631: 0:00:28.564091728 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:642:calculate_skew: delta 9729596, new min: -2061750
Line 14632: 0:00:28.564145410 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2243801, out 0:00:19.874549551
Line 14654: 0:00:31.712747597 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.035042595, base 0:00:04.161093352, recv_diff 0:00:18.873949243, slope 6
Line 14655: 0:00:31.712811519 9388 d5b9518 WARN rtpjitterbuffer rtpjitterbuffer.c:570:calculate_skew: delta - skew: 0:00:03.127126377 too big, reset skew
Line 14656: 0:00:31.712867063 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 0, delta 0
Line 14657: 0:00:31.712919194 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew 0, out 0:00:23.035042595
Line 14759: 0:00:31.720619622 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.043193270, base 0:00:23.035042595, recv_diff 0:00:00.008150675, slope 32
Line 14760: 0:00:31.720681061 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 1, delta -25215991
Line 14761: 0:00:31.720734123 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -2521, out 0:00:23.068406740
Line 14774: 0:00:31.721641753 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.044219745, base 0:00:23.035042595, recv_diff 0:00:00.009177150, slope 58
Line 14775: 0:00:31.721703813 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 2, delta -57556183
Line 14776: 0:00:31.721755633 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -54319, out 0:00:23.101721609
Line 14789: 0:00:31.722667608 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.045249324, base 0:00:23.035042595, recv_diff 0:00:00.010206729, slope 78
Line 14790: 0:00:31.722730288 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 3, delta -89893271
Line 14791: 0:00:31.722782729 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -278916, out 0:00:23.134863679
Line 14804: 0:00:31.723697497 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.046273007, base 0:00:23.035042595, recv_diff 0:00:00.011230412, slope 95
Line 14805: 0:00:31.723759557 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 4, delta -122236254
Line 14806: 0:00:31.723811687 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -717962, out 0:00:23.167791299
[...]
Line 14864: 0:00:31.727785401 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.050366186, base 0:00:23.035042595, recv_diff 0:00:00.015323591, slope 139
Line 14865: 0:00:31.727851185 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 8, delta -251609742
Line 14866: 0:00:31.727903626 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -10312755, out 0:00:23.291663173
Line 14964: 0:00:31.732567449 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.055371651, base 0:00:23.035042595, recv_diff 0:00:00.020329056, slope 118
Line 14965: 0:00:31.732595996 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 9, delta -279970944
Line 14966: 0:00:31.732620200 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -16380064, out 0:00:23.318962531
[...]
Line 15293: 0:00:31.746166387 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.068966555, base 0:00:23.035042595, recv_diff 0:00:00.033923960, slope 180
Line 15294: 0:00:31.746194935 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 23, delta -733509373
Line 15295: 0:00:31.746218828 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -430859680, out 0:00:23.371616248
Line 15310: 0:00:31.746791023 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:563:calculate_skew: time 0:00:23.069478242, base 0:00:23.035042595, recv_diff 0:00:00.034435647, slope 186
Line 15311: 0:00:31.746820812 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:580:calculate_skew: filling 24, delta -766364353
Line 15312: 0:00:31.746845636 9388 d5b9518 DEBUG rtpjitterbuffer rtpjitterbuffer.c:664:calculate_skew: skew -484540427, out 0:00:23.351302168
[...]
The high skew values can also be seen when using gst-launch, but no pauses occur there...
The pause duration is variable (~4s), but the pause always falls on the beat every 10s (if the first pause is at 9s, the next will be at 19s, then 29s, etc.).
Is it possible to disable the rtpjitterbuffer?
Qos: element autovideosink1-actual-sink-d3dvideo sent qos event: live: 1; running time: 30719164049; stream time: 26558070697; timestamp: 30719164049; duration: 33366666 jitter: 3029708354; proportion: 0.15581; quality: 1000000; format: ; processed: 609; dropped: 2;
Wifi stall
0:04:39.600000124 8296 ee56ba0 LOG udpsrc gstudpsrc.c:882:gst_udpsrc_create:<udpsrc0> doing select, timeout -1
0:04:39.628925951 8296 ee56b10 DEBUG rtspsrc gstrtspsrc.c:2260:gst_rtspsrc_handle_src_event:<rtspsrc0> pad rtspsrc0:recv_rtp_src_0_275680090_96 received event qos
0:04:39.661793203 8296 ee56b10 DEBUG rtspsrc gstrtspsrc.c:2260:gst_rtspsrc_handle_src_event:<rtspsrc0> pad rtspsrc0:recv_rtp_src_0_275680090_96 received event qos
0:04:42.688912775 8296 ee56ba0 LOG udpsrc gstudpsrc.c:1014:gst_udpsrc_create:<udpsrc0> read packet of 26 bytes
0:04:42.689055513 8296 ee56ba0 WARN rtpjitterbuffer rtpjitterbuffer.c:570:calculate_skew: delta - skew: 0:00:03.058485393 too big, reset skew
Simply instantiating a QNetworkAccessManager will cause the active wifi network connection to stall for 3 seconds every 10s.
This affects not just the Qt app but also all other processes using the wifi connection.
From what I have gathered, this is due to bearer management polling all interfaces every 10s (the interval can be changed with the QT_BEARER_POLL_TIMEOUT environment variable).
On Windows, polling the wifi interface triggers an SSID scan, and that scan stalls the active connection. This might not happen with all wifi devices, but it does with mine.
In my case, setting QT_BEARER_POLL_TIMEOUT to less than 4 seconds results in a DoS ;)
https://msdn.microsoft.com/fr-fr/library/windows/desktop/ms706783(v=vs.85).aspx
quote: "Since it becomes more difficult for a wireless interface to send and receive data packets while a scan is occurring, the WlanScan function may increase latency until the network scan is complete."
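
A sketch of the workaround this commit applies in the GCS systemInit() (see the systemInit() hunk earlier in this diff); the wrapper function name here is just for illustration, the value only needs to be large enough that the poll never fires during a session:

#include <QtGlobal>
#include <QString>
#include <limits>

// Disable Qt bearer polling before any QNetworkAccessManager is created,
// see https://bugreports.qt.io/browse/QTBUG-40332
static void disableBearerPolling()
{
    int timeout = std::numeric_limits<int>::max();
    qputenv("QT_BEARER_POLL_TIMEOUT", QString::number(timeout).toLatin1());
}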
# transmit gstreamer buffers over network
tcpserversrc host=0.0.0.0 port=50002 ! gdpdepay ! autovideoconvert ! autovideosink
v4l2src num-buffers=1 ! gdppay ! tcpclientsink host=0.0.0.0 port=50002
# play a rtsp stream
rtspsrc location=rtsp://192.168.42.1/AmbaStreamTest latency=30 ! decodebin ! timeoverlay ! autovideosink
# play video and sound
ksvideosrc ! queue ! mix.
directsoundsrc ! tee name=split ! queue ! directsoundsink
split. ! queue ! wavescope ! queue ! mix.
videomixer name=mix ! queue ! timeoverlay ! autovideosink
ksvideosrc ! queue ! timeoverlay ! autovideosink
directsoundsrc ! queue ! directsoundsink
directsoundsrc ! tee name=split ! queue ! directsoundsink
split. ! queue ! wavescope ! autovideosink
filesrc location=C:/Users/Utilisateur/Desktop/hst_2.mpg ! decodebin ! autovideosink
dx9screencapsrc ! queue ! videoconvert ! x264enc bitrate=498 ! avimux ! filesink location=capture.avi
compositor name=mixer background=black sink_0::offset=0 sink_1::offset=0 ! videoconvert ! autovideosink
ksvideosrc device_index=0 ! decodebin ! identity drop-probability=0 ! queue max-size-buffers=0 max-size-bytes=0 max-size-time=10000000000 ! mixer.
udpsrc port=9000 ! identity drop-probability=0 dump=false ! <something> ! video/x-raw, width=640, height=480 ! videorate drop-only=true ! video/x-raw, framerate=10/1 ! queue max-size-buffers=0 max-size-bytes=0 max-size-time=10000000000 ! mixer.
compositor name=mixer sink_1::ypos=50 ! videoconvert ! timeoverlay shaded-background=true auto-resize=false ! autovideosink sync=true
ksvideosrc ! video/x-raw ! decodebin ! queue ! mixer.
udpsrc port=9000 ! identity dump=false ! textrender halignment=left line-alignment=left ! video/x-raw, width=320, height=120 ! videorate drop-only=true ! video/x-raw, framerate=10/1 ! queue ! mixer.


@ -0,0 +1,39 @@
/**
******************************************************************************
*
* @file overlay.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef OVERLAY_H_
#define OVERLAY_H_
class Overlay {
public:
Overlay()
{}
virtual ~Overlay()
{}
virtual void expose() = 0;
};
#endif /* OVERLAY_H_ */


@ -0,0 +1,38 @@
/**
******************************************************************************
*
* @file pipeline.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "pipeline.h"
#include "gst_util.h"
Pipeline::Pipeline()
{
// initialize gstreamer
gst::init(NULL, NULL);
}
Pipeline::~Pipeline()
{}


@ -0,0 +1,41 @@
/**
******************************************************************************
*
* @file pipeline.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef PIPELINE_H_
#define PIPELINE_H_
#include "gst_global.h"
class GST_LIB_EXPORT Pipeline {
public:
enum State {
VoidPending, Null, Ready, Paused, Playing
};
Pipeline();
virtual ~Pipeline();
};
#endif /* PIPELINE_H_ */


@ -0,0 +1,413 @@
/**
******************************************************************************
*
* @file pipelineevent.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef PIPELINEEVENT_H_
#define PIPELINEEVENT_H_
#include <QEvent>
#include <QString>
#include "pipeline.h"
#include "overlay.h"
class PipelineEvent : public QEvent {
public:
// event types
static const QEvent::Type PrepareWindowId;
static const QEvent::Type StateChange;
static const QEvent::Type StreamStatus;
static const QEvent::Type NewClock;
static const QEvent::Type ClockProvide;
static const QEvent::Type ClockLost;
static const QEvent::Type Progress;
static const QEvent::Type Latency;
static const QEvent::Type Qos;
static const QEvent::Type Eos;
static const QEvent::Type Error;
static const QEvent::Type Warning;
static const QEvent::Type Info;
PipelineEvent(QEvent::Type type, QString src) :
QEvent(type), src(src)
{}
virtual ~PipelineEvent()
{}
public:
QString src;
};
const QEvent::Type PipelineEvent::PrepareWindowId = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::StateChange = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::StreamStatus = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::NewClock = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::ClockProvide = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::ClockLost = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Progress = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Latency = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Qos = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Eos = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Error = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Warning = static_cast<QEvent::Type>(QEvent::registerEventType());
const QEvent::Type PipelineEvent::Info = static_cast<QEvent::Type>(QEvent::registerEventType());
class PrepareWindowIdEvent : public PipelineEvent {
public:
PrepareWindowIdEvent(QString src, Overlay *overlay) :
PipelineEvent(PrepareWindowId, src), overlay(overlay)
{}
Overlay *getOverlay()
{
return overlay;
}
static QEvent::Type type()
{
return PrepareWindowId;
}
private:
Overlay *overlay;
};
class StateChangedEvent : public PipelineEvent {
public:
StateChangedEvent(QString src, Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState) :
PipelineEvent(StateChange, src), oldState(oldState), newState(newState), pendingState(pendingState)
{}
static QEvent::Type type()
{
return StateChange;
}
Pipeline::State getOldState()
{
return oldState;
}
Pipeline::State getNewState()
{
return newState;
}
Pipeline::State getPendingState()
{
return pendingState;
}
static const char *stateName(Pipeline::State state)
{
switch (state) {
case Pipeline::VoidPending:
return "VoidPending";
case Pipeline::Null:
return "Null";
case Pipeline::Ready:
return "Ready";
case Pipeline::Paused:
return "Paused";
case Pipeline::Playing:
return "Playing";
}
return "<unknown>";
}
private:
Pipeline::State oldState;
Pipeline::State newState;
Pipeline::State pendingState;
};
class StreamStatusEvent : public PipelineEvent {
public:
enum StreamStatusType {
Create, Enter, Leave, Destroy, Start, Pause, Stop, Null
};
StreamStatusEvent(QString src, StreamStatusType status, QString owner) :
PipelineEvent(StreamStatus, src), status(status), owner(owner)
{}
static QEvent::Type type()
{
return StreamStatus;
}
StreamStatusType getStatus()
{
return status;
}
const char *getStatusName()
{
return statusName(status);
}
static const char *statusName(StreamStatusType status)
{
switch (status) {
case StreamStatusEvent::Create:
return "Create";
case StreamStatusEvent::Enter:
return "Enter";
case StreamStatusEvent::Leave:
return "Leave";
case StreamStatusEvent::Destroy:
return "Destroy";
case StreamStatusEvent::Start:
return "Start";
case StreamStatusEvent::Pause:
return "Pause";
case StreamStatusEvent::Stop:
return "Stop";
case StreamStatusEvent::Null:
return "Null";
}
return "<unknown>";
}
QString getOwner()
{
return owner;
}
private:
StreamStatusType status;
QString owner;
};
class ClockEvent : public PipelineEvent {
public:
ClockEvent(QEvent::Type type, QString src, QString name) : PipelineEvent(type, src), name(name)
{}
QString getName()
{
return name;
}
private:
QString name;
};
class NewClockEvent : public ClockEvent {
public:
NewClockEvent(QString src, QString name) : ClockEvent(NewClock, src, name)
{}
static QEvent::Type type()
{
return NewClock;
}
};
class ClockProvideEvent : public ClockEvent {
public:
ClockProvideEvent(QString src, QString name, bool ready) : ClockEvent(ClockProvide, src, name), ready(ready)
{}
static QEvent::Type type()
{
return ClockProvide;
}
bool isReady()
{
return ready;
}
private:
bool ready;
};
class ClockLostEvent : public ClockEvent {
public:
ClockLostEvent(QString src, QString name) : ClockEvent(ClockLost, src, name)
{}
static QEvent::Type type()
{
return ClockLost;
}
};
class ProgressEvent : public PipelineEvent {
public:
enum ProgressType {
Start, Continue, Complete, Cancelled, Error
};
ProgressEvent(QString src, ProgressType progressType, QString code, QString text) :
PipelineEvent(Progress, src), progressType(progressType), code(code), text(text)
{}
static QEvent::Type type()
{
return Progress;
}
ProgressType getProgressType()
{
return progressType;
}
QString getCode()
{
return code;
}
QString getText()
{
return text;
}
private:
ProgressType progressType;
QString code;
QString text;
};
class LatencyEvent : public PipelineEvent {
public:
LatencyEvent(QString src) :
PipelineEvent(Latency, src)
{}
static QEvent::Type type()
{
return Latency;
}
};
class QosData {
public:
// timestamps and live status
// If the message was generated by a live element
bool live;
// running_time, stream_time, timestamp and duration of the dropped buffer.
// Values of GST_CLOCK_TIME_NONE mean unknown values.
quint64 running_time;
quint64 stream_time;
quint64 timestamp;
quint64 duration;
// values
// The difference of the running-time against the deadline.
qint64 jitter;
// Long term prediction of the ideal rate relative to normal rate to get optimal quality.
qreal proportion; // won't work on ARM?
// An element dependent integer value that specifies the current quality level of the element.
// The default maximum quality is 1000000.
qint32 quality;
// stats
// QoS stats representing the history of the current continuous pipeline playback period.
// When format is GST_FORMAT_UNDEFINED both dropped and processed are invalid.
// Values of -1 for either processed or dropped mean unknown values.
// Units of the 'processed' and 'dropped' fields.
// Video sinks and video filters will use GST_FORMAT_BUFFERS (frames).
// Audio sinks and audio filters will likely use GST_FORMAT_DEFAULT (samples)
// GstFormat format;
// Total number of units correctly processed since the last state change to READY or a flushing operation.
quint64 processed;
// Total number of units dropped since the last state change to READY or a flushing operation.
quint64 dropped;
QString timestamps()
{
return QString("live: %0; running time: %1; stream time: %2; timestamp: %3; duration: %4").arg(live).arg(
running_time).arg(stream_time).arg(timestamp).arg(duration);
}
QString values()
{
return QString("jitter: %0; proportion: %1; quality: %2;").arg(jitter).arg(proportion).arg(quality);
}
QString stats()
{
return QString("format: %0; processed: %1; dropped: %2;").arg("").arg(processed).arg(dropped);
}
};
class QosEvent : public PipelineEvent {
public:
QosEvent(QString src, QosData data) : PipelineEvent(Qos, src), data(data)
{}
static QEvent::Type type()
{
return Qos;
}
QosData getData()
{
return data;
}
private:
QosData data;
};
class EosEvent : public PipelineEvent {
public:
EosEvent(QString src) : PipelineEvent(Eos, src)
{}
static QEvent::Type type()
{
return Eos;
}
};
class MessageEvent : public PipelineEvent {
public:
MessageEvent(QEvent::Type type, QString src, QString message, QString debug) :
PipelineEvent(type, src), message(message), debug(debug)
{}
QString getMessage()
{
return message;
}
QString getDebug()
{
return debug;
}
private:
QString message;
QString debug;
};
class ErrorEvent : public MessageEvent {
public:
ErrorEvent(QString src, QString message, QString debug) : MessageEvent(Error, src, message, debug)
{}
static QEvent::Type type()
{
return Error;
}
};
class WarningEvent : public MessageEvent {
public:
WarningEvent(QString src, QString message, QString debug) : MessageEvent(Warning, src, message, debug)
{}
static QEvent::Type type()
{
return Warning;
}
};
class InfoEvent : public MessageEvent {
public:
InfoEvent(QString src, QString message, QString debug) : MessageEvent(Info, src, message, debug)
{}
static QEvent::Type type()
{
return Info;
}
};
#endif /* PIPELINEEVENT_H_ */
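
A minimal consumer sketch (the class and names below are hypothetical; the actual consumer in this commit is the video widget, not shown in this excerpt). Because these are plain QEvents with registered types, a GStreamer bus callback can post them across threads and a widget can dispatch on the static type() of each event class:

#include "pipelineevent.h"
#include <QCoreApplication>
#include <QDebug>
#include <QObject>

class PipelineEventSink : public QObject {
public:
    bool event(QEvent *e)
    {
        if (e->type() == StateChangedEvent::type()) {
            StateChangedEvent *sce = static_cast<StateChangedEvent *>(e);
            qDebug() << sce->src << "->" << StateChangedEvent::stateName(sce->getNewState());
            return true;
        }
        if (e->type() == ErrorEvent::type()) {
            ErrorEvent *ee = static_cast<ErrorEvent *>(e);
            qDebug() << "error from" << ee->src << ":" << ee->getMessage();
            return true;
        }
        return QObject::event(e);
    }
};

// from the bus thread: QCoreApplication::postEvent(sink, new EosEvent("pipeline0"));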


@ -0,0 +1,94 @@
/* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "cameraevent.hpp"
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <QString>
/**
* gst_camera_event_new_calibrated:
* @settings: serialized undistort settings to attach to the event.
*
* Creates a new Calibrated event. The event carries a copy of @settings
* in its "undistort-settings" field.
*
* To parse an event created by gst_camera_event_new_calibrated() use
* gst_camera_event_parse_calibrated().
*
* Returns: The new GstEvent
*/
GstEvent *
gst_camera_event_new_calibrated (gchar * settings)
{
GstEvent *calibrated_event;
GstStructure *s;
s = gst_structure_new (GST_CAMERA_EVENT_CALIBRATED_NAME,
"undistort-settings", G_TYPE_STRING, g_strdup(settings), NULL);
calibrated_event = gst_event_new_custom (GST_EVENT_CUSTOM_BOTH, s);
return calibrated_event;
}
/**
* gst_camera_event_parse_calibrated:
* @event: A #GstEvent to parse
* @settings: A location to receive a newly allocated copy of the undistort settings
*
* Parse a #GstEvent, identify whether it is a Calibrated event, and if so
* return the undistort settings carried by the event. The returned string
* must be freed with g_free() by the caller.
*
* Create a calibrated event using gst_camera_event_new_calibrated()
*
* Returns: %TRUE if the event is a valid calibrated event. %FALSE if not
*/
gboolean
gst_camera_event_parse_calibrated (GstEvent * event, gchar ** settings)
{
const GstStructure *s;
g_return_val_if_fail (event != NULL, FALSE);
if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_BOTH)
return FALSE; /* Not a calibrated event */
s = gst_event_get_structure (event);
if (s == NULL
|| !gst_structure_has_name (s, GST_CAMERA_EVENT_CALIBRATED_NAME))
return FALSE; /* Not a calibrated event */
const gchar *str = gst_structure_get_string(s, "undistort-settings");
if (!str)
return FALSE; /* Not calibrated frame event */
//qDebug() << "*** " << buf;//QString::fromStdString(buf);
*settings = g_strdup (str);
return TRUE;
}
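
A short usage sketch (srcpad, cameraMatrix and distCoeffs are assumed to belong to the calibration element; this wiring is not shown in the excerpt above). The calibrated event is a custom both-direction event, so it can be pushed from a pad and recognised in a downstream pad event handler; camera_serialize_undistort_settings() is the helper declared in camerautils.hpp further down:

#include "cameraevent.hpp"
#include "camerautils.hpp"

/* sender: announce freshly computed undistort settings downstream */
static void push_calibrated_event (GstPad * srcpad, cv::Mat & cameraMatrix, cv::Mat & distCoeffs)
{
    gchar *settings = camera_serialize_undistort_settings (cameraMatrix, distCoeffs);
    /* gst_pad_push_event() takes ownership of the event */
    gst_pad_push_event (srcpad, gst_camera_event_new_calibrated (settings));
    g_free (settings);
}

/* receiver: recognise the event in a pad event function */
static gboolean handle_calibrated_event (GstEvent * event)
{
    gchar *settings = NULL;
    if (!gst_camera_event_parse_calibrated (event, &settings)) {
        return FALSE;
    }
    /* use the settings, then free the copy made by the parser */
    g_free (settings);
    return TRUE;
}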


@ -0,0 +1,37 @@
/* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_EVENT_H__
#define __GST_CAMERA_EVENT_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_CAMERA_EVENT_CALIBRATED_NAME "GstEventCalibrated"
/* camera calibration event creation and parsing */
GstEvent * gst_camera_event_new_calibrated (gchar * settings);
gboolean gst_camera_event_parse_calibrated (GstEvent * event, gchar ** settings);
G_END_DECLS
#endif /* __GST_CAMERA_EVENT_H__ */


@ -0,0 +1,46 @@
/* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "camerautils.hpp"
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <QString>
gchar *
camera_serialize_undistort_settings (cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
cv::FileStorage fs(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs << "cameraMatrix" << cameraMatrix;
fs << "distCoeffs" << distCoeffs;
std::string buf = fs.releaseAndGetString();
return g_strdup(buf.c_str());
}
gboolean
camera_deserialize_undistort_settings (gchar * str, cv::Mat &cameraMatrix, cv::Mat &distCoeffs)
{
cv::FileStorage fs(str, cv::FileStorage::READ + cv::FileStorage::MEMORY);
fs["cameraMatrix"] >> cameraMatrix;
fs["distCoeffs"] >> distCoeffs;
return TRUE;
}


@ -0,0 +1,34 @@
/* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_UTILS_H__
#define __GST_CAMERA_UTILS_H__
#include <gst/gst.h>
#include <cv.h>
G_BEGIN_DECLS
gchar *camera_serialize_undistort_settings (cv::Mat &cameraMatrix, cv::Mat &distCoeffs);
gboolean camera_deserialize_undistort_settings (gchar *str, cv::Mat &cameraMatrix, cv::Mat &distCoeffs);
G_END_DECLS
#endif /* __GST_CAMERA_UTILS_H__ */


@ -0,0 +1,929 @@
/*
* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-cameracalibration
*
 * Performs camera calibration.
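 *
 * A possible launch line (illustrative only, assuming a v4l2 camera source;
 * the cameraundistort element is optional):
 * |[
 * gst-launch-1.0 v4l2src ! videoconvert ! cameracalibration ! cameraundistort ! videoconvert ! autovideosink
 * ]|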
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "gstcameracalibration.h"
#if (CV_MAJOR_VERSION >= 3)
#include <opencv2/imgproc.hpp>
#endif
#include <opencv2/calib3d.hpp>
#include <gst/opencv/gstopencvutils.h>
#include "camerautils.hpp"
#include "cameraevent.hpp"
#include <vector>
#include <QDebug>
GST_DEBUG_CATEGORY_STATIC (gst_camera_calibration_debug);
#define GST_CAT_DEFAULT gst_camera_calibration_debug
#define DEFAULT_CALIBRATON_PATTERN GST_CAMERACALIBRATION_PATTERN_CHESSBOARD
#define DEFAULT_BOARD_WIDTH 9
#define DEFAULT_BOARD_HEIGHT 6
#define DEFAULT_SQUARE_SIZE 50
#define DEFAULT_ASPECT_RATIO 1.0
#define DEFAULT_CORNER_SUB_PIXEL true
#define DEFAULT_ZERO_TANGENT_DISTORTION false
#define DEFAULT_CENTER_PRINCIPAL_POINT false
#define DEFAULT_USE_FISHEYE false
#define DEFAULT_FRAME_COUNT 25
#define DEFAULT_DELAY 350
#define DEFAULT_SHOW_CORNERS true
///* Filter signals and args */
//enum
//{
// /* FILL ME */
// LAST_SIGNAL
//};
enum
{
PROP_0,
PROP_CALIBRATON_PATTERN,
PROP_BOARD_WIDTH,
PROP_BOARD_HEIGHT,
PROP_SQUARE_SIZE,
PROP_ASPECT_RATIO,
PROP_CORNER_SUB_PIXEL,
PROP_ZERO_TANGENT_DISTORTION,
PROP_CENTER_PRINCIPAL_POINT,
PROP_USE_FISHEYE,
PROP_FRAME_COUNT,
PROP_DELAY,
PROP_SHOW_CORNERS
};
enum {
DETECTION = 0,
CAPTURING = 1,
CALIBRATED = 2
};
#define GST_TYPE_CAMERA_CALIBRATION_PATTERN (cameracalibration_pattern_get_type ())
static GType
cameracalibration_pattern_get_type (void)
{
static GType cameracalibration_pattern_type = 0;
static const GEnumValue cameracalibration_pattern[] = {
{GST_CAMERACALIBRATION_PATTERN_CHESSBOARD, "Chessboard", "chessboard"},
{GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID, "Circle Grids", "circle_grids"},
{GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID, "Asymmetric Circle Grids", "asymmetric_circle_grids"},
{0, NULL, NULL},
};
if (!cameracalibration_pattern_type) {
cameracalibration_pattern_type =
g_enum_register_static ("GstCameraCalibrationPattern", cameracalibration_pattern);
}
return cameracalibration_pattern_type;
}
G_DEFINE_TYPE (GstCameraCalibration, gst_camera_calibration, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_camera_calibration_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_camera_calibration_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
//static gboolean gst_camera_calibration_set_caps (GstOpencvVideoFilter * transform,
// gint in_width, gint in_height, gint in_depth, gint in_channels,
// gint out_width, gint out_height, gint out_depth, gint out_channels);
static GstFlowReturn gst_camera_calibration_transform_frame_ip (
GstOpencvVideoFilter * cvfilter, GstBuffer * frame, IplImage * img);
/* Clean up */
static void
gst_camera_calibration_finalize (GObject * obj)
{
G_OBJECT_CLASS (gst_camera_calibration_parent_class)->finalize (obj);
}
/* initialize the cameracalibration's class */
static void
gst_camera_calibration_class_init (GstCameraCalibrationClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstOpencvVideoFilterClass *opencvfilter_class = GST_OPENCV_VIDEO_FILTER_CLASS (klass);
GstCaps *caps;
GstPadTemplate *templ;
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_camera_calibration_finalize);
gobject_class->set_property = gst_camera_calibration_set_property;
gobject_class->get_property = gst_camera_calibration_get_property;
opencvfilter_class->cv_trans_ip_func =
gst_camera_calibration_transform_frame_ip;
g_object_class_install_property (gobject_class, PROP_CALIBRATON_PATTERN,
g_param_spec_enum ("pattern", "Calibration Pattern",
"One of the chessboard, circles, or asymmetric circle pattern",
GST_TYPE_CAMERA_CALIBRATION_PATTERN, DEFAULT_CALIBRATON_PATTERN,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_BOARD_WIDTH,
g_param_spec_int ("board-width", "Board Width",
"The board width in number of items",
1, G_MAXINT, DEFAULT_BOARD_WIDTH,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_BOARD_HEIGHT,
g_param_spec_int ("board-height", "Board Height",
"The board height in number of items",
1, G_MAXINT, DEFAULT_BOARD_HEIGHT,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_SQUARE_SIZE,
g_param_spec_float ("square-size", "Square Size",
"The size of a square in your defined unit (point, millimeter, etc.)",
0.0, G_MAXFLOAT, DEFAULT_SQUARE_SIZE,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_ASPECT_RATIO,
g_param_spec_float ("aspect-ratio", "Aspect Ratio",
"The aspect ratio",
0.0, G_MAXFLOAT, DEFAULT_ASPECT_RATIO,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_CORNER_SUB_PIXEL,
g_param_spec_boolean ("corner-sub-pixel", "Corner Sub Pixel",
"Improve corner detection accuracy for chessboard",
DEFAULT_CORNER_SUB_PIXEL, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_ZERO_TANGENT_DISTORTION,
g_param_spec_boolean ("zero-tangent-distorsion", "Zero Tangent Distorsion",
"Assume zero tangential distortion",
DEFAULT_ZERO_TANGENT_DISTORTION, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_CENTER_PRINCIPAL_POINT,
g_param_spec_boolean ("center-principal-point", "Center Principal Point",
"Fix the principal point at the center",
DEFAULT_CENTER_PRINCIPAL_POINT, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_USE_FISHEYE,
g_param_spec_boolean ("use-fisheye", "Use Fisheye",
"Use fisheye camera model for calibration",
DEFAULT_USE_FISHEYE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_DELAY,
g_param_spec_int ("delay", "Delay",
"Sampling periodicity in ms", 0, G_MAXINT,
DEFAULT_DELAY,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_FRAME_COUNT,
g_param_spec_int ("frame-count", "Frame Count",
"The number of frames to use from the input for calibration", 1, G_MAXINT,
DEFAULT_FRAME_COUNT,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_SHOW_CORNERS,
g_param_spec_boolean ("show-corners", "Show Corners",
"Show corners",
DEFAULT_SHOW_CORNERS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (element_class,
"cameracalibration",
"Filter/Effect/Video",
"Performs camera calibration",
"Philippe Renon <philippe_renon@yahoo.fr>");
/* add sink and source pad templates */
caps = gst_opencv_caps_from_cv_image_type (CV_8UC4);
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC3));
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
gst_element_class_add_pad_template (element_class, templ);
// gst_element_class_add_static_pad_template (element_class, &src_factory);
// gst_element_class_add_static_pad_template (element_class, &sink_factory);
}
/* initialize the new element
* initialize instance structure
*/
static void
gst_camera_calibration_init (GstCameraCalibration * calib)
{
calib->calibrationPattern = DEFAULT_CALIBRATON_PATTERN;
calib->boardSize.width = DEFAULT_BOARD_WIDTH;
calib->boardSize.height = DEFAULT_BOARD_HEIGHT;
calib->squareSize = DEFAULT_SQUARE_SIZE;
calib->aspectRatio = DEFAULT_ASPECT_RATIO;
calib->cornerSubPix = DEFAULT_CORNER_SUB_PIXEL;
calib->calibZeroTangentDist = DEFAULT_ZERO_TANGENT_DISTORTION;
calib->calibFixPrincipalPoint = DEFAULT_CENTER_PRINCIPAL_POINT;
calib->useFisheye = DEFAULT_USE_FISHEYE;
calib->nrFrames = DEFAULT_FRAME_COUNT;
calib->delay = DEFAULT_DELAY;
calib->showCorners = DEFAULT_SHOW_CORNERS;
calib->flags = cv::CALIB_FIX_K4 | cv::CALIB_FIX_K5;
if (calib->calibFixPrincipalPoint) calib->flags |= cv::CALIB_FIX_PRINCIPAL_POINT;
if (calib->calibZeroTangentDist) calib->flags |= cv::CALIB_ZERO_TANGENT_DIST;
if (calib->aspectRatio) calib->flags |= cv::CALIB_FIX_ASPECT_RATIO;
if (calib->useFisheye) {
// the fisheye model has its own enum, so overwrite the flags
calib->flags = cv::fisheye::CALIB_FIX_SKEW | cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC |
// cv::fisheye::CALIB_FIX_K1 |
cv::fisheye::CALIB_FIX_K2 | cv::fisheye::CALIB_FIX_K3 | cv::fisheye::CALIB_FIX_K4;
}
calib->mode = CAPTURING; //DETECTION;
calib->prevTimestamp = 0;
calib->imagePoints.clear();
calib->cameraMatrix = 0;
calib->distCoeffs = 0;
gst_opencv_video_filter_set_in_place (
GST_OPENCV_VIDEO_FILTER_CAST (calib), TRUE);
}
static void
gst_camera_calibration_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (object);
switch (prop_id) {
case PROP_CALIBRATON_PATTERN:
calib->calibrationPattern = g_value_get_enum (value);
break;
case PROP_BOARD_WIDTH:
calib->boardSize.width = g_value_get_int (value);
break;
case PROP_BOARD_HEIGHT:
calib->boardSize.height = g_value_get_int (value);
break;
case PROP_SQUARE_SIZE:
calib->squareSize = g_value_get_float (value);
break;
case PROP_ASPECT_RATIO:
calib->aspectRatio = g_value_get_float (value);
break;
case PROP_CORNER_SUB_PIXEL:
calib->cornerSubPix = g_value_get_boolean (value);
break;
case PROP_ZERO_TANGENT_DISTORTION:
calib->calibZeroTangentDist = g_value_get_boolean (value);
break;
case PROP_CENTER_PRINCIPAL_POINT:
calib->calibFixPrincipalPoint = g_value_get_boolean (value);
break;
case PROP_USE_FISHEYE:
calib->useFisheye = g_value_get_boolean (value);
break;
case PROP_FRAME_COUNT:
calib->nrFrames = g_value_get_int (value);
break;
case PROP_DELAY:
calib->delay = g_value_get_int (value);
break;
case PROP_SHOW_CORNERS:
calib->showCorners = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_camera_calibration_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (object);
switch (prop_id) {
case PROP_CALIBRATON_PATTERN:
g_value_set_enum (value, calib->calibrationPattern);
break;
case PROP_BOARD_WIDTH:
g_value_set_int (value, calib->boardSize.width);
break;
case PROP_BOARD_HEIGHT:
g_value_set_int (value, calib->boardSize.height);
break;
case PROP_SQUARE_SIZE:
g_value_set_float (value, calib->squareSize);
break;
case PROP_ASPECT_RATIO:
g_value_set_float (value, calib->aspectRatio);
break;
case PROP_CORNER_SUB_PIXEL:
g_value_set_boolean (value, calib->cornerSubPix);
break;
case PROP_ZERO_TANGENT_DISTORTION:
g_value_set_boolean (value, calib->calibZeroTangentDist);
break;
case PROP_CENTER_PRINCIPAL_POINT:
g_value_set_boolean (value, calib->calibFixPrincipalPoint);
break;
case PROP_USE_FISHEYE:
g_value_set_boolean (value, calib->useFisheye);
break;
case PROP_FRAME_COUNT:
g_value_set_int (value, calib->nrFrames);
break;
case PROP_DELAY:
g_value_set_int (value, calib->delay);
break;
case PROP_SHOW_CORNERS:
g_value_set_boolean (value, calib->showCorners);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GstElement vmethod implementations */
/* this function handles the link with other elements */
//static gboolean
//gst_camera_calibration_set_caps (GstOpencvVideoFilter * transform, gint in_width,
// gint in_height, gint in_depth, gint in_channels,
// gint out_width, gint out_height, gint out_depth, gint out_channels)
//{
// GstCameraCalibration *calib;
//
// calib = GST_CAMERA_CALIBRATION (transform);
//
// if (calib->cvGray)
// cvReleaseImage (&calib->cvGray);
//
// calib->cvGray = cvCreateImage (cvSize (in_width, in_height), IPL_DEPTH_8U,
// 1);
//
// return TRUE;
//}
//static GstMessage *
//gst_camera_calibration_message_new (GstCameraCalibration * calib, GstBuffer * buf)
//{
// GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (calib);
// GstStructure *s;
// GstClockTime running_time, stream_time;
//
// running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
// stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
//
// s = gst_structure_new ("cameracalibration",
// "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (buf),
// "stream-time", G_TYPE_UINT64, stream_time,
// "running-time", G_TYPE_UINT64, running_time,
// "duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf), NULL);
//
// return gst_message_new_element (GST_OBJECT (calib), s);
//}
void camera_calibration_run(GstCameraCalibration *calib, IplImage *img);
/*
* Performs the camera calibration
*/
static GstFlowReturn
gst_camera_calibration_transform_frame_ip (GstOpencvVideoFilter * cvfilter,
G_GNUC_UNUSED GstBuffer * frame, IplImage * img)
{
GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (cvfilter);
camera_calibration_run(calib, img);
return GST_FLOW_OK;
}
/* entry point to initialize the plug-in
* initialize the plug-in itself
* register the element factories and other features
*/
gboolean
gst_camera_calibration_plugin_init (GstPlugin * plugin)
{
/* debug category for filtering log messages */
GST_DEBUG_CATEGORY_INIT (gst_camera_calibration_debug, "cameracalibration",
0,
"Performs camera calibration");
return gst_element_register (plugin, "cameracalibration", GST_RANK_NONE,
GST_TYPE_CAMERA_CALIBRATION);
}
// void validate()
// {
// goodInput = true;
// if (boardSize.width <= 0 || boardSize.height <= 0)
// {
// cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << endl;
// goodInput = false;
// }
// if (squareSize <= 10e-6)
// {
// cerr << "Invalid square size " << squareSize << endl;
// goodInput = false;
// }
// if (nrFrames <= 0)
// {
// cerr << "Invalid number of frames " << nrFrames << endl;
// goodInput = false;
// }
//
// if (input.empty()) // Check for valid input
// inputType = INVALID;
// else
// {
// if (input[0] >= '0' && input[0] <= '9')
// {
// stringstream ss(input);
// ss >> cameraID;
// inputType = CAMERA;
// }
// else
// {
// if (readStringList(input, imageList))
// {
// inputType = IMAGE_LIST;
// nrFrames = (nrFrames < (int)imageList.size()) ? nrFrames : (int)imageList.size();
// }
// else
// inputType = VIDEO_FILE;
// }
// if (inputType == CAMERA)
// inputCapture.open(cameraID);
// if (inputType == VIDEO_FILE)
// inputCapture.open(input);
// if (inputType != IMAGE_LIST && !inputCapture.isOpened())
// inputType = INVALID;
// }
// if (inputType == INVALID)
// {
// cerr << " Input does not exist: " << input;
// goodInput = false;
// }
//
// flag = CALIB_FIX_K4 | CALIB_FIX_K5;
// if(calibFixPrincipalPoint) flag |= CALIB_FIX_PRINCIPAL_POINT;
// if(calibZeroTangentDist) flag |= CALIB_ZERO_TANGENT_DIST;
// if(aspectRatio) flag |= CALIB_FIX_ASPECT_RATIO;
//
// if (useFisheye) {
// // the fisheye model has its own enum, so overwrite the flags
// flag = fisheye::CALIB_FIX_SKEW | fisheye::CALIB_RECOMPUTE_EXTRINSIC |
// // fisheye::CALIB_FIX_K1 |
// fisheye::CALIB_FIX_K2 | fisheye::CALIB_FIX_K3 | fisheye::CALIB_FIX_K4;
// }
//
// calibrationPattern = NOT_EXISTING;
// if (!patternToUse.compare("CHESSBOARD")) calibrationPattern = CHESSBOARD;
// if (!patternToUse.compare("CIRCLES_GRID")) calibrationPattern = CIRCLES_GRID;
// if (!patternToUse.compare("ASYMMETRIC_CIRCLES_GRID")) calibrationPattern = ASYMMETRIC_CIRCLES_GRID;
// if (calibrationPattern == NOT_EXISTING)
// {
// cerr << " Camera calibration mode does not exist: " << patternToUse << endl;
// goodInput = false;
// }
// atImageList = 0;
//
// }
bool runCalibration(GstCameraCalibration *calib, cv::Size imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
std::vector<std::vector<cv::Point2f> > imagePoints );
void doCalibration(GstElement *element, gpointer user_data);
void camera_calibration_run(GstCameraCalibration *calib, IplImage *img)
{
cv::Mat view = cv::cvarrToMat(img);
// For camera only take new samples after delay time
if (calib->mode == CAPTURING) {
// get_input
cv::Size imageSize = view.size();
// find_pattern
// FIXME find ways to reduce CPU usage
// don't do it on all frames ? will it help ? corner display will be affected.
// in a separate frame?
// in a separate element that gets composited back into the main stream (video is tee-d into it and can then be decimated, scaled, etc..)
std::vector<cv::Point2f> pointBuf;
bool found;
int chessBoardFlags = cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;
if (!calib->useFisheye) {
// fast check erroneously fails with high distortions like fisheye
chessBoardFlags |= cv::CALIB_CB_FAST_CHECK;
}
// Find feature points on the input format
switch(calib->calibrationPattern) {
case GST_CAMERACALIBRATION_PATTERN_CHESSBOARD:
found = cv::findChessboardCorners(view, calib->boardSize, pointBuf, chessBoardFlags);
break;
case GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID:
found = cv::findCirclesGrid(view, calib->boardSize, pointBuf);
break;
case GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
found = cv::findCirclesGrid(view, calib->boardSize, pointBuf, cv::CALIB_CB_ASYMMETRIC_GRID );
break;
default:
found = false;
break;
}
bool blinkOutput = false;
if (found) {
// improve the found corners' coordinate accuracy for chessboard
if (calib->calibrationPattern == GST_CAMERACALIBRATION_PATTERN_CHESSBOARD && calib->cornerSubPix) {
// FIXME findChessboardCorners and alike do a cv::COLOR_BGR2GRAY (and a histogram balance)
// the color convert should be done once (if needed) and shared
// FIXME keep viewGray around to avoid reallocating it each time...
cv::Mat viewGray;
cv::cvtColor(view, viewGray, cv::COLOR_BGR2GRAY);
cv::cornerSubPix(viewGray, pointBuf, cv::Size(11, 11),
cv::Size(-1, -1), cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));
}
// For camera only take new samples after delay time
if ((calib->mode == CAPTURING) && ((clock() - calib->prevTimestamp) > calib->delay * 1e-3 * CLOCKS_PER_SEC)) {
calib->imagePoints.push_back(pointBuf);
calib->prevTimestamp = clock();
blinkOutput = true;
}
// Draw the corners
if (calib->showCorners) {
cv::drawChessboardCorners(view, calib->boardSize, cv::Mat(pointBuf), found);
}
}
// If got enough frames then stop calibration and show result
if (calib->mode == CAPTURING && calib->imagePoints.size() >= (size_t)calib->nrFrames) {
//GstElementCallAsyncFunc func;
//gst_element_call_async (GST_ELEMENT (calib), /*GstElementCallAsyncFunc*/ doCalibration, NULL, NULL);
if (runCalibration(calib, imageSize, calib->cameraMatrix, calib->distCoeffs, calib->imagePoints)) {
calib->mode = CALIBRATED;
GstPad *sinkPad = GST_BASE_TRANSFORM_SINK_PAD (calib);
//GstPad *srcPad = GST_BASE_TRANSFORM_SRC_PAD (calib);
GstEvent *event;
//gboolean result;
// create calibrated event and send upstream and downstream
// FIXME should keep settings around for answering queries
gchar *settings = camera_serialize_undistort_settings(calib->cameraMatrix, calib->distCoeffs);
event = gst_camera_event_new_calibrated(settings);
g_free (settings);
//gst_event_ref(event);
GST_LOG_OBJECT (sinkPad, "Sending upstream event %s.", GST_EVENT_TYPE_NAME (event));
if (!gst_pad_push_event (sinkPad, event)) {
GST_WARNING_OBJECT (sinkPad, "Sending upstream event %p (%s) failed.",
event, GST_EVENT_TYPE_NAME (event));
}
// GST_LOG_OBJECT (srcPad, "Sending downstream event %s.", GST_EVENT_TYPE_NAME (event));
// if (!gst_pad_push_event (srcPad, event)) {
// GST_WARNING_OBJECT (srcPad, "Sending downstream event %p (%s) failed.",
// event, GST_EVENT_TYPE_NAME (event));
// }
} else {
calib->mode = DETECTION;
}
}
if (calib->mode == CAPTURING && blinkOutput) {
bitwise_not(view, view);
}
}
// Output Text
// FIXME all additional rendering (text, corners, ...) should be done with cairo or another gst framework.
// this will relax the conditions on the input format (RBG only at the moment).
// the calibration itself accepts more formats...
std::string msg = (calib->mode == CAPTURING) ? "100/100" :
(calib->mode == CALIBRATED) ? "Calibrated" : "Press 'g' to start";
int baseLine = 0;
cv::Size textSize = cv::getTextSize(msg, 1, 1, 1, &baseLine);
cv::Point textOrigin(view.cols - 2 * textSize.width - 10, view.rows - 2 * baseLine - 10);
if (calib->mode == CAPTURING) {
msg = cv::format( "%d/%d", (int)calib->imagePoints.size(), calib->nrFrames );
}
const cv::Scalar RED(0,0,255);
const cv::Scalar GREEN(0,255,0);
cv::putText(view, msg, textOrigin, 1, 1, calib->mode == CALIBRATED ? GREEN : RED);
}
void doCalibration(__attribute__((unused)) GstElement *element, __attribute__((unused)) gpointer user_data)
{
// GstCameraCalibration *calib = GST_CAMERA_CALIBRATION (element);
}
static double computeReprojectionErrors( const std::vector<std::vector<cv::Point3f> >& objectPoints,
const std::vector<std::vector<cv::Point2f> >& imagePoints,
const std::vector<cv::Mat>& rvecs, const std::vector<cv::Mat>& tvecs,
const cv::Mat& cameraMatrix , const cv::Mat& distCoeffs,
std::vector<float>& perViewErrors, bool fisheye)
{
std::vector<cv::Point2f> imagePoints2;
size_t totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for(size_t i = 0; i < objectPoints.size(); ++i)
{
if (fisheye)
{
cv::fisheye::projectPoints(objectPoints[i], imagePoints2, rvecs[i], tvecs[i], cameraMatrix,
distCoeffs);
}
else
{
cv::projectPoints(objectPoints[i], rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
}
err = cv::norm(imagePoints[i], imagePoints2, cv::NORM_L2);
size_t n = objectPoints[i].size();
perViewErrors[i] = (float) std::sqrt(err*err/n);
totalErr += err*err;
totalPoints += n;
}
return std::sqrt(totalErr/totalPoints);
}
static void calcBoardCornerPositions(cv::Size boardSize, float squareSize, std::vector<cv::Point3f>& corners,
gint patternType /*= CHESSBOARD*/)
{
corners.clear();
switch(patternType)
{
case GST_CAMERACALIBRATION_PATTERN_CHESSBOARD:
case GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID:
for( int i = 0; i < boardSize.height; ++i)
for( int j = 0; j < boardSize.width; ++j)
corners.push_back(cv::Point3f(j * squareSize, i * squareSize, 0));
break;
case GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
for( int i = 0; i < boardSize.height; i++)
for( int j = 0; j < boardSize.width; j++)
corners.push_back(cv::Point3f((2 * j + i % 2) * squareSize, i * squareSize, 0));
break;
default:
break;
}
}
static bool runCalibration(GstCameraCalibration *calib, cv::Size& imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
std::vector<std::vector<cv::Point2f> > imagePoints, std::vector<cv::Mat>& rvecs, std::vector<cv::Mat>& tvecs,
std::vector<float>& reprojErrs, double& totalAvgErr)
{
//! [fixed_aspect]
cameraMatrix = cv::Mat::eye(3, 3, CV_64F);
if (calib->flags & cv::CALIB_FIX_ASPECT_RATIO) {
cameraMatrix.at<double>(0,0) = calib->aspectRatio;
}
//! [fixed_aspect]
if (calib->useFisheye) {
distCoeffs = cv::Mat::zeros(4, 1, CV_64F);
} else {
distCoeffs = cv::Mat::zeros(8, 1, CV_64F);
}
std::vector<std::vector<cv::Point3f> > objectPoints(1);
calcBoardCornerPositions(calib->boardSize, calib->squareSize, objectPoints[0], calib->calibrationPattern);
objectPoints.resize(imagePoints.size(), objectPoints[0]);
// Find intrinsic and extrinsic camera parameters
double rms;
if (calib->useFisheye) {
cv::Mat _rvecs, _tvecs;
rms = cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, _rvecs,
_tvecs, calib->flags);
rvecs.reserve(_rvecs.rows);
tvecs.reserve(_tvecs.rows);
for(int i = 0; i < int(objectPoints.size()); i++){
rvecs.push_back(_rvecs.row(i));
tvecs.push_back(_tvecs.row(i));
}
} else {
rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs,
calib->flags);
}
GST_LOG_OBJECT (calib,
"Re-projection error reported by calibrateCamera: %f", rms);
qDebug() << "Re-projection error reported by calibrateCamera:" << rms;
bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix,
distCoeffs, reprojErrs, calib->useFisheye);
return ok;
}
// Print camera parameters to the output file
//static void saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
// const vector<Mat>& rvecs, const vector<Mat>& tvecs,
// const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
// double totalAvgErr)
//{
// FileStorage fs( s.outputFileName, FileStorage::WRITE);
//
// time_t tm;
// time( &tm);
// struct tm *t2 = localtime( &tm);
// char buf[1024];
// strftime( buf, sizeof(buf), "%c", t2);
//
// fs << "calibration_time" << buf;
//
// if (!rvecs.empty() || !reprojErrs.empty())
// fs << "nr_of_frames" << (int)std::max(rvecs.size(), reprojErrs.size());
// fs << "image_width" << imageSize.width;
// fs << "image_height" << imageSize.height;
// fs << "board_width" << s.boardSize.width;
// fs << "board_height" << s.boardSize.height;
// fs << "square_size" << s.squareSize;
//
// if (s.flag & CALIB_FIX_ASPECT_RATIO)
// fs << "fix_aspect_ratio" << s.aspectRatio;
//
// if (s.flag)
// {
// if (s.useFisheye)
// {
// sprintf(buf, "flags:%s%s%s%s%s%s",
// s.flag & fisheye::CALIB_FIX_SKEW ? " +fix_skew" : "",
// s.flag & fisheye::CALIB_FIX_K1 ? " +fix_k1" : "",
// s.flag & fisheye::CALIB_FIX_K2 ? " +fix_k2" : "",
// s.flag & fisheye::CALIB_FIX_K3 ? " +fix_k3" : "",
// s.flag & fisheye::CALIB_FIX_K4 ? " +fix_k4" : "",
// s.flag & fisheye::CALIB_RECOMPUTE_EXTRINSIC ? " +recompute_extrinsic" : "");
// }
// else
// {
// sprintf(buf, "flags:%s%s%s%s",
// s.flag & CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "",
// s.flag & CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "",
// s.flag & CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "",
// s.flag & CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "");
// }
// cvWriteComment(*fs, buf, 0);
// }
//
// fs << "flags" << s.flag;
//
// fs << "fisheye_model" << s.useFisheye;
//
// fs << "camera_matrix" << cameraMatrix;
// fs << "distortion_coefficients" << distCoeffs;
//
// fs << "avg_reprojection_error" << totalAvgErr;
// if (s.writeExtrinsics && !reprojErrs.empty())
// fs << "per_view_reprojection_errors" << Mat(reprojErrs);
//
// if(s.writeExtrinsics && !rvecs.empty() && !tvecs.empty())
// {
// CV_Assert(rvecs[0].type() == tvecs[0].type());
// Mat bigmat((int)rvecs.size(), 6, rvecs[0].type());
// for( size_t i = 0; i < rvecs.size(); i++)
// {
// Mat r = bigmat(Range(int(i), int(i+1)), Range(0,3));
// Mat t = bigmat(Range(int(i), int(i+1)), Range(3,6));
//
// CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
// CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
// //*.t() is MatExpr (not Mat) so we can use assignment operator
// r = rvecs[i].t();
// t = tvecs[i].t();
// }
// //cvWriteComment( *fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
// fs << "extrinsic_parameters" << bigmat;
// }
//
// if(s.writePoints && !imagePoints.empty())
// {
// Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
// for( size_t i = 0; i < imagePoints.size(); i++)
// {
// Mat r = imagePtMat.row(int(i)).reshape(2, imagePtMat.cols);
// Mat imgpti(imagePoints[i]);
// imgpti.copyTo(r);
// }
// fs << "image_points" << imagePtMat;
// }
//}
//! [run_and_save]
//bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
// vector<vector<Point2f> > imagePoints)
//{
// vector<Mat> rvecs, tvecs;
// vector<float> reprojErrs;
// double totalAvgErr = 0;
//
// bool ok = runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
// totalAvgErr);
// cout << (ok ? "Calibration succeeded" : "Calibration failed")
// << ". avg re projection error = " << totalAvgErr << endl;
//
//// if (ok)
//// saveCameraParams(s, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, reprojErrs, imagePoints,
//// totalAvgErr);
// return ok;
//}
//! [run_and_save]
bool runCalibration(GstCameraCalibration *calib, cv::Size imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
std::vector<std::vector<cv::Point2f> > imagePoints)
{
std::vector<cv::Mat> rvecs, tvecs;
std::vector<float> reprojErrs;
double totalAvgErr = 0;
bool ok = runCalibration(calib, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
totalAvgErr);
GST_LOG_OBJECT (calib,
(ok ? "Calibration succeeded" : "Calibration failed"));// + ". avg re projection error = " + totalAvgErr);
return ok;
}

View File

@ -0,0 +1,110 @@
/*
* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_CALIBRATION_H__
#define __GST_CAMERA_CALIBRATION_H__
#include <gst/gst.h>
#include <gst/opencv/gstopencvvideofilter.h>
#include <opencv2/core.hpp>
G_BEGIN_DECLS
#define GST_TYPE_CAMERA_CALIBRATION \
(gst_camera_calibration_get_type())
#define GST_CAMERA_CALIBRATION(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_CALIBRATION,GstCameraCalibration))
#define GST_CAMERA_CALIBRATION_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_CALIBRATION,GstCameraCalibrationClass))
#define GST_IS_CAMERA_CALIBRATION(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_CALIBRATION))
#define GST_IS_CAMERA_CALIBRATION_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_CALIBRATION))
typedef struct _GstCameraCalibration GstCameraCalibration;
typedef struct _GstCameraCalibrationClass GstCameraCalibrationClass;
enum _GstCameraCalibrationPattern {
GST_CAMERACALIBRATION_PATTERN_CHESSBOARD,
GST_CAMERACALIBRATION_PATTERN_CIRCLES_GRID,
GST_CAMERACALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID
};
struct _GstCameraCalibration
{
GstOpencvVideoFilter cvfilter;
// settings
gint calibrationPattern; // One of the chessboard, circles, or asymmetric circle pattern
cv::Size boardSize; // The size of the board -> Number of items by width and height
float squareSize; // The size of a square in your defined unit (point, millimeter,etc).
float aspectRatio; // The aspect ratio
bool cornerSubPix; //
bool calibZeroTangentDist; // Assume zero tangential distortion
bool calibFixPrincipalPoint; // Fix the principal point at the center
bool useFisheye; // use fisheye camera model for calibration
int nrFrames; // The number of frames to use from the input for calibration
int delay; // In case of a video input
bool showUndistorted; // Show undistorted images after calibration
bool showCorners; // Show corners
// state
int flags;
int mode;
clock_t prevTimestamp;
std::vector<std::vector<cv::Point2f> > imagePoints;
cv::Mat cameraMatrix, distCoeffs;
};
struct _GstCameraCalibrationClass
{
GstOpencvVideoFilterClass parent_class;
};
GType gst_camera_calibration_get_type (void);
gboolean gst_camera_calibration_plugin_init (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_CAMERA_CALIBRATION_H__ */

View File

@ -0,0 +1,491 @@
/*
* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-cameraundistort
*
* Performs camera distortion correction.
*
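 * A possible launch line (illustrative, assuming the settings string was
 * previously obtained from the cameracalibration element):
 * |[
 * gst-launch-1.0 v4l2src ! videoconvert ! cameraundistort settings="..." ! videoconvert ! autovideosink
 * ]|
 *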
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <vector>
#include "camerautils.hpp"
#include "cameraevent.hpp"
#include <QDebug>
#include <QElapsedTimer>
#include "gstcameraundistort.h"
#if (CV_MAJOR_VERSION >= 3)
#include <opencv2/imgproc.hpp>
#endif
#include <opencv2/calib3d.hpp>
#include <gst/opencv/gstopencvutils.h>
GST_DEBUG_CATEGORY_STATIC (gst_camera_undistort_debug);
#define GST_CAT_DEFAULT gst_camera_undistort_debug
#define DEFAULT_SHOW_UNDISTORTED true
#define DEFAULT_ALPHA 1.0
#define DEFAULT_CROP true
enum
{
PROP_0,
PROP_SHOW_UNDISTORTED,
PROP_ALPHA,
PROP_CROP,
PROP_SETTINGS
};
/*#define GST_CAMERA_UNDISTORT_GET_LOCK(playsink) (&((GstCameraUndistort *)undist)->lock)
#define GST_CAMERA_UNDISTORT_LOCK(undist) G_STMT_START { \
GST_LOG_OBJECT (playsink, "locking from thread %p", g_thread_self ()); \
g_rec_mutex_lock (GST_CAMERA_UNDISTORT_GET_LOCK (undist)); \
GST_LOG_OBJECT (playsink, "locked from thread %p", g_thread_self ()); \
} G_STMT_END
#define GST_CAMERA_UNDISTORT_UNLOCK(undist) G_STMT_START { \
GST_LOG_OBJECT (playsink, "unlocking from thread %p", g_thread_self ()); \
g_rec_mutex_unlock (GST_CAMERA_UNDISTORT_GET_LOCK (undist)); \
} G_STMT_END*/
G_DEFINE_TYPE (GstCameraUndistort, gst_camera_undistort, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_camera_undistort_dispose (GObject * object);
static void gst_camera_undistort_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_camera_undistort_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_camera_undistort_set_info (GstOpencvVideoFilter * cvfilter,
gint in_width, gint in_height, gint in_depth, gint in_channels,
gint out_width, gint out_height, gint out_depth, gint out_channels);
static GstFlowReturn gst_camera_undistort_transform_frame (
GstOpencvVideoFilter * cvfilter,
GstBuffer * frame, IplImage * img,
GstBuffer * outframe, IplImage * outimg);
static gboolean gst_camera_undistort_sink_event (GstBaseTransform *trans, GstEvent *event);
static gboolean gst_camera_undistort_src_event (GstBaseTransform *trans, GstEvent *event);
static void camera_undistort_run(GstCameraUndistort *undist, IplImage *img, IplImage *outimg);
static gboolean camera_undistort_init_undistort_rectify_map(GstCameraUndistort *undist);
/* initialize the cameraundistort's class */
static void
gst_camera_undistort_class_init (GstCameraUndistortClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
GstOpencvVideoFilterClass *opencvfilter_class = GST_OPENCV_VIDEO_FILTER_CLASS (klass);
GstCaps *caps;
GstPadTemplate *templ;
gobject_class->dispose = gst_camera_undistort_dispose;
gobject_class->set_property = gst_camera_undistort_set_property;
gobject_class->get_property = gst_camera_undistort_get_property;
trans_class->sink_event =
GST_DEBUG_FUNCPTR (gst_camera_undistort_sink_event);
trans_class->src_event =
GST_DEBUG_FUNCPTR (gst_camera_undistort_src_event);
opencvfilter_class->cv_set_caps = gst_camera_undistort_set_info;
opencvfilter_class->cv_trans_func =
gst_camera_undistort_transform_frame;
g_object_class_install_property (gobject_class, PROP_SHOW_UNDISTORTED,
g_param_spec_boolean ("show-undistorted", "Show Undistorted",
"Show undistorted images",
DEFAULT_SHOW_UNDISTORTED, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_ALPHA,
g_param_spec_float ("alpha", "Pixels",
"Show all pixels (1), only valid ones (0) or something in between",
0.0, 1.0, DEFAULT_ALPHA,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_SETTINGS,
g_param_spec_string ("settings", "Settings",
"Undistort settings (OpenCV serialized opaque string)",
NULL, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (element_class,
"cameraundistort",
"Filter/Effect/Video",
"Performs camera undistort",
"Philippe Renon <philippe_renon@yahoo.fr>");
/* add sink and source pad templates */
caps = gst_opencv_caps_from_cv_image_type (CV_16UC1);
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC4));
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC3));
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
gst_element_class_add_pad_template (element_class, templ);
}
/* initialize the new element
* initialize instance structure
*/
static void
gst_camera_undistort_init (GstCameraUndistort * undist)
{
undist->showUndistorted = DEFAULT_SHOW_UNDISTORTED;
undist->alpha = DEFAULT_ALPHA;
undist->crop = DEFAULT_CROP;
undist->doUndistort = false;
undist->settingsChanged = false;
undist->cameraMatrix = 0;
undist->distCoeffs = 0;
undist->map1 = 0;
undist->map2 = 0;
//undist->validPixROI = 0;
undist->settings = NULL;
}
static void
gst_camera_undistort_dispose (GObject * object)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (object);
g_free (undist->settings);
undist->settings = NULL;
G_OBJECT_CLASS (gst_camera_undistort_parent_class)->dispose (object);
}
static void
gst_camera_undistort_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (object);
const char *str;
switch (prop_id) {
case PROP_SHOW_UNDISTORTED:
undist->showUndistorted = g_value_get_boolean (value);
undist->settingsChanged = true;
break;
case PROP_ALPHA:
undist->alpha = g_value_get_float (value);
undist->settingsChanged = true;
break;
case PROP_CROP:
undist->crop = g_value_get_boolean (value);
break;
case PROP_SETTINGS:
if (undist->settings) {
g_free (undist->settings);
undist->settings = NULL;
}
str = g_value_get_string (value);
if (str)
undist->settings = g_strdup (str);
undist->settingsChanged = true;
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_camera_undistort_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (object);
switch (prop_id) {
case PROP_SHOW_UNDISTORTED:
g_value_set_boolean (value, undist->showUndistorted);
break;
case PROP_ALPHA:
g_value_set_float (value, undist->alpha);
break;
case PROP_CROP:
g_value_set_boolean (value, undist->crop);
break;
case PROP_SETTINGS:
g_value_set_string (value, undist->settings);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
gboolean
gst_camera_undistort_set_info (GstOpencvVideoFilter * cvfilter,
gint in_width, gint in_height,
__attribute__((unused)) gint in_depth, __attribute__((unused)) gint in_channels,
__attribute__((unused)) gint out_width, __attribute__((unused)) gint out_height,
__attribute__((unused)) gint out_depth, __attribute__((unused)) gint out_channels)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (cvfilter);
undist->imageSize = cv::Size(in_width, in_height);
return TRUE;
}
//static GstMessage *
//gst_camera_undistort_message_new (GstCameraUndistort * undist, GstBuffer * buf)
//{
// GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (undist);
// GstStructure *s;
// GstClockTime running_time, stream_time;
//
// running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
// stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
// GST_BUFFER_TIMESTAMP (buf));
//
// s = gst_structure_new ("cameracalibration",
// "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (buf),
// "stream-time", G_TYPE_UINT64, stream_time,
// "running-time", G_TYPE_UINT64, running_time,
// "duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf), NULL);
//
// return gst_message_new_element (GST_OBJECT (undist), s);
//}
/*
* Performs the camera calibration
*/
static GstFlowReturn
gst_camera_undistort_transform_frame (GstOpencvVideoFilter * cvfilter,
G_GNUC_UNUSED GstBuffer * frame, IplImage * img,
G_GNUC_UNUSED GstBuffer * outframe, IplImage * outimg)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (cvfilter);
camera_undistort_run(undist, img, outimg);
return GST_FLOW_OK;
}
/* entry point to initialize the plug-in
* initialize the plug-in itself
* register the element factories and other features
*/
gboolean
gst_camera_undistort_plugin_init (GstPlugin * plugin)
{
/* debug category for filtering log messages */
GST_DEBUG_CATEGORY_INIT (gst_camera_undistort_debug, "cameraundistort",
0,
"Performs camera undistortion");
return gst_element_register (plugin, "cameraundistort", GST_RANK_NONE,
GST_TYPE_CAMERA_UNDISTORT);
}
static void
camera_undistort_run(GstCameraUndistort *undist, IplImage *img, IplImage *outimg)
{
const cv::Mat view = cv::cvarrToMat(img);
cv::Mat outview = cv::cvarrToMat(outimg);
if (undist->settingsChanged) {
undist->doUndistort = false;
if (undist->showUndistorted && undist->settings) {
//qDebug() << undist->settings;
if (camera_deserialize_undistort_settings(
undist->settings, undist->cameraMatrix, undist->distCoeffs)) {
undist->doUndistort = camera_undistort_init_undistort_rectify_map(undist);
}
}
undist->settingsChanged = false;
}
if (undist->showUndistorted && undist->doUndistort) {
QElapsedTimer timer;
timer.start();
cv::remap(view, outview, undist->map1, undist->map2, cv::INTER_LINEAR);
qDebug() << "remap took" << timer.elapsed() << "ms";
if (undist->crop) {
const cv::Scalar CROP_COLOR(0, 255, 0);
cv::rectangle(outview, undist->validPixROI, CROP_COLOR);
}
}
else {
// FIXME should use passthrough to avoid this copy...
view.copyTo(outview);
}
}
// {
// Mat view, rview, map1, map2;
//
// if (undist->useFisheye)
// {
// Mat newCamMat;
// fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
// Matx33d::eye(), newCamMat, 1);
// fisheye::initUndistortRectifyMap(cameraMatrix, distCoeffs, Matx33d::eye(), newCamMat, imageSize,
// CV_16SC2, map1, map2);
// }
// else
// {
// initUndistortRectifyMap(
// cameraMatrix, distCoeffs, Mat(),
// getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0), imageSize,
// CV_16SC2, map1, map2);
// }
// }
static gboolean
camera_undistort_init_undistort_rectify_map(GstCameraUndistort *undist)
{
QElapsedTimer timer;
timer.start();
cv::Size newImageSize;
cv::Rect validPixROI;
cv::Mat newCameraMatrix = cv::getOptimalNewCameraMatrix(
undist->cameraMatrix, undist->distCoeffs, undist->imageSize,
undist->alpha, newImageSize, &validPixROI);
undist->validPixROI = validPixROI;
cv::initUndistortRectifyMap(undist->cameraMatrix, undist->distCoeffs, cv::Mat(),
newCameraMatrix, undist->imageSize, CV_16SC2, undist->map1, undist->map2);
qDebug() << "init rectify took" << timer.elapsed() << "ms";
return TRUE;
}
/*
qDebug() << "imageSize" << imageSize.width << imageSize.height;
qDebug() << "newImageSize" << imageSize.width << imageSize.height;
qDebug() << "alpha" << undist->alpha;
qDebug() << "roi" << undist->validPixROI.x << undist->validPixROI.y << undist->validPixROI.width << undist->validPixROI.height;
cv::FileStorage fs1(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs1 << "cameraMatrix" << undist->cameraMatrix;
const std::string buf1 = fs1.releaseAndGetString();
qDebug() << "cameraMatrix" << QString::fromStdString(buf1);
cv::FileStorage fs2(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs2 << "newCameraMatrix" << newCameraMatrix;
const std::string buf2 = fs2.releaseAndGetString();
qDebug() << "newCameraMatrix" << QString::fromStdString(buf2);
cv::FileStorage fs3(".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
fs3 << "distCoeffs" << undist->distCoeffs;
const std::string buf3 = fs3.releaseAndGetString();
qDebug() << "distCoeffs" << QString::fromStdString(buf3);
*/
static gboolean camera_undistort_calibration_event(GstCameraUndistort *undist, GstEvent *event)
{
g_free (undist->settings);
undist->settings = NULL;
if (!gst_camera_event_parse_calibrated(event, &(undist->settings))) {
qDebug() << "Failed to parse";
return FALSE;
}
undist->settingsChanged = true;
return TRUE;
}
static gboolean
gst_camera_undistort_sink_event (GstBaseTransform *trans, GstEvent *event)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (trans);
const GstStructure *structure = gst_event_get_structure (event);
if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_BOTH && structure) {
if (strcmp (gst_structure_get_name (structure), GST_CAMERA_EVENT_CALIBRATED_NAME) == 0) {
qDebug() << "GOT CALIBRATION EVENT FROM UPSTREAM";
return camera_undistort_calibration_event(undist, event);
}
}
return GST_BASE_TRANSFORM_CLASS (gst_camera_undistort_parent_class)->sink_event (trans, event);
}
static gboolean
gst_camera_undistort_src_event (GstBaseTransform *trans, GstEvent *event)
{
GstCameraUndistort *undist = GST_CAMERA_UNDISTORT (trans);
const GstStructure *structure = gst_event_get_structure (event);
if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_BOTH && structure) {
if (strcmp (gst_structure_get_name (structure), GST_CAMERA_EVENT_CALIBRATED_NAME) == 0) {
qDebug() << "GOT CALIBRATION EVENT FROM DOWNSTREAM";
return camera_undistort_calibration_event(undist, event);
}
}
return GST_BASE_TRANSFORM_CLASS (gst_camera_undistort_parent_class)->src_event (trans, event);
}

View File

@ -0,0 +1,104 @@
/*
* GStreamer
* Copyright (C) <2017> Philippe Renon <philippe_renon@yahoo.fr>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_CAMERA_UNDISTORT_H__
#define __GST_CAMERA_UNDISTORT_H__
#include <gst/gst.h>
#include <gst/opencv/gstopencvvideofilter.h>
#include <opencv2/core.hpp>
G_BEGIN_DECLS
#define GST_TYPE_CAMERA_UNDISTORT \
(gst_camera_undistort_get_type())
#define GST_CAMERA_UNDISTORT(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_UNDISTORT,GstCameraUndistort))
#define GST_CAMERA_UNDISTORT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_UNDISTORT,GstCameraUndistortClass))
#define GST_IS_CAMERA_UNDISTORT(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_UNDISTORT))
#define GST_IS_CAMERA_UNDISTORT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_UNDISTORT))
typedef struct _GstCameraUndistort GstCameraUndistort;
typedef struct _GstCameraUndistortClass GstCameraUndistortClass;
struct _GstCameraUndistort
{
GstOpencvVideoFilter cvfilter;
//GRecMutex stream_lock;
// settings
bool showUndistorted;
float alpha;
bool crop;
// obscure string containing opencv calibration settings
gchar *settings;
// opencv calibration settings
cv::Mat cameraMatrix, distCoeffs;
// state
bool doUndistort;
bool settingsChanged;
cv::Size imageSize;
cv::Mat map1, map2;
cv::Rect validPixROI;
};
struct _GstCameraUndistortClass
{
GstOpencvVideoFilterClass parent_class;
};
GType gst_camera_undistort_get_type (void);
gboolean gst_camera_undistort_plugin_init (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_CAMERA_UNDISTORT_H__ */

View File

@ -0,0 +1,23 @@
DEFINES += GST_PLUGIN_BUILD_STATIC
#CONFIG += link_pkgconfig
PKGCONFIG += gstreamer-base-1.0
opencv {
# there is no package for gst opencv yet...
GSTREAMER_SDK_DIR = $$system(pkg-config --variable=exec_prefix gstreamer-1.0)
LIBS += -L$${GSTREAMER_SDK_DIR}/lib/gstreamer-1.0/opencv
LIBS += -lgstopencv-1.0
HEADERS += \
plugins/cameracalibration/camerautils.hpp \
plugins/cameracalibration/cameraevent.hpp \
plugins/cameracalibration/gstcameracalibration.h \
plugins/cameracalibration/gstcameraundistort.h
SOURCES += \
plugins/cameracalibration/camerautils.cpp \
plugins/cameracalibration/cameraevent.cpp \
plugins/cameracalibration/gstcameracalibration.cpp \
plugins/cameracalibration/gstcameraundistort.cpp
}

View File

@ -0,0 +1,67 @@
###############################################################################
# General
###############################################################################
Enable GStreamer support by adding GCS_WITH_GSTREAMER=1 to your build config file.
From the root of your source directory this can be done with:
make config_append GCS_WITH_GSTREAMER=1
The build config file is located at the root of your source directory.
###############################################################################
# Windows (msys2)
###############################################################################
i686:
$ pacman -S mingw-w64-i686-gst-plugins-base mingw-w64-i686-gst-plugins-good mingw-w64-i686-gst-plugins-bad mingw-w64-i686-gst-plugins-ugly mingw-w64-i686-gst-libav
x86_64:
$ pacman -S mingw-w64-x86_64-gst-plugins-base mingw-w64-x86_64-gst-plugins-good mingw-w64-x86_64-gst-plugins-bad mingw-w64-x86_64-gst-plugins-ugly mingw-w64-x86_64-gst-libav
###############################################################################
# Linux
###############################################################################
Get all the gstreamer libraries.
This might work:
Add the repository ppa:gstreamer-developers/ppa using Synaptic Package Manager or the CLI:
> sudo add-apt-repository ppa:gstreamer-developers/ppa
> sudo apt-get update
Then install the runtime and development packages using Synaptic Package Manager or the CLI:
> sudo apt-get install gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav
> sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
###############################################################################
# Mac
###############################################################################
brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly gst-libav libav x264
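###############################################################################
# Verifying the installation
###############################################################################
On any platform, a quick way to check that GStreamer and its base plugins are
installed correctly is to run a test pipeline (illustrative):
$ gst-launch-1.0 videotestsrc ! videoconvert ! autovideosink
A window showing a test pattern should appear.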
###############################################################################
# How to find required libraries (for copydata.pro)
###############################################################################
Use gst-inspect on an element or plugin and look at the Filename field in the Plugin Details section.
$ gst-inspect-1.0.exe ksvideosrc
Factory Details:
Rank none (0)
Long-name KsVideoSrc
Klass Source/Video
Description Stream data from a video capture device through Windows kernel streaming
Author Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
Haakon Sporsheim <hakon.sporsheim@tandberg.com>
Andres Colubri <andres.colubri@gmail.com>
Plugin Details:
Name winks
Description Windows kernel streaming plugin
Filename C:\msys64\mingw64\lib\gstreamer-1.0\libgstwinks.dll
Version 1.6.3
License LGPL
Source module gst-plugins-bad
Source release date 2016-01-20
Binary package GStreamer
Origin URL http://gstreamer.net/
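To extract just the library path, you can filter the output:
$ gst-inspect-1.0.exe ksvideosrc | grep Filename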

View File

@ -0,0 +1,854 @@
/**
******************************************************************************
*
* @file videowidget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videowidget.h"
#include "gst_util.h"
#include "overlay.h"
#include "pipelineevent.h"
// #include "devicemonitor.h"
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QtCore>
#include <QPainter>
#include <QDebug>
#include <QRect>
#include <QTextDocument>
#include <string>
// TODO find a better way and move away from this file
static Pipeline::State cvt(GstState state);
static const char *name(Pipeline::State state);
static ProgressEvent::ProgressType cvt(GstProgressType type);
class GstOverlayImpl : public Overlay {
public:
GstOverlayImpl(GstVideoOverlay *gst_overlay) :
gst_overlay(gst_overlay)
{}
void expose()
{
if (gst_overlay) {
gst_video_overlay_expose(gst_overlay);
}
}
private:
GstVideoOverlay *gst_overlay;
};
class BusSyncHandler {
public:
BusSyncHandler(VideoWidget *widget, WId wid) :
widget(widget), wId(wid)
{}
bool handleMessage(GstMessage *msg);
private:
VideoWidget *widget;
WId wId;
};
static GstElement *createPipelineFromDesc(const char *, QString &lastError);
static GstBusSyncReply gst_bus_sync_handler(GstBus *, GstMessage *, BusSyncHandler *);
VideoWidget::VideoWidget(QWidget *parent) :
QWidget(parent), pipeline(NULL), overlay(NULL)
{
qDebug() << "VideoWidget::VideoWidget";
// initialize gstreamer
gst::init(NULL, NULL);
// foreach(Device d, m.devices()) {
// qDebug() << d.displayName();
// }
// make the widget native so it gets its own native window id that we will pass to gstreamer
setAttribute(Qt::WA_NativeWindow);
#ifdef Q_OS_MAC
// WA_DontCreateNativeAncestors is needed on mac
setAttribute(Qt::WA_DontCreateNativeAncestors);
#endif
// set black background
QPalette pal(palette());
pal.setColor(backgroundRole(), Qt::black);
setPalette(pal);
// calling winId() will realize the window if it is not yet realized
// so we need to call winId() here and not later from a gstreamer thread...
WId wid = winId();
qDebug() << "VideoWidget::VideoWidget - video winId :" << (gulong)wid;
handler = new BusSyncHandler(this, wid);
// init widget state (see setOverlay() for more information)
// setOverlay(NULL);
setAutoFillBackground(true);
setAttribute(Qt::WA_OpaquePaintEvent, false);
setAttribute(Qt::WA_PaintOnScreen, false);
// init state
lastError = "";
}
VideoWidget::~VideoWidget()
{
if (pipeline) {
dispose();
}
if (handler) {
delete handler;
handler = NULL;
}
}
bool VideoWidget::isPlaying()
{
return pipeline && (GST_STATE(pipeline) == GST_STATE_PLAYING);
}
QString VideoWidget::pipelineDesc()
{
return m_pipelineDesc;
}
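// Sets the gst-launch style description used to build the pipeline.
// To have the video embedded in this widget, the description should typically end in a
// video sink that implements GstVideoOverlay (e.g. autovideosink); see BusSyncHandler below.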
void VideoWidget::setPipelineDesc(QString pipelineDesc)
{
qDebug() << "VideoWidget::setPipelineDesc -" << pipelineDesc;
stop();
this->m_pipelineDesc = pipelineDesc;
}
void VideoWidget::start()
{
qDebug() << "VideoWidget::start -" << m_pipelineDesc;
init();
update();
if (pipeline) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
}
}
void VideoWidget::pause()
{
qDebug() << "VideoWidget::pause -" << m_pipelineDesc;
init();
update();
if (pipeline) {
if (GST_STATE(pipeline) == GST_STATE_PAUSED) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
} else if (GST_STATE(pipeline) == GST_STATE_PLAYING) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
}
}
}
void VideoWidget::stop()
{
qDebug() << "VideoWidget::stop -" << m_pipelineDesc;
if (pipeline) {
dispose();
} else {
// emit fake state change event. this is needed by the UI...
emit stateChanged(Pipeline::Null, Pipeline::Null, Pipeline::VoidPending);
}
update();
}
void VideoWidget::init()
{
if (pipeline) {
// if pipeline is already created, reset some state and return
qDebug() << "VideoWidget::init - reseting pipeline state :" << m_pipelineDesc;
lastError = "";
return;
}
// reset state
lastError = "";
// create pipeline
qDebug() << "VideoWidget::init - initializing pipeline :" << m_pipelineDesc;
pipeline = createPipelineFromDesc(m_pipelineDesc.toStdString().c_str(), lastError);
if (pipeline) {
gst_pipeline_set_auto_flush_bus(GST_PIPELINE(pipeline), true);
// register bus synchronous handler
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)gst_bus_sync_handler, handler, NULL);
gst_object_unref(bus);
} else {
// emit fake state change event. this is needed by the UI...
emit stateChanged(Pipeline::Null, Pipeline::Null, Pipeline::VoidPending);
}
}
void VideoWidget::dispose()
{
qDebug() << "VideoWidget::dispose -" << m_pipelineDesc;
setOverlay(NULL);
if (pipeline) {
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
pipeline = NULL;
}
}
void VideoWidget::paintEvent(QPaintEvent *event)
{
if (overlay) {
overlay->expose();
} else {
QWidget::paintEvent(event);
paintStatus(event);
}
}
void VideoWidget::paintStatus(QPaintEvent *event)
{
Q_UNUSED(event);
QTextDocument doc;
doc.setDefaultStyleSheet("* { color:red; }");
QString html = "<p align=center><font size=+2>" + getStatusMessage() + "</font></p>";
QRect widgetRect = QWidget::rect();
int x = 0;
int w = widgetRect.width();
int hh = widgetRect.height() / 4;
int y = (widgetRect.height() - hh) / 2;
int h = widgetRect.height() - y;
QRect rect = QRect(x, y, w, h);
doc.setHtml(html);
doc.setTextWidth(rect.width());
QPainter painter(this);
painter.save();
painter.translate(rect.topLeft());
doc.drawContents(&painter, rect.translated(-rect.topLeft()));
painter.restore();
// painter.drawRect(rect);
// QBrush brush( Qt::yellow );
// painter.setBrush( brush ); // set the yellow brush
// painter.setPen( Qt::NoPen ); // do not draw outline
// painter.drawRect(0, 0, width(), height()); // draw filled rectangle
// painter.end();
// QFont font = QApplication::font();
// font.setPixelSize( rect.height() );
// painter.setFont( font );
}
QString VideoWidget::getStatus()
{
if (!lastError.isEmpty()) {
return "ERROR";
} else if (!pipeline && m_pipelineDesc.isEmpty()) {
return "NO PIPELINE";
}
return "";
}
QString VideoWidget::getStatusMessage()
{
if (!lastError.isEmpty()) {
return lastError;
} else if (!pipeline && m_pipelineDesc.isEmpty()) {
return "No pipeline";
}
return "";
}
void VideoWidget::mouseDoubleClickEvent(QMouseEvent *event)
{
Q_UNUSED(event);
}
void VideoWidget::resizeEvent(QResizeEvent *event)
{
if (overlay) {
overlay->expose();
} else {
QWidget::resizeEvent(event);
}
}
QPaintEngine *VideoWidget::paintEngine() const
{
// bypass double buffering, see setOverlay() for explanation
return overlay ? NULL : QWidget::paintEngine();
}
static Pipeline::State cvt(GstState state)
{
switch (state) {
case GST_STATE_VOID_PENDING:
return Pipeline::VoidPending;
case GST_STATE_NULL:
return Pipeline::Null;
case GST_STATE_READY:
return Pipeline::Ready;
case GST_STATE_PAUSED:
return Pipeline::Paused;
case GST_STATE_PLAYING:
return Pipeline::Playing;
}
return Pipeline::Null;
}
static const char *name(Pipeline::State state)
{
switch (state) {
case Pipeline::VoidPending:
return "VoidPending";
case Pipeline::Null:
return "Null";
case Pipeline::Ready:
return "Ready";
case Pipeline::Paused:
return "Paused";
case Pipeline::Playing:
return "Playing";
}
return "<unknown>";
}
// static StreamStatusEvent::StreamStatusType cvt(GstStreamStatusType type)
// {
// switch (type) {
// case GST_STREAM_STATUS_TYPE_CREATE:
// return StreamStatusEvent::Create;
//
// case GST_STREAM_STATUS_TYPE_ENTER:
// return StreamStatusEvent::Enter;
//
// case GST_STREAM_STATUS_TYPE_LEAVE:
// return StreamStatusEvent::Leave;
//
// case GST_STREAM_STATUS_TYPE_DESTROY:
// return StreamStatusEvent::Destroy;
//
// case GST_STREAM_STATUS_TYPE_START:
// return StreamStatusEvent::Start;
//
// case GST_STREAM_STATUS_TYPE_PAUSE:
// return StreamStatusEvent::Pause;
//
// case GST_STREAM_STATUS_TYPE_STOP:
// return StreamStatusEvent::Stop;
// }
// return StreamStatusEvent::Null;
// }
static ProgressEvent::ProgressType cvt(GstProgressType type)
{
switch (type) {
case GST_PROGRESS_TYPE_START:
return ProgressEvent::Start;
case GST_PROGRESS_TYPE_CONTINUE:
return ProgressEvent::Continue;
case GST_PROGRESS_TYPE_COMPLETE:
return ProgressEvent::Complete;
case GST_PROGRESS_TYPE_CANCELED:
return ProgressEvent::Cancelled;
case GST_PROGRESS_TYPE_ERROR:
return ProgressEvent::Error;
}
return ProgressEvent::Error;
}
bool VideoWidget::event(QEvent *event)
{
if (event->type() == PipelineEvent::PrepareWindowId) {
PrepareWindowIdEvent *pe = static_cast<PrepareWindowIdEvent *>(event);
// we take ownership of the overlay object
setOverlay(pe->getOverlay());
QString msg = QString("PrepareWindowId: element %0 prepare window id").arg(pe->src);
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::StateChange) {
StateChangedEvent *sce = static_cast<StateChangedEvent *>(event);
QString msg = QString("StateChange: element %0 changed state from %1 to %2")
.arg(sce->src).arg(name(sce->getOldState())).arg(name(sce->getNewState()));
emitEventMessage(msg);
emit stateChanged(sce->getOldState(), sce->getNewState(), sce->getPendingState());
if (sce->getNewState() == Pipeline::Playing) {
if (pipeline) {
toDotFile("pipeline");
}
}
return true;
} else if (event->type() == PipelineEvent::StreamStatus) {
StreamStatusEvent *sse = static_cast<StreamStatusEvent *>(event);
QString msg = QString("StreamStatus: %0 %1 (%2)").arg(sse->src).arg(sse->getStatusName()).arg(sse->getOwner());
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::NewClock) {
NewClockEvent *nce = static_cast<NewClockEvent *>(event);
QString msg = QString("NewClock : element %0 has new clock %1").arg(nce->src).arg(nce->getName());
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::ClockProvide) {
ClockProvideEvent *cpe = static_cast<ClockProvideEvent *>(event);
QString msg = QString("ClockProvide: element %0 clock provide %1 ready=%2").arg(cpe->src).arg(cpe->getName()).arg(cpe->isReady());
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::ClockLost) {
ClockLostEvent *cle = static_cast<ClockLostEvent *>(event);
QString msg = QString("ClockLost: element %0 lost clock %1").arg(cle->src).arg(cle->getName());
emitEventMessage(msg);
// PRINT ("Clock lost, selecting a new one\n");
// gst_element_set_state (pipeline, GST_STATE_PAUSED);
// gst_element_set_state (pipeline, GST_STATE_PLAYING);
return true;
} else if (event->type() == PipelineEvent::Progress) {
ProgressEvent *pe = static_cast<ProgressEvent *>(event);
QString msg = QString("Progress: element %0 sent progress event: %1 %2 (%3)").arg(pe->src).arg(pe->getProgressType()).arg(
pe->getCode()).arg(pe->getText());
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::Latency) {
LatencyEvent *le = static_cast<LatencyEvent *>(event);
QString msg = QString("Latency: element %0 sent latency event").arg(le->src);
emitEventMessage(msg);
bool success = gst_bin_recalculate_latency(GST_BIN(pipeline));
if (!success) {
qWarning() << "Failed to recalculate latency";
}
return true;
} else if (event->type() == PipelineEvent::Qos) {
QosEvent *qe = static_cast<QosEvent *>(event);
QString msg = QString("Qos: element %0 sent QOS event: %1 %2 %3").arg(qe->src).arg(qe->getData().timestamps()).arg(
qe->getData().values()).arg(qe->getData().stats());
emitEventMessage(msg);
if (pipeline) {
toDotFile("pipeline_qos");
}
return true;
} else if (event->type() == PipelineEvent::Eos) {
QString msg = QString("Eos: element %0 sent EOS event");
emitEventMessage(msg);
if (pipeline) {
toDotFile("pipeline_eos");
}
return true;
} else if (event->type() == PipelineEvent::Error) {
ErrorEvent *ee = static_cast<ErrorEvent *>(event);
QString msg = QString("Error: element %0 sent error event: %1 (%2)").arg(ee->src).arg(ee->getMessage()).arg(
ee->getDebug());
emitEventMessage(msg);
if (lastError.isEmpty()) {
// remember first error only (usually the most useful)
lastError = QString("Pipeline error: %0").arg(ee->getMessage());
// stop pipeline...
stop();
} else {
// TODO record subsequent errors separately
}
return true;
} else if (event->type() == PipelineEvent::Warning) {
WarningEvent *we = static_cast<WarningEvent *>(event);
QString msg = QString("Warning: element %0 sent warning event: %1 (%2)").arg(we->src).arg(we->getMessage()).arg(
we->getDebug());
emitEventMessage(msg);
return true;
} else if (event->type() == PipelineEvent::Info) {
InfoEvent *ie = static_cast<InfoEvent *>(event);
QString msg = QString("Info: element %0 sent info event: %1 (%2)").arg(ie->src).arg(ie->getMessage()).arg(
ie->getDebug());
emitEventMessage(msg);
return true;
}
return QWidget::event(event);
}
void VideoWidget::emitEventMessage(QString msg)
{
// qDebug() << "VideoWidget::event -" << msg;
emit message(msg);
}
void VideoWidget::setOverlay(Overlay *overlay)
{
if (this->overlay != overlay) {
Overlay *oldOverlay = this->overlay;
this->overlay = overlay;
if (oldOverlay) {
delete oldOverlay;
}
}
bool hasOverlay = overlay ? true : false;
setAutoFillBackground(!hasOverlay);
// disable background painting to avoid flickering when resizing
setAttribute(Qt::WA_OpaquePaintEvent, hasOverlay);
// setAttribute(Qt::WA_NoSystemBackground, hasOverlay); // not sure it is needed
// disable double buffering to avoid flickering when resizing
// for this to work we also need to override paintEngine() and make it return NULL.
// see http://qt-project.org/faq/answer/how_does_qtwa_paintonscreen_relate_to_the_backing_store_widget_composition_
// drawback is that this widget won't participate in composition...
setAttribute(Qt::WA_PaintOnScreen, hasOverlay);
}
void VideoWidget::toDotFile(QString name)
{
if (!pipeline) {
return;
}
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_VERBOSE, name.toStdString().c_str());
}
static GstElement *createPipelineFromDesc(const char *desc, QString &lastError)
{
qDebug() << "VideoWidget::createPipelineFromDesc - creating pipeline :" << desc;
GError *error = NULL;
GstElement *pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &error);
if (!pipeline) {
if (error) {
// no pipeline and error...
// report error to user
QString msg = QString("Failed to create pipeline: %0").arg(error->message);
qCritical() << "VideoWidget::createPipelineFromDesc -" << msg;
lastError = msg;
} else {
// no pipeline and no error...
// report generic error
QString msg = QString("Failed to create pipeline (no error reported!)");
qCritical() << "VideoWidget::createPipelineFromDesc -" << msg;
lastError = msg;
}
} else if (error) {
// pipeline and error...
// report error to user?
// warning?
QString msg = QString("Created pipeline with error: %0").arg(error->message);
qWarning() << "VideoWidget::createPipelineFromDesc -" << msg;
} else {
// qDebug() << gst_bin_get_by_name(GST_BIN(pipeline), "videotestsrc0");
}
if (error) {
g_error_free(error);
}
return pipeline;
}
bool BusSyncHandler::handleMessage(GstMessage *message)
{
// this method is called by gstreamer as a callback
// and as such is not necessarily called on the QT event handling thread
bool handled = false;
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_ELEMENT:
{
if (gst_is_video_overlay_prepare_window_handle_message(message)) {
qDebug().noquote() << QString("VideoWidget::handleMessage - element %0 prepare window with id #%1").arg(GST_OBJECT_NAME(message->src)).arg((gulong)wId);
// the prepare-window-handle message must be handled synchronously in order to have gstreamer use our window
GstVideoOverlay *gst_video_overlay = GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message));
gst_video_overlay_set_window_handle(gst_video_overlay, (gulong)wId);
// and now post event asynchronously
Overlay *overlay = new GstOverlayImpl(gst_video_overlay);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new PrepareWindowIdEvent(src, overlay));
// notify that the message was handled
handled = true;
}
break;
}
case GST_MESSAGE_STATE_CHANGED:
{
if (GST_IS_PIPELINE(message->src)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(message, &old_state, &new_state, &pending_state);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new StateChangedEvent(src, cvt(old_state), cvt(new_state), cvt(pending_state)));
}
break;
}
case GST_MESSAGE_STREAM_STATUS:
{
GstStreamStatusType type;
GstElement *owner;
gst_message_parse_stream_status(message, &type, &owner);
// QString src(GST_OBJECT_NAME(message->src));
// QString name(GST_OBJECT_NAME(owner));
// QCoreApplication::postEvent(widget, new StreamStatusEvent(src, cvt(type), name));
break;
}
case GST_MESSAGE_NEW_CLOCK:
{
if (GST_IS_PIPELINE(message->src)) {
GstClock *clock;
gst_message_parse_new_clock(message, &clock);
QString src(GST_OBJECT_NAME(message->src));
QString name(GST_OBJECT_NAME(clock));
QCoreApplication::postEvent(widget, new NewClockEvent(src, name));
}
break;
}
case GST_MESSAGE_CLOCK_PROVIDE:
{
if (GST_IS_PIPELINE(message->src)) {
GstClock *clock;
gboolean ready;
gst_message_parse_clock_provide(message, &clock, &ready);
QString src(GST_OBJECT_NAME(message->src));
QString name(GST_OBJECT_NAME(clock));
QCoreApplication::postEvent(widget, new ClockProvideEvent(src, name, ready));
}
break;
}
case GST_MESSAGE_CLOCK_LOST:
{
if (GST_IS_PIPELINE(message->src)) {
GstClock *clock;
gst_message_parse_clock_lost(message, &clock);
QString src(GST_OBJECT_NAME(message->src));
QString name(GST_OBJECT_NAME(clock));
QCoreApplication::postEvent(widget, new ClockLostEvent(src, name));
}
break;
}
case GST_MESSAGE_PROGRESS:
{
GstProgressType type;
gchar *code;
gchar *text;
gst_message_parse_progress(message, &type, &code, &text);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new ProgressEvent(src, cvt(type), QString(code), QString(text)));
g_free(code);
g_free(text);
break;
}
case GST_MESSAGE_SEGMENT_START:
{
GstFormat format;
gint64 position;
gst_message_parse_segment_start(message, &format, &position);
// QString src(GST_OBJECT_NAME(message->src));
// QCoreApplication::postEvent(widget, new InfoEvent(src, QString("Segment start %0").arg(position), ""));
break;
}
case GST_MESSAGE_SEGMENT_DONE:
{
GstFormat format;
gint64 position;
gst_message_parse_segment_done(message, &format, &position);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new InfoEvent(src, QString("Segment done %0").arg(position), ""));
break;
}
case GST_MESSAGE_LATENCY:
{
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new LatencyEvent(src));
break;
}
case GST_MESSAGE_BUFFERING:
{
gint percent;
gst_message_parse_buffering(message, &percent);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new InfoEvent(src, QString("%0%").arg(percent), ""));
break;
}
case GST_MESSAGE_QOS:
{
QosData data;
gboolean live;
guint64 running_time;
guint64 stream_time;
guint64 timestamp;
guint64 duration;
gst_message_parse_qos(message, &live, &running_time, &stream_time, &timestamp, &duration);
data.live = (live == true);
data.running_time = running_time;
data.stream_time = stream_time;
data.timestamp = timestamp;
data.duration = duration;
gint64 jitter;
gdouble proportion;
gint quality;
gst_message_parse_qos_values(message, &jitter, &proportion, &quality);
data.jitter = jitter;
data.proportion = proportion;
data.quality = quality;
guint64 processed;
guint64 dropped;
gst_message_parse_qos_stats(message, NULL, &processed, &dropped);
data.processed = processed;
data.dropped = dropped;
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new QosEvent(src, data));
break;
}
case GST_MESSAGE_EOS:
{
/* end-of-stream */
// g_main_loop_quit (loop);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new EosEvent(src));
break;
}
case GST_MESSAGE_ERROR:
{
GError *err;
gchar *debug;
gst_message_parse_error(message, &err, &debug);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new ErrorEvent(src, QString(err->message), QString(debug)));
g_error_free(err);
g_free(debug);
break;
}
case GST_MESSAGE_WARNING:
{
GError *err;
gchar *debug;
gst_message_parse_warning(message, &err, &debug);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new WarningEvent(src, QString(err->message), QString(debug)));
g_error_free(err);
g_free(debug);
break;
}
case GST_MESSAGE_INFO:
{
GError *err;
gchar *debug;
gst_message_parse_info(message, &err, &debug);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new InfoEvent(src, QString(err->message), QString(debug)));
g_error_free(err);
g_free(debug);
break;
}
case GST_MESSAGE_TAG:
{
GstTagList *tags = NULL;
gst_message_parse_tag(message, &tags);
// QString src(GST_OBJECT_NAME(message->src));
// QCoreApplication::postEvent(widget, new InfoEvent(src, QString(err->message), QString(debug)));
gst_tag_list_unref(tags);
break;
}
default:
{
// const GstStructure *s;
// const gchar *name;
//
// s = gst_message_get_structure (message);
//
// name = gst_structure_get_name(s);
QString src(GST_OBJECT_NAME(message->src));
QCoreApplication::postEvent(widget, new InfoEvent(src, "Unhandled message", QString("%0").arg(GST_MESSAGE_TYPE_NAME(message))));
break;
}
}
return handled;
}
static GstBusSyncReply gst_bus_sync_handler(GstBus *bus, GstMessage *message, BusSyncHandler *handler)
{
Q_UNUSED(bus);
// qDebug().noquote() << QString("VideoWidget::gst_bus_sync_handler (%0) : %1 : %2")
// .arg((long)QThread::currentThreadId())
// .arg(GST_MESSAGE_SRC_NAME(message))
// .arg(GST_MESSAGE_TYPE_NAME(message));
if (handler->handleMessage(message)) {
gst_message_unref(message);
return GST_BUS_DROP;
}
return GST_BUS_PASS;
}

View File

@ -0,0 +1,94 @@
/**
******************************************************************************
*
* @file videogadgetwidget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @brief
* @see The GNU Public License (GPL) Version 3
* @defgroup
* @{
*
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOWIDGET_H_
#define VIDEOWIDGET_H_
#include "gst_global.h"
#include "pipeline.h"
#include "overlay.h"
#include <QWidget>
#include <QResizeEvent>
#include <QPaintEvent>
#include <QMouseEvent>
typedef struct _GstElement GstElement;
class BusSyncHandler;
class GST_LIB_EXPORT VideoWidget : public QWidget {
Q_OBJECT
public:
VideoWidget(QWidget *parent = 0);
~VideoWidget();
QString pipelineDesc();
void setPipelineDesc(QString pipelineDesc);
bool isPlaying();
public slots:
void start();
void pause();
void stop();
signals:
void message(QString);
void stateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState);
protected:
QString getStatus();
QString getStatusMessage();
void paintStatus(QPaintEvent *);
// QWidget overrides
void paintEvent(QPaintEvent *);
void resizeEvent(QResizeEvent *);
void mouseDoubleClickEvent(QMouseEvent *);
private:
void init();
void dispose();
void setOverlay(Overlay *);
void emitEventMessage(QString msg);
void toDotFile(QString name);
// QWidget overrides
bool event(QEvent *);
QPaintEngine *paintEngine() const;
QString m_pipelineDesc;
QString lastError;
GstElement *pipeline;
Overlay *overlay;
BusSyncHandler *handler;
// DeviceMonitor m;
};
#endif /* VIDEOWIDGET_H_ */

View File

@ -11,6 +11,6 @@ SUBDIRS = \
qwt \
sdlgamepad
osg {
SUBDIRS += osgearth
}
gstreamer:SUBDIRS += gstreamer
osg:SUBDIRS += osgearth

View File

@ -13,24 +13,24 @@ contains(QT_ARCH, x86_64) {
# set debug suffix if needed
win32:CONFIG(debug, debug|release):DS = "d"
osg:linux {
linux:osg {
# copy osg libraries
data_copy.commands += $(MKDIR) $$GCS_LIBRARY_PATH/osg $$addNewline()
data_copy.commands += $(COPY_DIR) $$shell_quote($$OSG_SDK_DIR/$$LIB_DIR_NAME/)* $$shell_quote($$GCS_LIBRARY_PATH/osg/) $$addNewline()
}
osgearth:linux {
linux:osgearth {
# copy osgearth libraries
data_copy.commands += $(MKDIR) $$GCS_LIBRARY_PATH/osg $$addNewline()
data_copy.commands += $(COPY_DIR) $$shell_quote($$OSGEARTH_SDK_DIR/$$LIB_DIR_NAME/)* $$shell_quote($$GCS_LIBRARY_PATH/osg/) $$addNewline()
}
osg:macx {
macx:osg {
# copy osg libraries
data_copy.commands += $(COPY_DIR) $$shell_quote($$OSG_SDK_DIR/lib/)* $$shell_quote($$GCS_LIBRARY_PATH/) $$addNewline()
}
osgearth:macx {
macx:osgearth {
# copy osgearth libraries
data_copy.commands += $(COPY_DIR) $$shell_quote($$OSGEARTH_SDK_DIR/lib/)* $$shell_quote($$GCS_LIBRARY_PATH/) $$addNewline()
}
@ -43,8 +43,8 @@ linux|macx {
QMAKE_EXTRA_TARGETS += data_copy
}
osg:win32 {
# osg and osgearth dependencies
win32:osg {
OSG_PLUGINS_DIR = $${OSG_SDK_DIR}/bin/osgPlugins-$${OSG_VERSION}
# osg libraries
OSG_LIBS += \
@ -71,7 +71,7 @@ osg:win32 {
for(lib, OSG_LIBS) {
addCopyFileTarget($${lib},$${OSG_SDK_DIR}/bin,$${GCS_APP_PATH})
win32:addCopyDependenciesTarget($${lib},$${OSG_SDK_DIR}/bin,$${GCS_APP_PATH})
addCopyDependenciesTarget($${lib},$${OSG_SDK_DIR}/bin,$${GCS_APP_PATH})
}
# osg plugins
@ -85,7 +85,8 @@ osg:win32 {
mingw_osgdb_zip$${DS}.dll \
mingw_osgdb_serializers_osg$${DS}.dll
osg_extra:OSG_PLUGINS = \
# more osg plugins
osg_more_plugins:OSG_PLUGINS = \
mingw_osgdb_3dc$${DS}.dll \
mingw_osgdb_ac$${DS}.dll \
mingw_osgdb_bmp$${DS}.dll \
@ -151,12 +152,12 @@ osg:win32 {
mingw_osgdb_serializers_osgvolume$${DS}.dll
for(lib, OSG_PLUGINS) {
addCopyFileTarget($${lib},$${OSG_SDK_DIR}/bin/osgPlugins-$${OSG_VERSION},$${GCS_LIBRARY_PATH}/osg/osgPlugins-$${OSG_VERSION})
win32:addCopyDependenciesTarget($${lib},$${OSG_SDK_DIR}/bin/osgPlugins-$${OSG_VERSION},$${GCS_APP_PATH})
addCopyFileTarget($${lib},$${OSG_PLUGINS_DIR},$${GCS_LIBRARY_PATH}/osg/osgPlugins-$${OSG_VERSION})
addCopyDependenciesTarget($${lib},$${OSG_PLUGINS_DIR},$${GCS_APP_PATH})
}
}
osgearth:win32 {
win32:osgearth {
# osgearth libraries
OSGEARTH_LIBS = \
libosgEarth$${DS}.dll \
@ -174,7 +175,7 @@ osgearth:win32 {
for(lib, OSGEARTH_LIBS) {
addCopyFileTarget($${lib},$${OSGEARTH_SDK_DIR}/bin,$${GCS_APP_PATH})
win32:addCopyDependenciesTarget($${lib},$${OSGEARTH_SDK_DIR}/bin,$${GCS_APP_PATH})
addCopyDependenciesTarget($${lib},$${OSGEARTH_SDK_DIR}/bin,$${GCS_APP_PATH})
}
# osgearth plugins
@ -187,7 +188,8 @@ osgearth:win32 {
mingw_osgdb_osgearth_xyz$${DS}.dll \
mingw_osgdb_osgearth_cache_filesystem$${DS}.dll
osgearth_extra:OSGEARTH_PLUGINS += \
# more osgearth plugins
more_osgearth_plugins:OSGEARTH_PLUGINS += \
mingw_osgdb_kml$${DS}.dll \
mingw_osgdb_osgearth_agglite$${DS}.dll \
mingw_osgdb_osgearth_arcgis_map_cache$${DS}.dll \
@ -225,7 +227,7 @@ osgearth:win32 {
mingw_osgdb_osgearth_yahoo$${DS}.dll
for(lib, OSGEARTH_PLUGINS) {
addCopyFileTarget($${lib},$${OSGEARTH_SDK_DIR}/bin/osgPlugins-$${OSG_VERSION},$${GCS_LIBRARY_PATH}/osg/osgPlugins-$${OSG_VERSION})
win32:addCopyDependenciesTarget($${lib},$${OSGEARTH_SDK_DIR}/bin/osgPlugins-$${OSG_VERSION},$${GCS_APP_PATH})
addCopyFileTarget($${lib},$${OSG_PLUGINS_DIR},$${GCS_LIBRARY_PATH}/osg/osgPlugins-$${OSG_VERSION})
addCopyDependenciesTarget($${lib},$${OSG_PLUGINS_DIR},$${GCS_APP_PATH})
}
}

View File

@ -27,9 +27,16 @@
#include "OSGImageNode.hpp"
#include <osg/Texture2D>
#include "utils/imagesource.hpp"
#include <osgDB/ReadFile>
#ifdef USE_GSTREAMER
#include "utils/gstreamer/gstimagesource.hpp"
#endif
#include <osg/Image>
#include <osg/Geometry>
#include <osg/Geode>
#include <osg/Texture2D>
#include <QUrl>
#include <QDebug>
@ -45,44 +52,111 @@ private:
osg::ref_ptr<osg::Texture2D> texture;
public:
QUrl url;
ImageSource *imageSource;
Hidden(OSGImageNode *self) : QObject(self), self(self), url()
{}
public:
QUrl imageUrl;
Hidden(OSGImageNode *self) : QObject(self), self(self), imageSource(NULL), imageUrl()
{
if (imageSource) {
delete imageSource;
}
}
osg::Node *createNode()
{
osg::Drawable *quad = osg::createTexturedQuadGeometry(osg::Vec3(0, 0, 0), osg::Vec3(1, 0, 0), osg::Vec3(0, 1, 0));
osg::Geode *geode = new osg::Geode;
geode->addDrawable(quad);
geode->setStateSet(createState());
osg::Geode *geode = new osg::Geode;
return geode;
}
osg::StateSet *createState()
osg::Image *loadImage()
{
texture = new osg::Texture2D;
// create the StateSet to store the texture data
osg::StateSet *stateset = new osg::StateSet;
stateset->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
return stateset;
if (!imageSource) {
if (imageUrl.scheme() == "gst") {
#ifdef USE_GSTREAMER
imageSource = new GstImageSource();
#else
qWarning() << "gstreamer image source is not supported";
#endif
} else {
imageSource = new ImageSource();
}
}
return imageSource ? imageSource->createImage(imageUrl) : 0;
}
void updateImageFile()
{
qDebug() << "OSGImageNode::updateImageFile - reading image file" << url.path();
osg::Image *image = osgDB::readImageFile(url.path().toStdString());
if (texture.valid()) {
texture->setImage(image);
update();
}
void update()
{
osg::Image *image = loadImage();
if (!image) {
return;
}
// qDebug() << "OSGImageNode::update" << image;
osg::Node *geode = createGeodeForImage(image);
self->setNode(geode);
}
osg::Geode *createGeodeForImage(osg::Image *image)
{
// vertex
osg::Vec3Array *coords = new osg::Vec3Array(4);
(*coords)[0].set(0, 1, 0);
(*coords)[1].set(0, 0, 0);
(*coords)[2].set(1, 0, 0);
(*coords)[3].set(1, 1, 0);
// texture coords
osg::Vec2Array *texcoords = new osg::Vec2Array(4);
float x_b = 0.0f;
float x_t = 1.0f;
float y_b = (image->getOrigin() == osg::Image::BOTTOM_LEFT) ? 0.0f : 1.0f;
float y_t = (image->getOrigin() == osg::Image::BOTTOM_LEFT) ? 1.0f : 0.0f;
(*texcoords)[0].set(x_b, y_t);
(*texcoords)[1].set(x_b, y_b);
(*texcoords)[2].set(x_t, y_b);
(*texcoords)[3].set(x_t, y_t);
// color
osg::Vec4Array *color = new osg::Vec4Array(1);
(*color)[0].set(1.0f, 1.0f, 1.0f, 1.0f);
// setup the geometry
osg::Geometry *geom = new osg::Geometry;
geom->setVertexArray(coords);
geom->setTexCoordArray(0, texcoords);
geom->setColorArray(color, osg::Array::BIND_OVERALL);
geom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::QUADS, 0, 4));
// set up the texture.
osg::Texture2D *texture = new osg::Texture2D;
texture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
texture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
texture->setResizeNonPowerOfTwoHint(false);
texture->setImage(image);
// set up the state.
osg::StateSet *state = new osg::StateSet;
state->setMode(GL_CULL_FACE, osg::StateAttribute::OFF);
state->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
state->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
geom->setStateSet(state);
// set up the geode.
osg::Geode *geode = new osg::Geode;
geode->addDrawable(geom);
return geode;
}
};
@ -96,17 +170,17 @@ OSGImageNode::~OSGImageNode()
delete h;
}
const QUrl OSGImageNode::imageFile() const
const QUrl OSGImageNode::imageUrl() const
{
return h->url;
return h->imageUrl;
}
void OSGImageNode::setImageFile(const QUrl &url)
void OSGImageNode::setImageUrl(QUrl &url)
{
if (h->url != url) {
h->url = url;
if (h->imageUrl != url) {
h->imageUrl = url;
setDirty(ImageFile);
emit imageFileChanged(url);
emit imageUrlChanged(url);
}
}

View File

@ -25,8 +25,7 @@
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef _H_OSGQTQUICK_IMAGENODE_H_
#define _H_OSGQTQUICK_IMAGENODE_H_
#pragma once
#include "Export.hpp"
#include "OSGNode.hpp"
@ -35,7 +34,7 @@
namespace osgQtQuick {
class OSGQTQUICK_EXPORT OSGImageNode : public OSGNode {
Q_OBJECT Q_PROPERTY(QUrl imageFile READ imageFile WRITE setImageFile NOTIFY imageFileChanged)
Q_OBJECT Q_PROPERTY(QUrl imageUrl READ imageUrl WRITE setImageUrl NOTIFY imageUrlChanged)
typedef OSGNode Inherited;
@ -43,11 +42,11 @@ public:
OSGImageNode(QObject *parent = 0);
virtual ~OSGImageNode();
const QUrl imageFile() const;
void setImageFile(const QUrl &url);
const QUrl imageUrl() const;
void setImageUrl(QUrl &url);
signals:
void imageFileChanged(const QUrl &url);
void imageUrlChanged(const QUrl &url);
protected:
virtual osg::Node *createNode();
@ -58,5 +57,3 @@ private:
Hidden *const h;
};
} // namespace osgQtQuick
#endif // _H_OSGQTQUICK_IMAGENODE_H_

View File

@ -59,7 +59,7 @@ public:
osg::Node *nodeToUpdate() const
{
return manipulator->getNode();
return manipulator ? manipulator->getNode() : NULL;
}
void update()

View File

@ -38,13 +38,15 @@ HEADERS += \
osgearth.h \
utils/qtwindowingsystem.h \
utils/utility.h \
utils/shapeutils.h
utils/shapeutils.h \
utils/imagesource.hpp
SOURCES += \
osgearth.cpp \
utils/qtwindowingsystem.cpp \
utils/utility.cpp \
utils/shapeutils.cpp
utils/shapeutils.cpp \
utils/imagesource.cpp
HEADERS += \
osgQtQuick/Export.hpp \
@ -83,6 +85,14 @@ SOURCES += \
osgQtQuick/ga/OSGNodeTrackerManipulator.cpp \
osgQtQuick/ga/OSGTrackballManipulator.cpp
gstreamer:HEADERS += \
utils/gstreamer/gstimagestream.hpp \
utils/gstreamer/gstimagesource.hpp
gstreamer:SOURCES += \
utils/gstreamer/gstimagestream.cpp \
utils/gstreamer/gstimagesource.cpp
osgearth:HEADERS += \
osgQtQuick/OSGSkyNode.hpp \
osgQtQuick/OSGGeoTransformNode.hpp

View File

@ -14,6 +14,11 @@ contains(QT_ARCH, x86_64) {
LIB_DIR_NAME = lib
}
gstreamer {
include(../gstreamer/gstreamer.pri)
include(../gstreamer/gstreamer_dependencies.pri)
}
osg {
OSG_SDK_DIR = $$clean_path($$(OSG_SDK_DIR))
message(Using osg from here: $$OSG_SDK_DIR)

View File

@ -0,0 +1,90 @@
/**
******************************************************************************
*
* @file gstimagesource.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2016.
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "gstimagesource.hpp"
#include "gstimagestream.hpp"
#include <osg/Image>
#include <QUrl>
#include <QDebug>
GstImageSource::GstImageSource() : is(NULL)
{}
GstImageSource::~GstImageSource()
{
if (is) {
delete is;
}
}
osg::Image *GstImageSource::createImage(QUrl &url)
{
// qDebug() << "GstImageSource::createImage - reading image file" << url.path();
QString pipeline = url.query(QUrl::FullyDecoded);
GSTImageStream *is = new GSTImageStream();
is->setPipeline(pipeline.toStdString());
this->is = is;
play();
return this->is;
}
void GstImageSource::play()
{
if (is) {
is->play();
}
}
void GstImageSource::pause()
{
if (is) {
is->pause();
}
}
void GstImageSource::rewind()
{
if (is) {
is->rewind();
}
}
void GstImageSource::seek(double time)
{
if (is) {
is->seek(time);
}
}

View File

@ -0,0 +1,52 @@
/**
******************************************************************************
*
* @file gstimagesource.hpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2016.
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#pragma once
#include "utils/imagesource.hpp"
namespace osg {
class Image;
}
class GSTImageStream;
class GstImageSource : public ImageSource {
public:
GstImageSource();
virtual ~GstImageSource();
virtual osg::Image *createImage(QUrl &url);
virtual void play();
virtual void pause();
virtual void rewind();
virtual void seek(double time);
private:
GSTImageStream *is;
};

View File

@ -0,0 +1,384 @@
/**
******************************************************************************
*
* @file gstimagestream.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* OpenSceneGraph, http://www.openscenegraph.org/
* Julen Garcia <jgarcia@vicomtech.org>
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "gstimagestream.hpp"
#include "gst_util.h"
#include <osgDB/ReaderWriter>
#include <QDebug>
GSTImageStream::GSTImageStream() :
_loop(0),
_pipeline(0),
_internal_buffer(0),
_width(0),
_height(0)
{
setOrigin(osg::Image::TOP_LEFT);
_loop = g_main_loop_new(NULL, FALSE);
}
GSTImageStream::GSTImageStream(const GSTImageStream & image, const osg::CopyOp & copyop) :
osg::ImageStream(image, copyop), OpenThreads::Thread(),
_loop(0),
_pipeline(0),
_internal_buffer(0),
_width(0),
_height(0)
{
setOrigin(osg::Image::TOP_LEFT);
_loop = g_main_loop_new(NULL, FALSE);
}
GSTImageStream::~GSTImageStream()
{
gst_element_set_state(_pipeline, GST_STATE_NULL);
gst_element_get_state(_pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); // wait until the state changed
g_main_loop_quit(_loop);
g_main_loop_unref(_loop);
free(_internal_buffer);
}
// osgDB::ReaderWriter::ReadResult readImage(const std::string & filename, const osgDB::ReaderWriter::Options *options)
// {
// const std::string ext = osgDB::getLowerCaseFileExtension(filename);
//
//// if (!acceptsExtension(ext)) return ReadResult::FILE_NOT_HANDLED;
//
// const std::string path = osgDB::containsServerAddress(filename) ?
// filename :
// osgDB::findDataFile(filename, options);
//
// if (path.empty()) {
// return osgDB::ReaderWriter::ReadResult::FILE_NOT_FOUND;
// }
//
// osg::ref_ptr<GSTImageStream> imageStream = new GSTImageStream();
//
// if (!imageStream->open(filename)) {
// return osgDB::ReaderWriter::ReadResult::FILE_NOT_HANDLED;
// }
//
// return imageStream.release();
// }
bool GSTImageStream::setPipeline(const std::string &pipeline)
{
GError *error = NULL;
// TODO not the most appropriate place to do that...
gst::init(NULL, NULL);
gchar *string = g_strdup_printf("%s", pipeline.c_str());
_pipeline = gst_parse_launch(string, &error);
// TODO make sure that there is "! videoconvert ! video/x-raw,format=RGB ! appsink name=sink emit-signals=true"
// TODO remove the need for a videoconvert element by adapting dynamically to format
// TODO try to use GL buffers...
g_free(string);
if (error) {
g_printerr("Error: %s\n", error->message);
g_error_free(error);
// TODO submit fix to osg...
return false;
}
if (_pipeline == NULL) {
return false;
}
// bus
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
gst_bus_add_watch(bus, (GstBusFunc)on_message, this);
gst_object_unref(bus);
// sink
GstElement *sink = gst_bin_get_by_name(GST_BIN(_pipeline), "sink");
g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample), this);
g_signal_connect(sink, "new-preroll", G_CALLBACK(on_new_preroll), this);
gst_object_unref(sink);
gst_element_set_state(_pipeline, GST_STATE_PAUSED);
gst_element_get_state(_pipeline, 0, 0, GST_CLOCK_TIME_NONE); // wait until the state changed
if (_width == 0 || _height == 0) {
// no valid image has been set up by an on_new_preroll() call.
return false;
}
// setLoopingMode(osg::ImageStream::NO_LOOPING);
// start the thread to run gstreamer main loop
start();
return true;
}
/*
bool GSTImageStream::open(const std::string & filename)
{
setFileName(filename);
GError *error = NULL;
// get stream info
bool has_audio_stream = false;
gchar *uri = g_filename_to_uri(filename.c_str(), NULL, NULL);
if (uri != 0 && gst_uri_is_valid(uri)) {
GstDiscoverer *item = gst_discoverer_new(1 * GST_SECOND, &error);
GstDiscovererInfo *info = gst_discoverer_discover_uri(item, uri, &error);
GList *audio_list = gst_discoverer_info_get_audio_streams(info);
if (g_list_length(audio_list) > 0) {
has_audio_stream = true;
}
gst_discoverer_info_unref(info);
g_free(uri);
}
// build pipeline
const gchar *audio_pipe = "";
if (has_audio_stream) {
audio_pipe = "deco. ! queue ! audioconvert ! autoaudiosink";
}
gchar *string = g_strdup_printf("filesrc location=%s ! \
decodebin name=deco \
deco. ! queue ! videoconvert ! video/x-raw,format=RGB ! appsink name=sink emit-signals=true \
%s", filename.c_str(), audio_pipe);
_pipeline = gst_parse_launch(string, &error);
g_free(string);
if (error) {
g_printerr("Error: %s\n", error->message);
g_error_free(error);
}
if (_pipeline == NULL) {
return false;
}
// bus
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
gst_bus_add_watch(bus, (GstBusFunc)on_message, this);
gst_object_unref(bus);
// sink
GstElement *sink = gst_bin_get_by_name(GST_BIN(_pipeline), "sink");
g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample), this);
g_signal_connect(sink, "new-preroll", G_CALLBACK(on_new_preroll), this);
gst_object_unref(sink);
gst_element_set_state(_pipeline, GST_STATE_PAUSED);
gst_element_get_state(_pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); // wait until the state changed
if (_width == 0 || _height == 0) {
// no valid image has been setup by a on_new_preroll() call.
return false;
}
// setLoopingMode(osg::ImageStream::NO_LOOPING);
// start the thread to run gstreamer main loop
start();
return true;
}
*/
// ** Controls **
void GSTImageStream::play()
{
gst_element_set_state(_pipeline, GST_STATE_PLAYING);
}
void GSTImageStream::pause()
{
gst_element_set_state(_pipeline, GST_STATE_PAUSED);
}
void GSTImageStream::rewind()
{
gst_element_seek_simple(_pipeline, GST_FORMAT_TIME, GstSeekFlags(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0);
}
void GSTImageStream::seek(double time)
{
gst_element_seek_simple(_pipeline, GST_FORMAT_TIME, GstSeekFlags(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), time * GST_MSECOND);
}
// ** Callback implementations **
GstFlowReturn GSTImageStream::on_new_sample(GstAppSink *appsink, GSTImageStream *user_data)
{
// get the buffer from appsink
GstSample *sample = gst_app_sink_pull_sample(appsink);
GstBuffer *buffer = gst_sample_get_buffer(sample);
if (!user_data->allocateInternalBuffer(sample)) {
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// upload data
GstMapInfo info;
if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) {
qWarning() << "Failed to map buffer";
// TODO
}
gsize size = gst_buffer_extract(buffer, 0, user_data->_internal_buffer, info.size);
if (size != info.size) {
qWarning() << "GSTImageStream::on_new_sample : extracted" << size << "/" << info.size;
// TODO
}
// data has been modified so dirty the image so the texture will be updated
user_data->dirty();
// clean resources
gst_buffer_unmap(buffer, &info);
gst_sample_unref(sample);
return GST_FLOW_OK;
}
GstFlowReturn GSTImageStream::on_new_preroll(GstAppSink *appsink, GSTImageStream *user_data)
{
qDebug() << "ON NEW PREROLL";
// get the sample from appsink
GstSample *sample = gst_app_sink_pull_preroll(appsink);
if (!user_data->allocateInternalBuffer(sample)) {
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// clean resources
gst_sample_unref(sample);
return GST_FLOW_OK;
}
gboolean GSTImageStream::on_message(GstBus *bus, GstMessage *message, GSTImageStream *user_data)
{
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
if (user_data->getLoopingMode() == osg::ImageStream::LOOPING) {
user_data->rewind();
}
}
return true;
}
bool GSTImageStream::allocateInternalBuffer(GstSample *sample)
{
// get sample info
GstCaps *caps = gst_sample_get_caps(sample);
GstStructure *structure = gst_caps_get_structure(caps, 0);
/*
GstVideoInfo info;
if (!gst_video_info_from_caps (&info, caps)) {
gchar *caps_str = gst_caps_to_string (caps);
GST_ERROR ("Failed to get video info from caps %s", caps_str);
//g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
// "Failed to get video info from caps %s", caps_str);
g_free (caps_str);
return FALSE;
GstVideoFormat format;
format = GST_VIDEO_INFO_FORMAT (info);
*/
int width;
int height;
gst_structure_get_int(structure, "width", &width);
gst_structure_get_int(structure, "height", &height);
if (width <= 0 || height <= 0) {
qCritical() << "invalid video size: width=" << width << ", height=" << height;
return false;
}
if (_width != width || _height != height) {
_width = width;
_height = height;
int row_width = width * 3;
if ((row_width % 4) != 0) {
row_width += (4 - (row_width % 4));
}
// qDebug() << "image width=" << width << ", height=" << height << row_width << (row_width * height);
// if buffer previously assigned free it before allocating new buffer.
if (_internal_buffer) {
free(_internal_buffer);
}
// allocate buffer
_internal_buffer = (unsigned char *)malloc(sizeof(unsigned char) * row_width * height);
// assign buffer to image
setImage(_width, _height, 1, GL_RGB, GL_RGB, GL_UNSIGNED_BYTE, _internal_buffer, osg::Image::NO_DELETE, 4);
}
return true;
}
void GSTImageStream::run()
{
g_main_loop_run(_loop);
}

View File

@ -0,0 +1,70 @@
/**
******************************************************************************
*
* @file gstimagestream.hpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* OpenSceneGraph, http://www.openscenegraph.org/
* Julen Garcia <jgarcia@vicomtech.org>
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#pragma once
#include <gst/app/gstappsink.h>
#include <osg/ImageStream>
#include <OpenThreads/Thread>
class GSTImageStream : public osg::ImageStream, public OpenThreads::Thread {
public:
GSTImageStream();
GSTImageStream(const GSTImageStream & image, const osg::CopyOp & copyop = osg::CopyOp::SHALLOW_COPY);
virtual ~GSTImageStream();
META_Object(osgGStreamer, GSTImageStream);
bool setPipeline(const std::string &pipeline);
virtual void play();
virtual void pause();
virtual void rewind();
virtual void seek(double time);
private:
virtual void run();
static gboolean on_message(GstBus *bus, GstMessage *message, GSTImageStream *user_data);
static GstFlowReturn on_new_sample(GstAppSink *appsink, GSTImageStream *user_data);
static GstFlowReturn on_new_preroll(GstAppSink *appsink, GSTImageStream *user_data);
bool allocateInternalBuffer(GstSample *sample);
GMainLoop *_loop;
GstElement *_pipeline;
unsigned char *_internal_buffer;
int _width;
int _height;
};

View File

@ -0,0 +1,40 @@
/**
******************************************************************************
*
* @file imagesource.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2016.
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "imagesource.hpp"
#include <osg/Image>
#include <osgDB/ReadFile>
#include <QDebug>
osg::Image *ImageSource::createImage(QUrl &url)
{
qDebug() << "ImageSource::createImage - reading image file" << url.path();
osg::Image *image = osgDB::readImageFile(url.path().toStdString());
return image;
}

View File

@ -0,0 +1,47 @@
/**
******************************************************************************
*
* @file imagesource.hpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2016.
* @addtogroup
* @{
* @addtogroup
* @{
* @brief
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#pragma once
#include <QUrl>
namespace osg {
class Image;
}
class ImageSource {
public:
ImageSource() {}
virtual ~ImageSource() {}
virtual osg::Image *createImage(QUrl &url);
virtual void play() {}
virtual void pause() {}
virtual void rewind() {}
virtual void seek(double time) {}
};

View File

@ -82,8 +82,9 @@ AboutDialog::AboutDialog(QWidget *parent) :
+ creditRow.arg("dRonin", "", "http://www.dronin.org")
+ creditRow.arg("OpenSceneGraph", "<br/>Open source high performance 3D graphics toolkit", "http://www.openscenegraph.org")
+ creditRow.arg("osgEarth", "<br/>Geospatial SDK for OpenSceneGraph", "http://osgearth.org")
+ creditRow.arg("MSYS2", "<br/>An independent rewrite of MSYS", "https://sourceforge.net/p/msys2/wiki/Home")
+ creditRow.arg("The Qt Company", "", "http://www.qt.io")
+ creditRow.arg("gstreamer", "<br/>Open source multimedia framework", "https://gstreamer.freedesktop.org/")
+ creditRow.arg("MSYS2", "<br/>An independent rewrite of MSYS", "https://github.com/msys2/msys2/wiki")
+ creditRow.arg("The Qt Company", "", "https://www.qt.io")
+ "</table>";
// uses Text.StyledText (see http://doc.qt.io/qt-5/qml-qtquick-text.html#textFormat-prop)

View File

@ -282,6 +282,19 @@ void PfdQmlContext::resetConsumedEnergy()
batterySettings->setData(batterySettings->getData());
}
QString PfdQmlContext::gstPipeline() const
{
return m_gstPipeline;
}
void PfdQmlContext::setGstPipeline(const QString &arg)
{
if (m_gstPipeline != arg) {
m_gstPipeline = arg;
emit gstPipelineChanged(gstPipeline());
}
}
void PfdQmlContext::loadConfiguration(PfdQmlGadgetConfiguration *config)
{
setSpeedFactor(config->speedFactor());
@ -307,6 +320,9 @@ void PfdQmlContext::loadConfiguration(PfdQmlGadgetConfiguration *config)
// background image
setBackgroundImageFile(config->backgroundImageFile());
// gstreamer pipeline
setGstPipeline(config->gstPipeline());
}

View File

@ -59,6 +59,9 @@ class PfdQmlContext : public QObject {
// background
Q_PROPERTY(QString backgroundImageFile READ backgroundImageFile WRITE setBackgroundImageFile NOTIFY backgroundImageFileChanged)
// gstreamer pipeline
Q_PROPERTY(QString gstPipeline READ gstPipeline WRITE setGstPipeline NOTIFY gstPipelineChanged)
public:
PfdQmlContext(QObject *parent = 0);
virtual ~PfdQmlContext();
@ -102,6 +105,10 @@ public:
QString backgroundImageFile() const;
void setBackgroundImageFile(const QString &arg);
// gstreamer pipeline
QString gstPipeline() const;
void setGstPipeline(const QString &arg);
Q_INVOKABLE void resetConsumedEnergy();
void loadConfiguration(PfdQmlGadgetConfiguration *config);
@ -130,6 +137,8 @@ signals:
void modelFileChanged(QString arg);
void backgroundImageFileChanged(QString arg);
void gstPipelineChanged(QString arg);
private:
// constants
static const QString CONTEXT_PROPERTY_NAME;
@ -156,6 +165,8 @@ private:
QString m_backgroundImageFile;
QString m_gstPipeline;
void addModelDir(QString dir);
};
#endif /* PFDQMLCONTEXT_H_ */

View File

@ -35,6 +35,7 @@
PfdQmlGadgetConfiguration::PfdQmlGadgetConfiguration(QString classId, QSettings &settings, QObject *parent) :
IUAVGadgetConfiguration(classId, parent)
{
// TODO move to some conversion utility class
m_speedMap[1.0] = "m/s";
m_speedMap[3.6] = "km/h";
m_speedMap[2.2369] = "mph";
@ -73,6 +74,9 @@ PfdQmlGadgetConfiguration::PfdQmlGadgetConfiguration(QString classId, QSettings
// background image
m_backgroundImageFile = settings.value("backgroundImageFile", "Unknown").toString();
m_backgroundImageFile = Utils::InsertDataPath(m_backgroundImageFile);
// gstreamer pipeline
m_gstPipeline = settings.value("gstPipeline").toString();
}
PfdQmlGadgetConfiguration::PfdQmlGadgetConfiguration(const PfdQmlGadgetConfiguration &obj) :
@ -104,6 +108,9 @@ PfdQmlGadgetConfiguration::PfdQmlGadgetConfiguration(const PfdQmlGadgetConfigura
// background image
m_backgroundImageFile = obj.m_backgroundImageFile;
// gstreamer pipeline
m_gstPipeline = obj.m_gstPipeline;
}
/**
@ -152,4 +159,7 @@ void PfdQmlGadgetConfiguration::saveConfig(QSettings &settings) const
// background image
QString backgroundImageFile = Utils::RemoveDataPath(m_backgroundImageFile);
settings.setValue("backgroundImageFile", backgroundImageFile);
// gstreamer pipeline
settings.setValue("gstPipeline", m_gstPipeline);
}

View File

@ -200,6 +200,15 @@ public:
m_backgroundImageFile = fileName;
}
QString gstPipeline() const
{
return m_gstPipeline;
}
void setGstPipeline(const QString &pipeline)
{
m_gstPipeline = pipeline;
}
QMapIterator<double, QString> speedMapIterator()
{
return QMapIterator<double, QString>(m_speedMap);
@ -234,6 +243,8 @@ private:
QString m_backgroundImageFile;
QString m_gstPipeline;
QMap<double, QString> m_speedMap;
QMap<double, QString> m_altitudeMap;
};

View File

@ -111,6 +111,9 @@ QWidget *PfdQmlGadgetOptionsPage::createPage(QWidget *parent)
options_page->backgroundImageFile->setPromptDialogTitle(tr("Choose Background Image File"));
options_page->backgroundImageFile->setPath(m_config->backgroundImageFile());
// gstreamer pipeline
options_page->pipelineTextEdit->setPlainText(m_config->gstPipeline());
#ifndef USE_OSG
options_page->showTerrain->setChecked(false);
options_page->showTerrain->setVisible(false);
@ -170,6 +173,8 @@ void PfdQmlGadgetOptionsPage::apply()
#else
m_config->setModelEnabled(false);
#endif
m_config->setGstPipeline(options_page->pipelineTextEdit->toPlainText());
}
void PfdQmlGadgetOptionsPage::finish()

View File

@ -138,7 +138,7 @@
<property name="currentIndex">
<number>0</number>
</property>
<widget class="QWidget" name="tab">
<widget class="QWidget" name="terrainTab">
<attribute name="title">
<string>Terrain</string>
</attribute>
@ -295,7 +295,7 @@
</item>
</layout>
</widget>
<widget class="QWidget" name="tab_3">
<widget class="QWidget" name="modelTab">
<attribute name="title">
<string>Model</string>
</attribute>
@ -391,7 +391,7 @@
</item>
</layout>
</widget>
<widget class="QWidget" name="tab_2">
<widget class="QWidget" name="environmentTab">
<attribute name="title">
<string>Environment</string>
</attribute>
@ -511,6 +511,43 @@
</item>
</layout>
</widget>
<widget class="QWidget" name="videoTab">
<attribute name="title">
<string>Video</string>
</attribute>
<layout class="QVBoxLayout" name="verticalLayout_10">
<item>
<widget class="QLabel" name="label_10">
<property name="text">
<string>Pipeline:</string>
</property>
</widget>
</item>
<item>
<widget class="QPlainTextEdit" name="pipelineTextEdit">
<property name="plainText">
<string notr="true"/>
</property>
<property name="placeholderText">
<string>&lt;enter your gstreamer pipeline here&gt;</string>
</property>
</widget>
</item>
<item>
<spacer name="verticalSpacer_4">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</widget>
</widget>
</item>
</layout>
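
The text entered in this field is a standard gst-launch style pipeline description; the code that actually builds and runs it lives in the GCS gstreamer library included via gstreamer.pri and is not shown in this diff. Purely as a hedged, self-contained illustration (not part of this commit), such a description is commonly turned into a running pipeline with GStreamer's gst_parse_launch():

#include <gst/gst.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    // Parse a gst-launch style description into a pipeline.
    // "videotestsrc" is used here only so the example runs anywhere;
    // an FPV feed would use a network source and a decoder instead.
    GError *error = NULL;
    GstElement *pipeline = gst_parse_launch("videotestsrc ! videoconvert ! autovideosink", &error);
    if (pipeline == NULL) {
        g_printerr("Failed to parse pipeline: %s\n", error->message);
        g_error_free(error);
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_usleep(5 * G_USEC_PER_SEC); // show the test pattern for a few seconds
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}

A real FPV setup would replace videotestsrc with a network source such as udpsrc, followed by the appropriate depayloader and decoder.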

View File

@ -231,6 +231,13 @@ plugin_flightlog.depends += plugin_uavobjects
plugin_flightlog.depends += plugin_uavtalk
SUBDIRS += plugin_flightlog
# Video plugin
gstreamer {
plugin_video.subdir = video
plugin_video.depends = plugin_coreplugin
SUBDIRS += plugin_video
}
# Usage Tracker plugin
plugin_usagetracker.subdir = usagetracker
plugin_usagetracker.depends = plugin_coreplugin

View File

@ -0,0 +1,10 @@
<plugin name="VideoGadget" version="1.0.0" compatVersion="1.0.0">
<vendor>The LibrePilot Project</vendor>
<copyright>(C) 2017 The LibrePilot Project</copyright>
<license>The GNU Public License (GPL) Version 3</license>
<description>A video gadget</description>
<url>http://www.librepilot.org</url>
<dependencyList>
<dependency name="Core" version="1.0.0"/>
</dependencyList>
</plugin>

View File

@ -0,0 +1,86 @@
/**
******************************************************************************
*
* @file helpdialog.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "helpdialog.h"
#include "ui_helpdialog.h"
#include <QDebug>
#include <QPushButton>
HelpDialog::HelpDialog(QWidget *parent, const QString &elementId)
: QDialog(parent),
m_windowWidth(0),
m_windowHeight(0)
{
Q_UNUSED(elementId)
m_helpDialog = new Ui_HelpDialog();
m_helpDialog->setupUi(this);
setWindowTitle(tr("GStreamer Help"));
if (m_windowWidth > 0 && m_windowHeight > 0) {
resize(m_windowWidth, m_windowHeight);
}
m_helpDialog->buttonBox->button(QDialogButtonBox::Close)->setDefault(true);
connect(m_helpDialog->buttonBox->button(QDialogButtonBox::Close), SIGNAL(clicked()), this, SLOT(close()));
m_helpDialog->splitter->setCollapsible(0, false);
m_helpDialog->splitter->setCollapsible(1, false);
connect(m_helpDialog->elementListWidget, SIGNAL(currentItemChanged(QListWidgetItem *, QListWidgetItem *)),
this, SLOT(pageSelected()));
QList<QString> plugins; // = gst::pluginList();
// foreach(QString pluginName, plugins) {
// new QListWidgetItem(pluginName, m_helpDialog->elementListWidget);
// }
}
HelpDialog::~HelpDialog()
{
// foreach(QString category, m_categoryItemsMap.keys()) {
// QList<QTreeWidgetItem *> *categoryItemList = m_categoryItemsMap.value(category);
// delete categoryItemList;
// }
}
void HelpDialog::itemSelected()
{}
void HelpDialog::close()
{}
bool HelpDialog::execDialog()
{
exec();
return true;
}
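
In this commit the element list stays empty: the call to gst::pluginList() is left commented out above. As a hedged sketch only (an assumption about one possible implementation, not the GCS gstreamer library's actual API), the installed plugins could be enumerated through the GStreamer registry:

#include <gst/gst.h>
#include <QList>
#include <QString>

// Hypothetical helper (not part of this commit): list the names of all
// GStreamer plugins found in the default registry. gst_init() must have
// been called before this runs.
static QList<QString> pluginList()
{
    QList<QString> names;
    GList *plugins = gst_registry_get_plugin_list(gst_registry_get());

    for (GList *item = plugins; item != NULL; item = item->next) {
        GstPlugin *plugin = GST_PLUGIN(item->data);
        names.append(QString::fromUtf8(gst_plugin_get_name(plugin)));
    }
    gst_plugin_list_free(plugins);
    return names;
}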

View File

@ -0,0 +1,60 @@
/**
******************************************************************************
*
* @file helpdialog.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef HELPDIALOG_H
#define HELPDIALOG_H
#include <QDialog>
#include <QList>
class Ui_HelpDialog;
class HelpDialog : public QDialog {
Q_OBJECT
public:
HelpDialog(QWidget *parent, const QString &initialElement = QString());
~HelpDialog();
// Run the dialog and return true if 'Ok' was chosen or 'Apply' was invoked
// at least once
bool execDialog();
private slots:
void itemSelected();
void close();
private:
Ui_HelpDialog *m_helpDialog;
QList<QString> m_elements;
int m_windowWidth;
int m_windowHeight;
};
#endif // HELPDIALOG_H

View File

@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>HelpDialog</class>
<widget class="QDialog" name="HelpDialog">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>697</width>
<height>476</height>
</rect>
</property>
<property name="windowTitle">
<string>Options</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout">
<item>
<widget class="QSplitter" name="splitter">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<widget class="QWidget" name="layoutWidget">
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QListWidget" name="pluginListWidget"/>
</item>
<item>
<widget class="QListWidget" name="elementListWidget"/>
</item>
</layout>
</widget>
<widget class="QTextBrowser" name="textBrowser"/>
</widget>
</item>
<item>
<widget class="QDialogButtonBox" name="buttonBox">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="standardButtons">
<set>QDialogButtonBox::Close</set>
</property>
</widget>
</item>
</layout>
</widget>
<resources/>
<connections>
<connection>
<sender>buttonBox</sender>
<signal>accepted()</signal>
<receiver>HelpDialog</receiver>
<slot>accept()</slot>
<hints>
<hint type="sourcelabel">
<x>297</x>
<y>361</y>
</hint>
<hint type="destinationlabel">
<x>297</x>
<y>193</y>
</hint>
</hints>
</connection>
<connection>
<sender>buttonBox</sender>
<signal>rejected()</signal>
<receiver>HelpDialog</receiver>
<slot>reject()</slot>
<hints>
<hint type="sourcelabel">
<x>297</x>
<y>361</y>
</hint>
<hint type="destinationlabel">
<x>297</x>
<y>193</y>
</hint>
</hints>
</connection>
</connections>
</ui>

Binary files not shown (added PNG media-control icons, 22x22 and 32x32 variants, under the video plugin resources directory; see video.qrc below).

View File

@ -0,0 +1,37 @@
TEMPLATE = lib
TARGET = VideoGadget
QT += widgets
include(../../plugin.pri)
include(../../plugins/coreplugin/coreplugin.pri)
include(../../libs/gstreamer/gstreamer.pri)
HEADERS += \
helpdialog.h \
videoplugin.h \
videogadgetconfiguration.h \
videogadget.h \
videogadgetwidget.h \
videogadgetfactory.h \
videogadgetoptionspage.h
SOURCES += \
helpdialog.cpp \
videoplugin.cpp \
videogadgetconfiguration.cpp \
videogadget.cpp \
videogadgetfactory.cpp \
videogadgetwidget.cpp \
videogadgetoptionspage.cpp
OTHER_FILES += \
VideoGadget.pluginspec
FORMS += \
helpdialog.ui \
video.ui \
videooptionspage.ui
RESOURCES += \
video.qrc

View File

@ -0,0 +1,33 @@
<RCC>
<qresource prefix="/video" >
<file>resources/22x22/media-eject.png</file>
<file>resources/32x32/media-eject.png</file>
<file>resources/22x22/media-playback-pause.png</file>
<file>resources/32x32/media-playback-pause.png</file>
<file>resources/22x22/media-playback-start.png</file>
<file>resources/32x32/media-playback-start.png</file>
<file>resources/22x22/media-playback-stop.png</file>
<file>resources/32x32/media-playback-stop.png</file>
<file>resources/22x22/media-record.png</file>
<file>resources/32x32/media-record.png</file>
<file>resources/22x22/media-seek-backward.png</file>
<file>resources/32x32/media-seek-backward.png</file>
<file>resources/22x22/media-seek-forward.png</file>
<file>resources/32x32/media-seek-forward.png</file>
<file>resources/22x22/media-skip-backward.png</file>
<file>resources/32x32/media-skip-backward.png</file>
<file>resources/22x22/media-skip-forward.png</file>
<file>resources/32x32/media-skip-forward.png</file>
<file>resources/22x22/utilities-terminal.png</file>
<file>resources/32x32/utilities-terminal.png</file>
</qresource>
</RCC>

View File

@ -0,0 +1,204 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>Form</class>
<widget class="QWidget" name="Form">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>400</width>
<height>572</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<property name="autoFillBackground">
<bool>false</bool>
</property>
<layout class="QVBoxLayout">
<property name="spacing">
<number>0</number>
</property>
<property name="leftMargin">
<number>0</number>
</property>
<property name="topMargin">
<number>0</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>0</number>
</property>
<item>
<widget class="VideoWidget" name="video" native="true">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Expanding">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
</widget>
</item>
<item>
<widget class="QTextBrowser" name="consoleTextBrowser">
<property name="sizePolicy">
<sizepolicy hsizetype="Expanding" vsizetype="Minimum">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="lineWrapMode">
<enum>QTextEdit::NoWrap</enum>
</property>
</widget>
</item>
<item>
<layout class="QVBoxLayout" name="verticalLayout">
<property name="spacing">
<number>0</number>
</property>
<property name="leftMargin">
<number>0</number>
</property>
<property name="topMargin">
<number>0</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>0</number>
</property>
<item>
<layout class="QHBoxLayout" name="horizontalLayout">
<property name="leftMargin">
<number>6</number>
</property>
<property name="topMargin">
<number>6</number>
</property>
<property name="rightMargin">
<number>6</number>
</property>
<property name="bottomMargin">
<number>6</number>
</property>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_3">
<item>
<widget class="QPushButton" name="startButton">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-start.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-start.png</normalon>:/video/resources/22x22/media-playback-start.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pauseButton">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-pause.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-pause.png</normalon>:/video/resources/22x22/media-playback-pause.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="stopButton">
<property name="mouseTracking">
<bool>true</bool>
</property>
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/media-playback-stop.png</normaloff>
<normalon>:/video/resources/22x22/media-playback-stop.png</normalon>:/video/resources/22x22/media-playback-stop.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
<item>
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
<widget class="QPushButton" name="consoleButton">
<property name="mouseTracking">
<bool>true</bool>
</property>
<property name="text">
<string/>
</property>
<property name="icon">
<iconset>
<normaloff>:/video/resources/22x22/utilities-terminal.png</normaloff>
<normalon>:/video/resources/22x22/utilities-terminal.png</normalon>:/video/resources/22x22/utilities-terminal.png</iconset>
</property>
<property name="iconSize">
<size>
<width>22</width>
<height>22</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</item>
</layout>
</item>
</layout>
</widget>
<customwidgets>
<customwidget>
<class>VideoWidget</class>
<extends>QWidget</extends>
<header>videowidget.h</header>
<container>1</container>
</customwidget>
</customwidgets>
<resources/>
<connections/>
</ui>

View File

@ -0,0 +1,46 @@
/**
******************************************************************************
*
* @file videogadget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
#include "videogadgetwidget.h"
#include "videogadget.h"
VideoGadget::VideoGadget(QString classId, VideoGadgetWidget *widget, QWidget *parent) :
IUAVGadget(classId, parent),
m_widget(widget)
{}
VideoGadget::~VideoGadget()
{
delete m_widget;
}
void VideoGadget::loadConfiguration(IUAVGadgetConfiguration *config)
{
VideoGadgetConfiguration *m = qobject_cast<VideoGadgetConfiguration *>(config);
m_widget->setConfiguration(m);
}

View File

@ -0,0 +1,70 @@
/**
******************************************************************************
*
* @file videogadget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGET_H_
#define VIDEOGADGET_H_
#include <coreplugin/iuavgadget.h>
#include "videogadgetwidget.h"
namespace Core {
class IUAVGadget;
}
class IUAVGadget;
class QWidget;
class QString;
class VideoGadgetWidget;
using namespace Core;
class VideoGadget : public Core::IUAVGadget {
Q_OBJECT
public:
VideoGadget(QString classId, VideoGadgetWidget *widget, QWidget *parent = 0);
~VideoGadget();
QList<int> context() const
{
return m_context;
}
QWidget *widget()
{
return m_widget;
}
void loadConfiguration(IUAVGadgetConfiguration *config);
QString contextHelpId() const
{
return QString();
}
private:
VideoGadgetWidget *m_widget;
QList<int> m_context;
};
#endif // VIDEOGADGET_H_

View File

@ -0,0 +1,73 @@
/**
******************************************************************************
*
* @file videogadgetconfiguration.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
VideoGadgetConfiguration::VideoGadgetConfiguration(QString classId, QSettings &settings, QObject *parent) :
IUAVGadgetConfiguration(classId, parent)
{
m_displayVideo = settings.value("displayVideo").toBool();
m_autoStart = settings.value("autoStart").toBool();
m_displayControls = settings.value("displayControls").toBool();
m_respectAspectRatio = settings.value("respectAspectRatio").toBool();
m_pipelineDesc = settings.value("pipelineDesc").toString();
m_pipelineInfo = settings.value("pipelineInfo").toString();
}
VideoGadgetConfiguration::VideoGadgetConfiguration(const VideoGadgetConfiguration &obj) :
IUAVGadgetConfiguration(obj.classId(), obj.parent())
{
m_displayVideo = obj.m_displayVideo;
m_autoStart = obj.m_autoStart;
m_displayControls = obj.m_displayControls;
m_respectAspectRatio = obj.m_respectAspectRatio;
m_pipelineDesc = obj.m_pipelineDesc;
m_pipelineInfo = obj.m_pipelineInfo;
}
/**
* Clones a configuration.
*
*/
IUAVGadgetConfiguration *VideoGadgetConfiguration::clone() const
{
return new VideoGadgetConfiguration(*this);
}
/**
* Saves a configuration.
*
*/
void VideoGadgetConfiguration::saveConfig(QSettings &settings) const
{
settings.setValue("displayVideo", m_displayVideo);
settings.setValue("autoStart", m_autoStart);
settings.setValue("displayControls", m_displayControls);
settings.setValue("respectAspectRatio", m_respectAspectRatio);
settings.setValue("pipelineDesc", m_pipelineDesc);
settings.setValue("pipelineInfo", m_pipelineInfo);
}
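
The configuration is persisted as plain QSettings key/value pairs. A minimal standalone sketch of the same keys being written and read back (the INI file name is hypothetical; the GCS stores its settings through its own mechanism):

#include <QSettings>
#include <QDebug>

int main()
{
    // Round-trip the keys used by VideoGadgetConfiguration through a
    // throw-away INI file (for illustration only).
    QSettings settings("videogadget-example.ini", QSettings::IniFormat);
    settings.setValue("displayVideo", true);
    settings.setValue("displayControls", true);
    settings.setValue("autoStart", false);
    settings.setValue("respectAspectRatio", false);
    settings.setValue("pipelineDesc", "videotestsrc ! videoconvert ! autovideosink");
    settings.sync();

    qDebug() << settings.value("displayVideo").toBool()
             << settings.value("pipelineDesc").toString();
    return 0;
}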

View File

@ -0,0 +1,104 @@
/**
******************************************************************************
*
* @file videogadgetconfiguration.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETCONFIGURATION_H
#define VIDEOGADGETCONFIGURATION_H
#include <coreplugin/iuavgadgetconfiguration.h>
using namespace Core;
class VideoGadgetConfiguration : public IUAVGadgetConfiguration {
Q_OBJECT
public:
explicit VideoGadgetConfiguration(QString classId, QSettings &settings, QObject *parent = 0);
explicit VideoGadgetConfiguration(const VideoGadgetConfiguration &obj);
IUAVGadgetConfiguration *clone() const;
void saveConfig(QSettings &settings) const;
bool displayVideo() const
{
return m_displayVideo;
}
void setDisplayVideo(bool displayVideo)
{
m_displayVideo = displayVideo;
}
bool displayControls() const
{
return m_displayControls;
}
void setDisplayControls(bool displayControls)
{
m_displayControls = displayControls;
}
bool autoStart() const
{
return m_autoStart;
}
void setAutoStart(bool autoStart)
{
m_autoStart = autoStart;
}
bool respectAspectRatio() const
{
return m_respectAspectRatio;
}
void setRespectAspectRatio(bool respectAspectRatio)
{
m_respectAspectRatio = respectAspectRatio;
}
QString pipelineDesc() const
{
return m_pipelineDesc;
}
void setPipelineDesc(QString pipelineDesc)
{
m_pipelineDesc = pipelineDesc;
}
QString pipelineInfo() const
{
return m_pipelineInfo;
}
void setPipelineInfo(QString pipelineInfo)
{
m_pipelineInfo = pipelineInfo;
}
private:
// video
bool m_displayVideo;
bool m_respectAspectRatio;
// controls
bool m_displayControls;
bool m_autoStart;
QString m_pipelineDesc;
QString m_pipelineInfo;
};
#endif // VIDEOGADGETCONFIGURATION_H

View File

@ -0,0 +1,57 @@
/**
******************************************************************************
*
* @file videogadgetfactory.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetfactory.h"
#include "videogadgetwidget.h"
#include "videogadget.h"
#include "videogadgetconfiguration.h"
#include "videogadgetoptionspage.h"
#include <coreplugin/uavgadgetoptionspagedecorator.h>
#include <coreplugin/iuavgadget.h>
VideoGadgetFactory::VideoGadgetFactory(QObject *parent) :
IUAVGadgetFactory(QString("VideoGadget"), tr("Video"), parent)
{}
VideoGadgetFactory::~VideoGadgetFactory()
{}
Core::IUAVGadget *VideoGadgetFactory::createGadget(QWidget *parent)
{
VideoGadgetWidget *gadgetWidget = new VideoGadgetWidget(parent);
return new VideoGadget(QString("VideoGadget"), gadgetWidget, parent);
}
IUAVGadgetConfiguration *VideoGadgetFactory::createConfiguration(QSettings &settings)
{
return new VideoGadgetConfiguration(QString("VideoGadget"), settings);
}
IOptionsPage *VideoGadgetFactory::createOptionsPage(IUAVGadgetConfiguration *config)
{
return new VideoGadgetOptionsPage(qobject_cast<VideoGadgetConfiguration *>(config));
}

View File

@ -0,0 +1,51 @@
/**
******************************************************************************
*
* @file videogadgetfactory.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETFACTORY_H_
#define VIDEOGADGETFACTORY_H_
#include <coreplugin/iuavgadgetfactory.h>
namespace Core {
class IUAVGadget;
class IUAVGadgetFactory;
}
using namespace Core;
class VideoGadgetFactory : public IUAVGadgetFactory {
Q_OBJECT
public:
VideoGadgetFactory(QObject *parent = 0);
~VideoGadgetFactory();
IUAVGadget *createGadget(QWidget *parent);
IUAVGadgetConfiguration *createConfiguration(QSettings &settings);
IOptionsPage *createOptionsPage(IUAVGadgetConfiguration *config);
};
#endif // VIDEOGADGETFACTORY_H_

View File

@ -0,0 +1,82 @@
/**
******************************************************************************
*
* @file videogadgetoptionspage.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetoptionspage.h"
#include "videogadgetconfiguration.h"
#include "helpdialog.h"
#include "ui_videooptionspage.h"
VideoGadgetOptionsPage::VideoGadgetOptionsPage(VideoGadgetConfiguration *config, QObject *parent) :
IOptionsPage(parent), m_config(config)
{
m_page = 0;
}
QWidget *VideoGadgetOptionsPage::createPage(QWidget *parent)
{
m_page = new Ui::VideoOptionsPage();
QWidget *w = new QWidget(parent);
m_page->setupUi(w);
// TODO
m_page->respectAspectRatioCheckBox->setVisible(false);
m_page->helpButton->setVisible(false);
m_page->displayVideoCheckBox->setChecked(m_config->displayVideo());
m_page->displayControlsCheckBox->setChecked(m_config->displayControls());
m_page->autoStartCheckBox->setChecked(m_config->autoStart());
m_page->respectAspectRatioCheckBox->setChecked(m_config->respectAspectRatio());
m_page->descPlainTextEdit->setPlainText(m_config->pipelineDesc());
m_page->infoPlainTextEdit->setPlainText(m_config->pipelineInfo());
connect(m_page->helpButton, SIGNAL(clicked()), this, SLOT(openHelpDialog()));
return w;
}
void VideoGadgetOptionsPage::apply()
{
m_config->setDisplayVideo(m_page->displayVideoCheckBox->isChecked());
m_config->setDisplayControls(m_page->displayControlsCheckBox->isChecked());
m_config->setAutoStart(m_page->autoStartCheckBox->isChecked());
m_config->setRespectAspectRatio(m_page->respectAspectRatioCheckBox->isChecked());
m_config->setPipelineDesc(m_page->descPlainTextEdit->toPlainText());
m_config->setPipelineInfo(m_page->infoPlainTextEdit->toPlainText());
}
void VideoGadgetOptionsPage::finish()
{
delete m_page;
}
void VideoGadgetOptionsPage::openHelpDialog()
{
HelpDialog dlg(0);
dlg.execDialog();
}

View File

@ -0,0 +1,80 @@
/**
******************************************************************************
*
* @file videogadgetoptionspage.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETOPTIONSPAGE_H
#define VIDEOGADGETOPTIONSPAGE_H
#include "coreplugin/dialogs/ioptionspage.h"
#include <QLabel>
#include <QPlainTextEdit>
class VideoGadgetConfiguration;
namespace Ui {
class VideoOptionsPage;
}
using namespace Core;
class VideoGadgetOptionsPage : public IOptionsPage {
Q_OBJECT
public:
explicit VideoGadgetOptionsPage(VideoGadgetConfiguration *config, QObject *parent = 0);
QString id() const
{
return "";
}
QString trName() const
{
return "";
}
QString category() const
{
return "";
}
QString trCategory() const
{
return "";
}
QWidget *createPage(QWidget *parent);
void apply();
void finish();
// private signals:
// public slots:
private slots:
void openHelpDialog();
private:
VideoGadgetConfiguration *m_config;
Ui::VideoOptionsPage *m_page;
};
#endif // VIDEOGADGETOPTIONSPAGE_H

View File

@ -0,0 +1,170 @@
/**
******************************************************************************
*
* @file videogadgetwidget.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videogadgetconfiguration.h"
#include "videogadgetwidget.h"
#include "pipeline.h"
#include <QtCore>
#include <QDebug>
#include <QStringList>
#include <QTextEdit>
#include <QPushButton>
#include <QWidget>
VideoGadgetWidget::VideoGadgetWidget(QWidget *parent) :
QFrame(parent)
{
m_ui = new Ui_Form();
m_ui->setupUi(this);
m_ui->consoleTextBrowser->setVisible(false);
connect(videoWidget(), &VideoWidget::stateChanged, this, &VideoGadgetWidget::onStateChanged);
connect(videoWidget(), &VideoWidget::message, this, &VideoGadgetWidget::msg);
connect(m_ui->startButton, &QPushButton::clicked, this, &VideoGadgetWidget::start);
connect(m_ui->pauseButton, &QPushButton::clicked, this, &VideoGadgetWidget::pause);
connect(m_ui->stopButton, &QPushButton::clicked, this, &VideoGadgetWidget::stop);
connect(m_ui->consoleButton, &QPushButton::clicked, this, &VideoGadgetWidget::console);
onStateChanged(Pipeline::Null, Pipeline::Null, Pipeline::Null);
}
VideoGadgetWidget::~VideoGadgetWidget()
{
delete m_ui;
m_ui = 0;
}
void VideoGadgetWidget::setConfiguration(VideoGadgetConfiguration *config)
{
videoWidget()->setVisible(config->displayVideo());
// m_ui->control->setEnabled(config->displayControls());
bool restart = false;
if (videoWidget()->pipelineDesc() != config->pipelineDesc()) {
if (videoWidget()->isPlaying()) {
restart = true;
stop();
}
msg(QString("setting pipeline %0").arg(config->pipelineDesc()));
videoWidget()->setPipelineDesc(config->pipelineDesc());
}
if (restart || (!videoWidget()->isPlaying() && config->autoStart())) {
start();
}
}
void VideoGadgetWidget::start()
{
msg(QString("starting..."));
m_ui->startButton->setEnabled(false);
videoWidget()->start();
}
void VideoGadgetWidget::pause()
{
msg(QString("pausing..."));
m_ui->pauseButton->setEnabled(false);
videoWidget()->pause();
}
void VideoGadgetWidget::stop()
{
msg(QString("stopping..."));
videoWidget()->stop();
}
void VideoGadgetWidget::console()
{
m_ui->consoleTextBrowser->setVisible(!m_ui->consoleTextBrowser->isVisible());
}
void VideoGadgetWidget::onStateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState)
{
Q_UNUSED(oldState);
// msg(QString("state changed: ") + VideoWidget::name(oldState) + " -> " + VideoWidget::name(newState) + " / " + VideoWidget::name(pendingState));
bool startEnabled = true;
bool pauseEnabled = true;
bool stopEnabled = true;
bool startVisible = false;
bool pauseVisible = false;
bool stopVisible = true;
switch (newState) {
case Pipeline::Ready:
// start & !stop
startVisible = true;
stopEnabled = false;
break;
case Pipeline::Paused:
if (pendingState == Pipeline::Playing) {
// !pause & stop
pauseVisible = true;
pauseEnabled = false;
} else if (pendingState == Pipeline::Ready) {
// start & !stop
startVisible = true;
stopEnabled = false;
} else {
// start & stop
startVisible = true;
}
break;
case Pipeline::Playing:
// pause & stop
pauseVisible = true;
break;
default:
// start & !stop
startVisible = true;
stopEnabled = false;
break;
}
m_ui->startButton->setVisible(startVisible);
m_ui->startButton->setEnabled(startEnabled);
m_ui->pauseButton->setVisible(pauseVisible);
m_ui->pauseButton->setEnabled(pauseEnabled);
m_ui->stopButton->setVisible(stopVisible);
m_ui->stopButton->setEnabled(stopEnabled);
}
void VideoGadgetWidget::msg(const QString &str)
{
if (m_ui) {
m_ui->consoleTextBrowser->append(str);
}
}
VideoWidget *VideoGadgetWidget::videoWidget()
{
return m_ui->video;
}

View File

@ -0,0 +1,68 @@
/**
******************************************************************************
*
* @file videogadgetwidget.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOGADGETWIDGET_H_
#define VIDEOGADGETWIDGET_H_
#include "pipeline.h"
#include "ui_video.h"
#include <QFrame>
#include <QtCore/QEvent>
#include <QtGui/QResizeEvent>
#include <QtGui/QPaintEvent>
class VideoWidget;
class VideoGadgetConfiguration;
class VideoGadgetWidget : public QFrame {
Q_OBJECT
public:
VideoGadgetWidget(QWidget *parent = 0);
~VideoGadgetWidget();
void setConfiguration(VideoGadgetConfiguration *config);
private slots:
void start();
void pause();
void stop();
void console();
void onStateChanged(Pipeline::State oldState, Pipeline::State newState, Pipeline::State pendingState);
private:
Ui_Form *m_ui;
VideoGadgetConfiguration *config;
void msg(const QString &str);
VideoWidget *videoWidget();
};
#endif /* VIDEOGADGETWIDGET_H_ */

View File

@ -0,0 +1,96 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>VideoOptionsPage</class>
<widget class="QWidget" name="VideoOptionsPage">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>378</width>
<height>300</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<layout class="QGridLayout" name="gridLayout">
<property name="margin">
<number>0</number>
</property>
<item row="6" column="0">
<widget class="QLabel" name="descLabel">
<property name="text">
<string>Pipeline:</string>
</property>
</widget>
</item>
<item row="6" column="1">
<widget class="QPlainTextEdit" name="descPlainTextEdit"/>
</item>
<item row="1" column="1">
<widget class="QCheckBox" name="displayVideoCheckBox">
<property name="text">
<string>Display video</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QCheckBox" name="displayControlsCheckBox">
<property name="text">
<string>Display controls</string>
</property>
</widget>
</item>
<item row="7" column="1">
<widget class="QPlainTextEdit" name="infoPlainTextEdit"/>
</item>
<item row="7" column="0">
<widget class="QLabel" name="infoLabel">
<property name="text">
<string>Info:</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QCheckBox" name="autoStartCheckBox">
<property name="text">
<string>Auto Start</string>
</property>
</widget>
</item>
<item row="4" column="1">
<widget class="QCheckBox" name="respectAspectRatioCheckBox">
<property name="text">
<string>Respect aspect ratio</string>
</property>
</widget>
</item>
<item row="8" column="1">
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QPushButton" name="helpButton">
<property name="text">
<string>Help</string>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</widget>
<resources/>
<connections/>
</ui>

View File

@ -0,0 +1,64 @@
/**
******************************************************************************
*
* @file videoplugin.cpp
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "videoplugin.h"
#include "videogadgetfactory.h"
#include <extensionsystem/pluginmanager.h>
#include <QDebug>
#include <QtPlugin>
#include <QStringList>
VideoPlugin::VideoPlugin()
{
// Do nothing
}
VideoPlugin::~VideoPlugin()
{
// Do nothing
}
bool VideoPlugin::initialize(const QStringList & args, QString *errMsg)
{
Q_UNUSED(args);
Q_UNUSED(errMsg);
mf = new VideoGadgetFactory(this);
addAutoReleasedObject(mf);
return true;
}
void VideoPlugin::extensionsInitialized()
{
// Do nothing
}
void VideoPlugin::shutdown()
{
// Do nothing
}

View File

@ -0,0 +1,51 @@
/**
******************************************************************************
*
* @file videoplugin.h
* @author The LibrePilot Project, http://www.librepilot.org Copyright (C) 2017.
* @addtogroup GCSPlugins GCS Plugins
* @{
* @addtogroup VideoGadgetPlugin Video Gadget Plugin
* @{
* @brief A video gadget plugin
*****************************************************************************/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef VIDEOPLUGIN_H_
#define VIDEOPLUGIN_H_
#include <extensionsystem/iplugin.h>
class VideoGadgetFactory;
class VideoPlugin : public ExtensionSystem::IPlugin {
Q_OBJECT
Q_PLUGIN_METADATA(IID "OpenPilot.Video")
public:
VideoPlugin();
~VideoPlugin();
void extensionsInitialized();
bool initialize(const QStringList &arguments, QString *errorString);
void shutdown();
private:
VideoGadgetFactory *mf;
};
#endif /* VIDEOPLUGIN_H_ */

Some files were not shown because too many files have changed in this diff.