/**
 ******************************************************************************
 * @addtogroup OpenPilotModules OpenPilot Modules
 * @{
 * @addtogroup CameraStab Camera Stabilization Module
 * @brief Camera stabilization module
 * Updates accessory outputs with values appropriate for camera stabilization
 * @{
 *
 * @file       camerastab.c
 * @author     The OpenPilot Team, http://www.openpilot.org Copyright (C) 2010.
 * @brief      Stabilize camera against the roll pitch and yaw of aircraft
 *
 * @see        The GNU Public License (GPL) Version 3
 *
 *****************************************************************************/
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/**
 * Output object: Accessory
 *
 * This module will periodically calculate the output values for stabilizing the camera
 *
 * UAVObjects are automatically generated by the UAVObjectGenerator from
 * the object definition XML file.
 *
 * Modules have no API, all communication to other modules is done through UAVObjects.
 * However modules may use the API exposed by shared libraries.
 * See the OpenPilot wiki for more details.
 * http://www.openpilot.org/OpenPilot_Application_Architecture
 *
 */
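
/*
 * Rough data flow, as sketched from the code below: attitudeUpdated() runs
 * periodically (every SAMPLE_PERIOD_MS) via the event dispatcher, reads the pilot
 * inputs from AccessoryDesired and the configuration from CameraStabSettings, and
 * writes the stabilized angles to CameraDesired for the camera mixer channels
 * configured in MixerSettings.
 */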

#include "openpilot.h"

#include "accessorydesired.h"
#include "attitudestate.h"
#include "mixersettings.h"
#include "actuatorcommand.h"
#include "camerastabsettings.h"
#include "cameradesired.h"
#include "hwsettings.h"

//
// Configuration
//
#define SAMPLE_PERIOD_MS 10

// Private types

// Private variables
static bool gimbalOutputEnabled = false;

static struct CameraStab_data {
    portTickType lastSysTime;
    float inputs[CAMERASTABSETTINGS_INPUT_NUMELEM];

#ifdef USE_GIMBAL_LPF
    float attitudeFiltered[CAMERASTABSETTINGS_INPUT_NUMELEM];
#endif

#ifdef USE_GIMBAL_FF
    float ffLastAttitude[CAMERASTABSETTINGS_INPUT_NUMELEM];
    float ffLastAttitudeFiltered[CAMERASTABSETTINGS_INPUT_NUMELEM];
    float ffFilterAccumulator[CAMERASTABSETTINGS_INPUT_NUMELEM];
#endif
} *csd;

// Private functions
static void attitudeUpdated(UAVObjEvent *ev);

#ifdef USE_GIMBAL_FF
static void applyFeedForward(uint8_t index, float dT, float *attitude, CameraStabSettingsData *cameraStab);
#endif

// this structure is equivalent to the UAVObjects for one mixer.
typedef struct {
    uint8_t type;
    int8_t  matrix[5];
} __attribute__((packed)) Mixer_t;
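
/*
 * Note: attitudeUpdated() below scans the mixers by casting &MixerSettings.Mixer1Type
 * to a Mixer_t array; this relies on the generated MixerSettings object laying out each
 * channel's Type and Vector fields consecutively, matching the packed Mixer_t above.
 */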

/**
 * Initialise the module, called on startup
 * \returns 0 on success or -1 if initialisation failed
 */
int32_t CameraStabInitialize(void)
{
    bool cameraStabEnabled;

    HwSettingsOptionalModulesData optionalModules;

    HwSettingsOptionalModulesGet(&optionalModules);

#ifdef MODULE_CAMERASTAB_BUILTIN
    cameraStabEnabled = true;
    optionalModules.CameraStab = HWSETTINGS_OPTIONALMODULES_ENABLED;
    HwSettingsOptionalModulesSet(&optionalModules);
#else
    if (optionalModules.CameraStab == HWSETTINGS_OPTIONALMODULES_ENABLED) {
        cameraStabEnabled = true;
    } else {
        cameraStabEnabled = false;
    }
#endif

    if (cameraStabEnabled) {
        // allocate and initialize the static data storage only if module is enabled
        csd = (struct CameraStab_data *)pios_malloc(sizeof(struct CameraStab_data));
        if (!csd) {
            return -1;
        }

        // initialize camera state variables
        memset(csd, 0, sizeof(struct CameraStab_data));
        csd->lastSysTime = xTaskGetTickCount();

        AttitudeStateInitialize();
        CameraDesiredInitialize();

        UAVObjEvent ev = {
            .obj         = AttitudeStateHandle(),
            .instId      = 0,
            .event       = 0,
            .lowPriority = false,
        };
        EventPeriodicCallbackCreate(&ev, attitudeUpdated, SAMPLE_PERIOD_MS / portTICK_RATE_MS);

        return 0;
    }

    return -1;
}

/* stub: module has no module thread */
int32_t CameraStabStart(void)
{
    return 0;
}

MODULE_INITCALL(CameraStabInitialize, CameraStabStart);

static void attitudeUpdated(UAVObjEvent *ev)
{
    if (ev->obj != AttitudeStateHandle()) {
        return;
    }
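
    /*
     * Lazily enable gimbal output: until one of the Camera* mixer types is found in
     * MixerSettings the callback only rescans the mixer channels and returns without
     * producing any CameraDesired output; stabilization starts on the update after
     * gimbalOutputEnabled is set.
     */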
    if (!gimbalOutputEnabled) {
        MixerSettingsData mixerSettings;
        MixerSettingsGet(&mixerSettings);
        Mixer_t *mixers = (Mixer_t *)&mixerSettings.Mixer1Type;
        for (int ct = 0; ct < ACTUATORCOMMAND_CHANNEL_NUMELEM; ct++) {
            uint8_t mixer_type = mixers[ct].type;
            if ((mixer_type >= MIXERSETTINGS_MIXER1TYPE_CAMERAROLLORSERVO1) &&
                (mixer_type <= MIXERSETTINGS_MIXER1TYPE_CAMERAYAW)) {
                gimbalOutputEnabled = true;
            }
        }
        return;
    }

    AccessoryDesiredData accessory;

    CameraStabSettingsData cameraStab;
    CameraStabSettingsGet(&cameraStab);

    // check how long since last update, time delta between calls in ms
    portTickType thisSysTime = xTaskGetTickCount();
    float dT_millis = (thisSysTime > csd->lastSysTime) ?
                      (float)((thisSysTime - csd->lastSysTime) * portTICK_RATE_MS) :
                      (float)SAMPLE_PERIOD_MS;
    csd->lastSysTime = thisSysTime;
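    // Note: if the tick count has not advanced (or has wrapped), fall back to the nominal
    // SAMPLE_PERIOD_MS so that dT_millis stays positive.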

    // storage for elevon roll component before the pitch component has been generated
    // we are guaranteed that the iteration order of i is roll pitch yaw
    // that guarantees this won't be used uninitialized, but the compiler doesn't know that
    // so we initialize it or turn the warning/error off for each compiler
    float elevon_roll = 0.0f;

    // process axes
    for (uint8_t i = 0; i < CAMERASTABSETTINGS_INPUT_NUMELEM; i++) {
        // read and process control input
        if (CameraStabSettingsInputToArray(cameraStab.Input)[i] != CAMERASTABSETTINGS_INPUT_NONE) {
            if (AccessoryDesiredInstGet(CameraStabSettingsInputToArray(cameraStab.Input)[i] -
                                        CAMERASTABSETTINGS_INPUT_ACCESSORY0, &accessory) == 0) {
                float input_rate;
                switch (CameraStabSettingsStabilizationModeToArray(cameraStab.StabilizationMode)[i]) {
                case CAMERASTABSETTINGS_STABILIZATIONMODE_ATTITUDE:
                    csd->inputs[i] = accessory.AccessoryVal *
                                     CameraStabSettingsInputRangeToArray(cameraStab.InputRange)[i];
                    break;
                case CAMERASTABSETTINGS_STABILIZATIONMODE_AXISLOCK:
                    input_rate = accessory.AccessoryVal *
                                 CameraStabSettingsInputRateToArray(cameraStab.InputRate)[i];
                    if (fabsf(input_rate) > cameraStab.MaxAxisLockRate) {
                        csd->inputs[i] = boundf(csd->inputs[i] + input_rate * 0.001f * dT_millis,
                                                -CameraStabSettingsInputRangeToArray(cameraStab.InputRange)[i],
                                                CameraStabSettingsInputRangeToArray(cameraStab.InputRange)[i]);
                    }
                    break;
                default:
                    PIOS_Assert(0);
                }
            }
        }
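
        // Illustrative numbers for the two modes above: in Attitude mode a half stick
        // (AccessoryVal = 0.5) with InputRange = 20 requests a fixed 10 degree offset; in
        // AxisLock mode a full stick with InputRate = 50 deg/s advances inputs[i] by
        // 50 * 0.001f * dT_millis degrees per update (only while the commanded rate exceeds
        // MaxAxisLockRate), clamped to +/- InputRange.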

        // calculate servo output
        float attitude;

        switch (i) {
        case CAMERASTABSETTINGS_INPUT_ROLL:
            AttitudeStateRollGet(&attitude);
            break;
        case CAMERASTABSETTINGS_INPUT_PITCH:
            AttitudeStatePitchGet(&attitude);
            break;
        case CAMERASTABSETTINGS_INPUT_YAW:
            AttitudeStateYawGet(&attitude);
            break;
        default:
            PIOS_Assert(0);
        }

#ifdef USE_GIMBAL_LPF
        if (CameraStabSettingsResponseTimeToArray(cameraStab.ResponseTime)[i]) {
            float rt = (float)CameraStabSettingsResponseTimeToArray(cameraStab.ResponseTime)[i];
            attitude = csd->attitudeFiltered[i] = ((rt * csd->attitudeFiltered[i]) + (dT_millis * attitude)) / (rt + dT_millis);
        }
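        // The update above is a first order low pass: algebraically it equals
        // attitudeFiltered += (dT_millis / (rt + dT_millis)) * (attitude - attitudeFiltered),
        // so ResponseTime (in ms) plays the role of the filter time constant.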
#endif

#ifdef USE_GIMBAL_FF
        if (CameraStabSettingsFeedForwardToArray(cameraStab.FeedForward)[i]) {
            applyFeedForward(i, dT_millis, &attitude, &cameraStab);
        }
#endif

        // bounding for elevon mixing occurs on the unmixed output
        // to limit the range of the mixed output you must limit the range
        // of both the unmixed pitch and unmixed roll
        float output = boundf((attitude + csd->inputs[i]) / CameraStabSettingsOutputRangeToArray(cameraStab.OutputRange)[i], -1.0f, 1.0f);
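        // Example with illustrative values: a 10 degree roll attitude, no stick offset and
        // OutputRange = 20 gives output = 10 / 20 = 0.5 before the +/- 1.0 clamp.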

        // set output channels
        switch (i) {
        case CAMERASTABSETTINGS_INPUT_ROLL:
            // we are guaranteed that the iteration order of i is roll pitch yaw
            // for elevon mixing we simply grab the value for later use
            if (cameraStab.GimbalType == CAMERASTABSETTINGS_GIMBALTYPE_ROLLPITCHMIXED) {
                elevon_roll = output;
            } else {
                CameraDesiredRollOrServo1Set(&output);
            }
            break;
        case CAMERASTABSETTINGS_INPUT_PITCH:
            // we are guaranteed that the iteration order of i is roll pitch yaw
            // for elevon mixing we use the value we previously grabbed and set both s1 and s2
            if (cameraStab.GimbalType == CAMERASTABSETTINGS_GIMBALTYPE_ROLLPITCHMIXED) {
                float elevon_pitch = output;
                // elevon reversing works like this:
                // first use the normal reversing facilities to get servo 1 roll working in the correct direction
                // then use the normal reversing facilities to get servo 2 roll working in the correct direction
                // then use these new reversing switches to reverse servo 1 and/or 2 pitch as needed
                // if servo 1 pitch is reversed
                if (cameraStab.Servo1PitchReverse == CAMERASTABSETTINGS_SERVO1PITCHREVERSE_TRUE) {
                    // use (reversed pitch) + roll
                    output = ((1.0f - elevon_pitch) + elevon_roll) / 2.0f;
                } else {
                    // use pitch + roll
                    output = (elevon_pitch + elevon_roll) / 2.0f;
                }
                CameraDesiredRollOrServo1Set(&output);
                // if servo 2 pitch is reversed
                if (cameraStab.Servo2PitchReverse == CAMERASTABSETTINGS_SERVO2PITCHREVERSE_TRUE) {
                    // use (reversed pitch) - roll
                    output = ((1.0f - elevon_pitch) - elevon_roll) / 2.0f;
                } else {
                    // use pitch - roll
                    output = (elevon_pitch - elevon_roll) / 2.0f;
                }
                CameraDesiredPitchOrServo2Set(&output);
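                // Worked example of the mixing above (illustrative values, no reversing):
                // elevon_pitch = 0.2 and elevon_roll = 0.4 give
                // servo 1 = (0.2 + 0.4) / 2 = 0.3 and servo 2 = (0.2 - 0.4) / 2 = -0.1.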
            } else {
                CameraDesiredPitchOrServo2Set(&output);
            }
            break;
        case CAMERASTABSETTINGS_INPUT_YAW:
            CameraDesiredYawSet(&output);
            break;
        default:
            PIOS_Assert(0);
        }
    }
}

#ifdef USE_GIMBAL_FF
void applyFeedForward(uint8_t index, float dT_millis, float *attitude, CameraStabSettingsData *cameraStab)
{
    // compensate high feed forward values depending on gimbal type
    float gimbalTypeCorrection = 1.0f;

    switch (cameraStab->GimbalType) {
    case CAMERASTABSETTINGS_GIMBALTYPE_GENERIC:
    case CAMERASTABSETTINGS_GIMBALTYPE_ROLLPITCHMIXED:
        // no correction
        break;
    case CAMERASTABSETTINGS_GIMBALTYPE_YAWROLLPITCH:
        if (index == CAMERASTABSETTINGS_INPUT_ROLL) {
            float pitch;
            AttitudeStatePitchGet(&pitch);
            gimbalTypeCorrection = (cameraStab->OutputRange.Pitch - fabsf(pitch))
                                   / cameraStab->OutputRange.Pitch;
        }
        break;
    case CAMERASTABSETTINGS_GIMBALTYPE_YAWPITCHROLL:
        if (index == CAMERASTABSETTINGS_INPUT_PITCH) {
            float roll;
            AttitudeStateRollGet(&roll);
            gimbalTypeCorrection = (cameraStab->OutputRange.Roll - fabsf(roll))
                                   / cameraStab->OutputRange.Roll;
        }
        break;
    default:
        PIOS_Assert(0);
    }
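
    // The correction above fades the feed forward as the other axis nears its output limit:
    // e.g. on a yaw-roll-pitch frame the roll term is scaled by
    // (OutputRange.Pitch - |pitch|) / OutputRange.Pitch, reaching 0 at full pitch deflection.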

    // apply feed forward
    float accumulator = csd->ffFilterAccumulator[index];
    accumulator += (*attitude - csd->ffLastAttitude[index]) *
                   (float)CameraStabSettingsFeedForwardToArray(cameraStab->FeedForward)[index] * gimbalTypeCorrection;
    csd->ffLastAttitude[index] = *attitude;
    *attitude += accumulator;

    float filter = (float)((accumulator > 0.0f) ? CameraStabSettingsAccelTimeToArray(cameraStab->AccelTime)[index] :
                           CameraStabSettingsDecelTimeToArray(cameraStab->DecelTime)[index]) / dT_millis;
    if (filter < 1.0f) {
        filter = 1.0f;
    }
    accumulator -= accumulator / filter;
    csd->ffFilterAccumulator[index] = accumulator;
    *attitude += accumulator;
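    // The accumulator holds the recent attitude change scaled by FeedForward; the decay above
    // removes accumulator / filter per update, i.e. roughly a factor of (1 - dT_millis / AccelTime)
    // (DecelTime when the accumulator is not positive), so the boost bleeds off over about
    // AccelTime / DecelTime milliseconds.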

    // apply acceleration limit
    float delta = *attitude - csd->ffLastAttitudeFiltered[index];
    float maxDelta = (float)cameraStab->MaxAccel * 0.001f * dT_millis;

    if (fabsf(delta) > maxDelta) {
        // we are accelerating too hard
        *attitude = csd->ffLastAttitudeFiltered[index] + ((delta > 0.0f) ? maxDelta : -maxDelta);
    }
    csd->ffLastAttitudeFiltered[index] = *attitude;
}
#endif // USE_GIMBAL_FF

/**
 * @}
 */

/**
 * @}
 */