I'm trying to build a remote control for a smartphone game with a BLE CPro sensor. How can I detect the orientation of the sensor (preferably in degrees), e.g. whether it is tilted to the left or to the right, without the reading being distorted by linear acceleration, i.e. in a shaken/moving environment?
My problem is that when I calculate the orientation from the accelerometer alone, every shake adds linear acceleration on top of gravity, so the measured gravity vector changes drastically and it becomes difficult to know the current orientation.
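To illustrate, this is roughly what I am doing at the moment (a minimal sketch, not the vendor's code; ax, ay, az stand for the raw accelerometer readings in g and the function names are mine):

#include <cmath>

// Naive tilt from the accelerometer alone. This treats the measured
// acceleration as pure gravity, which is exactly the assumption that
// breaks while the sensor is being shaken.
float rollDegrees(float ay, float az)
{
    // angle of the gravity vector in the y/z plane
    return std::atan2(ay, az) * 180.0f / float(M_PI);
}

float pitchDegrees(float ax, float ay, float az)
{
    // tilt of the x axis against the gravity vector
    return std::atan2(-ax, std::sqrt(ay * ay + az * az)) * 180.0f / float(M_PI);
}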
The CPro vendor provides an example project that computes quaternions via sensor fusion. As output it renders a cube in OpenGL ES that follows the orientation/rotation of the sensor. Unfortunately I don't quite understand how to get the orientation in degrees out of those quaternions (see my attempt after the code below).
// source: https://github.com/mbientlab-projects/iOSSensorFusion/tree/master
- (void)performKalmanUpdate
{
    [self.estimator readAccel:self.accelData
                        rates:self.gyroData
                        field:self.magnetometerData];

    if (self.estimator.compassCalibrated && self.estimator.gyroCalibrated)
    {
        auto q = self.estimator.eskf->getState();
        auto g = self.estimator.eskf->getAPred();
        auto a = self.accelData;
        auto w = self.gyroData;
        auto mp = self.estimator.eskf->getMPred();
        auto m = self.estimator.eskf->getMMeas();

        _s->qvals[0] = q.a();
        _s->qvals[1] = q.b();
        _s->qvals[2] = q.c();
        _s->qvals[3] = q.d();

        // calculate un-filtered angles
        float ay = -a.y;
        if (ay < -1.0f) {
            ay = -1.0f;
        } else if (ay > 1.0f) {
            ay = 1.0f;
        }

        _s->ang[1] = std::atan2(-a.x, -a.z);
        _s->ang[0] = std::asin(-ay);
        _s->ang[2] = std::atan2(m(1), m(0)); // hack: using the filtered cos/theta to tilt-compensate here

        // send transform to render view
        auto R = q.to_matrix();
        GLKMatrix4 trans = GLKMatrix4Identity;
        auto M = GLKMatrix4MakeAndTranspose(R(0,0), R(0,1), R(0,2), 0.0f,
                                            R(1,0), R(1,1), R(1,2), 0.0f,
                                            R(2,0), R(2,1), R(2,2), 0.0f,
                                            0.0f,   0.0f,   0.0f,   1.0f);
        trans = GLKMatrix4Multiply(trans, M);

        self.renderVC.cubeOrientation = trans;
    }
}
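For completeness, this is how far I got with converting the quaternion to angles in degrees. It is a minimal sketch of the standard quaternion-to-Euler (Z-Y-X) formulas, and it assumes that q.a()..q.d() above map to w, x, y, z in that order, which I have not verified against the library:

#include <cmath>
#include <algorithm>

// Standard quaternion (w, x, y, z) to intrinsic Z-Y-X Euler angles in
// degrees. Assumes a unit quaternion; the asin argument is clamped to
// avoid NaN from small numerical errors.
struct EulerDeg { float roll, pitch, yaw; };

EulerDeg quatToEuler(float w, float x, float y, float z)
{
    const float rad2deg = 180.0f / float(M_PI);

    // roll: rotation about the x axis
    float roll = std::atan2(2.0f * (w * x + y * z),
                            1.0f - 2.0f * (x * x + y * y));

    // pitch: rotation about the y axis
    float s = 2.0f * (w * y - z * x);
    s = std::max(-1.0f, std::min(1.0f, s));
    float pitch = std::asin(s);

    // yaw: rotation about the z axis
    float yaw = std::atan2(2.0f * (w * z + x * y),
                           1.0f - 2.0f * (y * y + z * z));

    return { roll * rad2deg, pitch * rad2deg, yaw * rad2deg };
}

If that mapping is right, yaw should tell me whether the sensor is rotated to the left or right of its starting heading and roll/pitch should give the tilt, but I'm not sure this is the intended way to read the filter output.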