diff --git a/aquadrone_description/models/v2.urdf.xacro b/aquadrone_description/models/v2.urdf.xacro
index 213917a5..f1194f81 100755
--- a/aquadrone_description/models/v2.urdf.xacro
+++ b/aquadrone_description/models/v2.urdf.xacro
@@ -6,13 +6,6 @@
-
-
-
-
-
-
-
@@ -64,4 +57,4 @@
-
+
\ No newline at end of file
diff --git a/aquadrone_description/models/v28.urdf.xacro b/aquadrone_description/models/v28.urdf.xacro
index 7c98d389..5636fc76 100755
--- a/aquadrone_description/models/v28.urdf.xacro
+++ b/aquadrone_description/models/v28.urdf.xacro
@@ -7,14 +7,6 @@
-
-
-
-
-
-
-
-
@@ -26,8 +18,7 @@
-
-
+
@@ -39,38 +30,38 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
diff --git a/aquadrone_description/models/v2_wobbly.urdf.xacro b/aquadrone_description/models/v2_wobbly.urdf.xacro
index 5fdf7e70..1a4d1110 100755
--- a/aquadrone_description/models/v2_wobbly.urdf.xacro
+++ b/aquadrone_description/models/v2_wobbly.urdf.xacro
@@ -5,11 +5,6 @@
-
-
-
-
-
@@ -62,4 +57,4 @@
-
+
\ No newline at end of file
diff --git a/aquadrone_description/urdf/aquadrone_v2.urdf.xacro b/aquadrone_description/urdf/aquadrone_v2.urdf.xacro
index 60083445..20a8c58c 100755
--- a/aquadrone_description/urdf/aquadrone_v2.urdf.xacro
+++ b/aquadrone_description/urdf/aquadrone_v2.urdf.xacro
@@ -21,45 +21,16 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
@@ -68,72 +39,26 @@ Taken at the output coordinate system.
-
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
-
-
+
-
+
-
@@ -158,14 +83,10 @@ Taken at the output coordinate system.
+
0
-
${cob}
-
-
-
-
- ${displaced_volume*bouyancy_fudge_factor}
+ ${volume*1.005}
${width}
${length}
@@ -184,23 +105,13 @@ Taken at the output coordinate system.
0 0 0 0 0 0
0 0 0 0 0 0
-
-
-
-
-
- -74.82 -69.48 -728.4 -268.8 -309.77 -105
-
- -748.22 -992.53 -1821.01 -672 -774.44 -523.27
-
-
+
+ -3 -3 -3 -1 -1 -1
+
+
+ -1 -1 -1 -1 -1 -1
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
diff --git a/aquadrone_sim_worlds/worlds/empty_underwater.world b/aquadrone_sim_worlds/worlds/empty_underwater.world
index 2c0e9034..931dc8e2 100644
--- a/aquadrone_sim_worlds/worlds/empty_underwater.world
+++ b/aquadrone_sim_worlds/worlds/empty_underwater.world
@@ -32,9 +32,9 @@
- $(arg max_step_size)
+ 0.01
1.0
- $(arg real_time_update_rate)
+ 500
quick
diff --git a/path_planning/src/path_planning/ros_modules.py b/path_planning/src/path_planning/ros_modules.py
index 5bde61fd..79d72409 100644
--- a/path_planning/src/path_planning/ros_modules.py
+++ b/path_planning/src/path_planning/ros_modules.py
@@ -4,7 +4,6 @@
import time
from std_msgs.msg import Float64
-from std_srvs.srv import Trigger, TriggerRequest
from geometry_msgs.msg import Vector3, Wrench
from sensor_msgs.msg import Image
@@ -45,9 +44,6 @@ def __init__(self):
self.sub_state_sub = rospy.Subscriber("/state_estimation", SubState, self.sub_state_callback)
self.sub_state = SubState()
- rospy.wait_for_service('reset_sub_state_estimation')
- self.state_est_reset_service = rospy.ServiceProxy('reset_sub_state_estimation', Trigger)
-
def sub_state_callback(self, msg):
self.sub_state = msg
@@ -79,10 +75,6 @@ def make_quat(msg):
return out
- def reset_state_estimation(self):
- req = TriggerRequest()
- self.state_est_reset_service(req)
-
class ROSSensorDataModule:
def __init__(self):
diff --git a/stability/src/AxialPid.cpp b/stability/src/AxialPid.cpp
index fa4a3003..57f595eb 100755
--- a/stability/src/AxialPid.cpp
+++ b/stability/src/AxialPid.cpp
@@ -12,11 +12,12 @@ RotPIDController::RotPIDController(float newRKp, float newRKi, float newRKd, flo
//initializing PID controllers
rollControl(rollKp, rollKi, rollKd), pitchControl(pitchKp, pitchKi, pitchKd), yawControl(yawKp, yawKi, yawKd)
{
+ OUT_LIMIT = 1;
MARGIN = 0.5;
//setting output limits
- rollControl.setOutputLimits(1);
- pitchControl.setOutputLimits(1);
- yawControl.setOutputLimits(1);
+ rollControl.setOutputLimits(OUT_LIMIT);
+ pitchControl.setOutputLimits(OUT_LIMIT);
+ yawControl.setOutputLimits(OUT_LIMIT);
//setting pid targets
rollControl.setSetpoint(0);
pitchControl.setSetpoint(0);
@@ -31,11 +32,12 @@ RotPIDController::RotPIDController(float newRKp, float newRKi, float newRKd, flo
//setting PID controllers
rollControl(rollKp, rollKi, rollKd), pitchControl(pitchKp, pitchKi, pitchKd), yawControl(yawKp, yawKi, yawKd)
{
+ OUT_LIMIT = 1;
MARGIN = 0.5;
//setting pid limits
- rollControl.setOutputLimits(1);
- pitchControl.setOutputLimits(1);
- yawControl.setOutputLimits(1);
+ rollControl.setOutputLimits(OUT_LIMIT);
+ pitchControl.setOutputLimits(OUT_LIMIT);
+ yawControl.setOutputLimits(OUT_LIMIT);
//setting pid targets
rollControl.setSetpoint(0);
@@ -51,14 +53,12 @@ RotPIDController::RotPIDController():
//initializing PID controllers
rollControl(rollKp, rollKi, rollKd), pitchControl(pitchKp, pitchKi, pitchKd), yawControl(yawKp, yawKi, yawKd)
{
- OUT_LIMIT_R = 1;
- OUT_LIMIT_P = 1;
- OUT_LIMIT_Y = 1;
+ OUT_LIMIT = 1;
MARGIN = 0.5;
//setting pid limits
- rollControl.setOutputLimits(OUT_LIMIT_R);
- pitchControl.setOutputLimits(OUT_LIMIT_P);
- yawControl.setOutputLimits(OUT_LIMIT_Y);
+ rollControl.setOutputLimits(OUT_LIMIT);
+ pitchControl.setOutputLimits(OUT_LIMIT);
+ yawControl.setOutputLimits(OUT_LIMIT);
//setting pid targets
rollControl.setSetpoint(0);
@@ -66,6 +66,7 @@ RotPIDController::RotPIDController():
yawControl.setSetpoint(0);
rollFlip = pitchFlip = yawFlip = false;
+ std::cout<<"a"<("motorStability", 5);
ros::Subscriber target = n.subscribe("orientation_target", 5, &RotPIDController::setTargets, &rotCtrl);
ros::Subscriber sensor = n.subscribe("state_estimation", 5, &RotPIDController::setInputs, &rotCtrl);
+ std::cout<<"c"<
-
-
-
+
\ No newline at end of file
diff --git a/thruster_control/launch/sim_control.launch b/thruster_control/launch/sim_control.launch
index 62f41d66..c26d06be 100644
--- a/thruster_control/launch/sim_control.launch
+++ b/thruster_control/launch/sim_control.launch
@@ -5,7 +5,5 @@
-
-
-
+
\ No newline at end of file
diff --git a/thruster_control/nodes/demo_depth_control.py b/thruster_control/nodes/demo_depth_control.py
index fc0c33b5..3b6bfdcd 100755
--- a/thruster_control/nodes/demo_depth_control.py
+++ b/thruster_control/nodes/demo_depth_control.py
@@ -6,11 +6,5 @@
if __name__ == "__main__":
- rospy.init_node('depth_control')
-
- Kp = rospy.get_param('/stability/depth/Kp')
- Kd = rospy.get_param('/stability/depth/Kd')
- Ki = rospy.get_param('/stability/depth/Ki')
-
- ddc = DepthPIDController(Kp=Kp, Ki=Ki, Kd=Kd)
+ ddc = DepthPIDController()
ddc.run()
\ No newline at end of file
diff --git a/thruster_control/src/thruster_control/depth_pid_controller.py b/thruster_control/src/thruster_control/depth_pid_controller.py
index 2772b9cf..21a9ffcf 100755
--- a/thruster_control/src/thruster_control/depth_pid_controller.py
+++ b/thruster_control/src/thruster_control/depth_pid_controller.py
@@ -13,12 +13,13 @@
class DepthPIDController:
- def __init__(self, Kp=1, Kd=1, Ki=0):
+ def __init__(self):
+ rospy.init_node('depth_control')
self.depth = 0
- self.Kp = Kp
- self.Kd = Kd
- self.Ki = Ki
+ self.Kp = 3
+ self.Kd = 10
+ self.Ki = 0.05
self.loop_rate = 10.0
self.rate = rospy.Rate(self.loop_rate)
diff --git a/thruster_control/src/thruster_control/thruster_configurations.py b/thruster_control/src/thruster_control/thruster_configurations.py
index 9f3da085..78376fbc 100644
--- a/thruster_control/src/thruster_control/thruster_configurations.py
+++ b/thruster_control/src/thruster_control/thruster_configurations.py
@@ -96,18 +96,18 @@ def initialize(self):
def get_thrusts_to_wrench_matrix(self):
# Up-Down Thrusters
- th_0 = gh.get_thruster_wrench_vector(x=0.2, y=0.285, z=0.1, roll=0, pitch=a2, yaw=0)
- th_1 = gh.get_thruster_wrench_vector(x=0.2, y=-0.285, z=0.1, roll=0, pitch=a2, yaw=0)
- th_2 = gh.get_thruster_wrench_vector(x=-0.2, y=0.285, z=0.1, roll=0, pitch=a2, yaw=0)
- th_3 = gh.get_thruster_wrench_vector(x=-0.2, y=-0.285, z=0.1, roll=0, pitch=a2, yaw=0)
+ th_0 = gh.get_thruster_wrench_vector(x=self.dX, y=self.dY, z=-self.dZ, roll=0, pitch=a2, yaw=0)
+ th_1 = gh.get_thruster_wrench_vector(x=self.dX, y=-self.dY, z=-self.dZ, roll=0, pitch=a2, yaw=0)
+ th_2 = gh.get_thruster_wrench_vector(x=-self.dX, y=self.dY, z=-self.dZ, roll=0, pitch=a2, yaw=0)
+ th_3 = gh.get_thruster_wrench_vector(x=-self.dX, y=-self.dY, z=-self.dZ, roll=0, pitch=a2, yaw=0)
# Forwards-Backwards Thrsuters
- th_4 = gh.get_thruster_wrench_vector(x=0, y=0.285, z=0.1, roll=0, pitch=0, yaw=0)
- th_5 = gh.get_thruster_wrench_vector(x=0, y=-0.285, z=0.1, roll=0, pitch=0, yaw=0)
+ th_4 = gh.get_thruster_wrench_vector(x=0, y=self.dY, z=0, roll=0, pitch=0, yaw=0)
+ th_5 = gh.get_thruster_wrench_vector(x=0, y=-self.dY, z=0, roll=0, pitch=0, yaw=0)
# Lef-Right Thrusters
- th_6 = gh.get_thruster_wrench_vector(x=0, y=0.17, z=0, roll=0, pitch=0, yaw=-a2)
- th_7 = gh.get_thruster_wrench_vector(x=0, y=-0.17, z=0, roll=0, pitch=0, yaw=-a2)
+ th_6 = gh.get_thruster_wrench_vector(x=self.dX, y=0, z=self.dZ, roll=0, pitch=0, yaw=-a2)
+ th_7 = gh.get_thruster_wrench_vector(x=-self.dX, y=0, z=self.dZ, roll=0, pitch=0, yaw=-a2)
return np.column_stack((th_0, th_1, th_2, th_3, th_4, th_5, th_6, th_7))
diff --git a/thruster_control/test/test_depth_control.py b/thruster_control/test/test_depth_control.py
index 0d3a4973..cc3fd985 100755
--- a/thruster_control/test/test_depth_control.py
+++ b/thruster_control/test/test_depth_control.py
@@ -64,5 +64,4 @@ def test_updates_current_depth(self):
if __name__ == '__main__':
- rospy.init_node('test_depth_control')
- rostest.rosrun('thruster_control', 'test_depth_pid_control', TestDepthPIDController)
+ rostest.rosrun('thruster_control', 'test_depth_pid_control', TestDepthPIDController)
\ No newline at end of file
diff --git a/vision/src/faker.py b/vision/src/faker.py
deleted file mode 100755
index fb0c2e37..00000000
--- a/vision/src/faker.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python
-
-import rospy
-
-from geometry_msgs.msg import Point, Pose, Vector3
-from std_msgs.msg import String
-from gazebo_msgs.msg import ModelStates
-from aquadrone_msgs.msg import Vision_Array, Vision
-
-
-
-
-
-class Omniscient_Vision:
- def __init__(self):
- self.relative_pos_pub = rospy.Publisher("Vision_Data", Vision_Array, queue_size = 1)
- self.object_pos_sub = rospy.Subscriber("gazebo/model_states", ModelStates, self.get_obj_pos)
- self.object_pos = []
- self.object_ident = []
- self.sub_pos = [0,0,0]
- self.relative_pos = []
- self.rate = rospy.Rate(20)
- self.pub_msg = Vision_Array()
- self.testing = True
-
- def get_obj_pos(self, data):
- names = data.name
- model_pos = [pose.position for pose in data.pose]
- temp_obj = []
- temp_sub = []
- temp_ident = []
- if(not self.testing):
- if(len(names) != 0):
- for i in range(len(names)):
- if(names[i] != "aquadrone"):
- temp_obj.append([model_pos[i].x, model_pos[i].y, model_pos[i].z])
- temp_ident.append(names[i])
- else:
- temp_sub = [model_pos[i].x, model_pos[i].y, model_pos[i].z]
- else:
- temp_obj = [[0,0,1],[0,0,1],[0,0,1],[0,0,1]]
- temp_sub = [0,0,0]
- temp_ident = ["a","a","a","a"]
- self.object_pos = temp_obj
- self.sub_pos = temp_sub
- self.object_ident = temp_ident
-
-
- def calc_rel_pos(self):
- self.relative_pos = []
- if len(self.object_pos) != 0 and len(self.sub_pos) != 0:
- for i in range(len(self.object_pos)):
- self.relative_pos.append([ self.object_pos[i][0] - self.sub_pos[0],
- self.object_pos[i][1] - self.sub_pos[1],
- self.object_pos[i][2] - self.sub_pos[2]])
- else:
- self.relative_pos = [[0,0,0]]
-
- def get_pub_msg(self):
- #print(self.object_pos)
- message = Vision_Array()
- #initializing message
- if(len(self.relative_pos) != 0 and len(self.object_ident) != 0):
- message.data = []
- for i in range(len(self.relative_pos)):
- message.data.append(Vision())
- new_data = Vector3()
- new_data.x = self.relative_pos[i][0]
- new_data.y = self.relative_pos[i][1]
- new_data.z = self.relative_pos[i][2]
- message.data[i].obj_data = new_data
- message.data[i].identifier = self.object_ident[i]
-
- self.pub_msg = message
-
- def run(self):
- while not rospy.is_shutdown():
- #self.set_rate()
- self.calc_rel_pos()
- self.get_pub_msg()
- self.relative_pos_pub.publish(self.pub_msg)
- self.rate.sleep()
-
-if __name__ == "__main__":
- rospy.init_node("omniscient_vision_node")
-
- faker = Omniscient_Vision()
- faker.run()
-
diff --git a/vision/CMakeLists.txt b/world_positioning/CMakeLists.txt
similarity index 93%
rename from vision/CMakeLists.txt
rename to world_positioning/CMakeLists.txt
index c3fc1087..f638c2bf 100644
--- a/vision/CMakeLists.txt
+++ b/world_positioning/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required(VERSION 3.0.2)
-project(vision)
+cmake_minimum_required(VERSION 2.8.3)
+project(world_positioning)
## Compile as C++11, supported in ROS Kinetic and newer
# add_compile_options(-std=c++11)
@@ -7,11 +7,10 @@ project(vision)
## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
-find_package(catkin REQUIRED
+find_package(catkin REQUIRED COMPONENTS
rospy
geometry_msgs
std_msgs
- gazebo_msgs
)
## System dependencies are found with CMake's conventions
@@ -48,8 +47,10 @@ find_package(catkin REQUIRED
## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
## Generate messages in the 'msg' folder
-#add_message_files(
-# geometry_msgs.msg
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
# )
## Generate services in the 'srv' folder
@@ -102,9 +103,9 @@ find_package(catkin REQUIRED
## CATKIN_DEPENDS: catkin_packages dependent projects also need
## DEPENDS: system dependencies of this project that dependent projects also need
catkin_package(
- #INCLUDE_DIRS include
- #LIBRARIES omniscient_fake
- #CATKIN_DEPENDS geometry_msgs rospy std_msgs
+# INCLUDE_DIRS include
+# LIBRARIES world_positioning
+# CATKIN_DEPENDS rospy
# DEPENDS system_lib
)
@@ -116,12 +117,12 @@ catkin_package(
## Your package locations should be listed before other locations
include_directories(
# include
- ${catkin_INCLUDE_DIRS}
+ ${catkin_INCLUDE_DIRS}
)
## Declare a C++ library
# add_library(${PROJECT_NAME}
-# src/${PROJECT_NAME}/omniscient_fake.cpp
+# src/${PROJECT_NAME}/world_positioning.cpp
# )
## Add cmake target dependencies of the library
@@ -132,7 +133,7 @@ include_directories(
## Declare a C++ executable
## With catkin_make all packages are built within a single CMake context
## The recommended prefix ensures that target names across packages don't collide
-# add_executable(${PROJECT_NAME}_node src/omniscient_fake_node.cpp)
+# add_executable(${PROJECT_NAME}_node src/world_positioning_node.cpp)
## Rename C++ executable without prefix
## The above recommended prefix causes long target names, the following renames the
@@ -158,7 +159,7 @@ include_directories(
## Mark executable scripts (Python etc.) for installation
## in contrast to setup.py, you can choose the destination
-# catkin_install_python(PROGRAMS
+# install(PROGRAMS
# scripts/my_python_script
# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
# )
@@ -196,7 +197,7 @@ include_directories(
#############
## Add gtest based cpp test target and link libraries
-# catkin_add_gtest(${PROJECT_NAME}-test test/test_omniscient_fake.cpp)
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_world_positioning.cpp)
# if(TARGET ${PROJECT_NAME}-test)
# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
# endif()
diff --git a/vision/package.xml b/world_positioning/package.xml
similarity index 90%
rename from vision/package.xml
rename to world_positioning/package.xml
index db3a3372..c3ae9b97 100644
--- a/vision/package.xml
+++ b/world_positioning/package.xml
@@ -1,8 +1,8 @@
- vision
+ world_positioning
0.0.0
- The vision package
+ The world_positioning package
@@ -19,7 +19,7 @@
-
+
@@ -52,17 +52,15 @@
rospy
std_msgs
geometry_msgs
- gazebo_msgs
rospy
std_msgs
geometry_msgs
- gazebo_msgs
rospy
std_msgs
geometry_msgs
- gazebo_msgs
+
diff --git a/vision/setup.py b/world_positioning/setup.py
similarity index 81%
rename from vision/setup.py
rename to world_positioning/setup.py
index c97e0d92..1a4e0afe 100644
--- a/vision/setup.py
+++ b/world_positioning/setup.py
@@ -2,7 +2,7 @@
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup()
-d['packages'] = ['omniscient_fake']
+d['packages'] = ['world_positioning']
d['package_dir'] = {'': 'src'}
setup(**d)
diff --git a/world_positioning/src/node.py b/world_positioning/src/node.py
new file mode 100755
index 00000000..a24222ed
--- /dev/null
+++ b/world_positioning/src/node.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+import rospy
+from worldP_ekf import EKF
+
+
+if __name__ == "__main__":
+ rospy.init_node("world_state_estimation")
+
+ ekf = EKF()
+ ekf.run()
diff --git a/world_positioning/src/worldP_ekf.py b/world_positioning/src/worldP_ekf.py
new file mode 100755
index 00000000..a60dbf5e
--- /dev/null
+++ b/world_positioning/src/worldP_ekf.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python
+
+import rospy
+import math
+import scipy.linalg
+
+import autograd.numpy as np
+from autograd import grad, jacobian, elementwise_grad
+
+from geometry_msgs.msg import Point, Vector3, Quaternion
+from aquadrone_msgs.msg import SubState, WorldState, Vision_Array, Vision
+
+from worldP_indicies import IDx as IDx
+from worldP_listener import SubStateListener, ZedCamListener
+from aquadrone_math_utils.orientation_math import Yaw, Pitch, Roll, RPY_Matrix
+
+def quat_msg_to_array(q):
+ return np.array([q.w, q.x, q.y, q.z])
+
+def vec_to_msg(vec):
+ message = Vector3()
+ message.x = vec[0]
+ message.y = vec[1]
+ message.z = vec[2]
+ return message
+
+def quat_to_euler(q):
+ # From wikipedia (https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles)
+ # Copied from state estimation
+ w = q[0]
+ x = q[1]
+ y = q[2]
+ z = q[3]
+
+ euler = np.array([0,0,0])
+
+ # roll (x-axis rotation)
+ sinr_cosp = +2.0 * (w * x + y * z)
+ cosr_cosp = +1.0 - 2.0 * (x * x + y * y)
+ #angles[0] = np.arctan2(sinr_cosp, cosr_cosp)
+
+
+ # yaw (z-axis rotation)
+ siny_cosp = +2.0 * (w * z + x * y)
+ cosy_cosp = +1.0 - 2.0 * (y * y + z * z)
+ #angles[2] = math.atan2(siny_cosp, cosy_cosp)
+
+ # pitch (y-axis rotation)
+ sinp = +2.0 * (w * y - z * x)
+ if np.abs(sinp) >= 1:
+ #angles[1] = np.sign(sinp) * math.pi # use 90 degrees if out of range
+ return np.array([ np.arctan2(sinr_cosp, cosr_cosp),
+ np.sign(sinp) * math.pi,
+ np.arctan2(siny_cosp, cosy_cosp)
+ ])
+ else:
+ #angles[1] = math.asin(sinp)
+ return np.array([ np.arctan2(sinr_cosp, cosr_cosp),
+ np.arcsin(sinp),
+ np.arctan2(siny_cosp, cosy_cosp)
+ ])
+
+ return euler
+
+def quat_msg_to_euler(q):
+ q_array = quat_msg_to_array(q)
+ return vec_to_msg(quat_to_euler(q_array))
+
+def quat_to_euler_jacobian(q):
+ return jacobian(quat_to_euler(q))
+
+class EKF:
+ def __init__(self):
+ # https://en.wikipedia.org/wiki/Extended_Kalman_filter
+
+ ''' Model Description
+ x = state
+ u = inputs (sub position and orientation, object position in camera)
+ z = outputs (sub and object position)
+ P = uncertainty/variance matrix of state x
+
+ Dynamics: x[k+1] = f(x[k], u[k])
+ Outputs: z[k] = h(x[k], u[k])
+
+ Linear Form:
+ x[k+1] = A*x[k] + B*u[k]
+ z[k] = C*x[k] + B*u[k]
+ '''
+
+ ''' Process Description
+
+ Each loop will do a prediction step based on the sub state,
+ to update the expected sub and world object positions and their variance.
+
+ Then a measurement step, where it uses input from the zed camera's
+ detected objects to refine the prediction and varience
+
+ Then there is a step where the new expected sub state and world state is converted
+ to a WorldState message which is then published.
+
+ '''
+
+ #first version: only 1 object in state
+
+ self.nState = 13 #number of elements in state
+ self.numObjects = 10 #number objects being considered
+
+ #n and m are only for 1 point
+ self.n = self.nState+3*self.numObjects #substate + world x,y,z of object
+ self.m = 7+3*self.numObjects #position and orientation + x,y,z of object in camera
+
+ self.x = np.zeros((self.n,1))
+ self.x[IDx.Ow] = 1
+
+ self.u = np.zeros((self.m, 1))
+
+ self.B = np.zeros((self.nState,1)) #input modification matrix
+
+ self.P = np.eye(self.n)
+ self.Q = np.eye(self.n) * 0.01 #uncert in dynamics model
+
+ self.camera_info = ZedCamListener()
+ self.state_info = SubStateListener()
+
+ self.state_msg = SubState()
+ self.camera_msg = Vision_Array()
+ self.world_msg = WorldState()
+
+ self.rate = 20
+ self.rate_ctrl = rospy.Rate(self.rate)
+
+ self.calc_F = jacobian(self.f)
+
+ self.world_pub = rospy.Publisher("world_state", WorldState, queue_size = 1)
+
+ self.last_prediction_t = self.get_t()
+
+ def get_t(self):
+ return rospy.Time.now().to_sec()
+
+ def prediction(self):
+ #jacobian of F
+ F = self.calc_F(self.x, self.u)
+ F = np.reshape(F,(self.n ,self.n))
+ Fx = F[0:self.n, 0:self.n]
+ #print(Fx)
+
+ #update x and P
+ self.x = self.f(self.x, self.u)
+
+ self.P = np.linalg.multi_dot([Fx,self.P, np.transpose(Fx)]) + self.Q
+
+
+ def update(self):
+ #update world based on sub state
+ z = np.zeros((0,0)) #measurements
+ h = np.zeros((0,0)) #measurement prediction
+
+ H = np.zeros((0,0)) #jacobian of h
+ R = np.zeros((0,0)) #uncert of measurement
+
+ def add_block_diag(H,newH):
+ if H.shape[0] == 0:
+ #ans = np.diag(newH)
+ ans = newH
+ #print(ans)
+ ans.reshape((newH.shape[0],newH.shape[0]))
+ return ans
+ return scipy.linalg.block_diag(H,newH)
+
+ def add_block_vert(H,newH):
+ if H.shape[0] == 0:
+ return newH
+ return np.vstack([H,newH])
+
+ def read_listener(listener, z, h, H, R):
+
+ if listener.is_valid():
+ meas = listener.get_measurement_z()
+ z = np.append(z,np.array([meas]))
+ pred = listener.state_to_measurement_h(self.x, self.u)
+ h = np.append(h,np.array([pred]))
+
+ #print(self.x)
+ #print("test")
+ H = add_block_vert(H, listener.get_H(self.x, self.u))
+ #print(listener, listener.is_valid())
+ R = add_block_diag(R, listener.get_R())
+
+ return z,h,H,R
+ """
+ def read_listener_camera(listener, z,h,H,R):
+ if listener.is_valid():
+ meas = listener.get_measurement_z()
+
+ rotation_matrix = RPY_Matrix([[0,0,0],
+ [0,0,0],
+ [0,0,0]])
+ #returns np array
+ for i in range(0,meas.shape[0],3): # reads in a point at a time
+ #get sub orientation in quat
+ sub_quat = np.array([z[6],z[7],z[8],z[9]])
+ #turn into euler
+ sub_euler = quat_to_euler(sub_quat)
+ #get object pos relative to camera
+ camP = np.array([meas[i],meas[i+1],meas[i+2]])
+ #get sub position
+ subP = np.array([z[0],z[1],z[2]])
+
+ rotation_matrix = RPY_Matrix(sub_euler[0], sub_euler[1], sub_euler[2])
+
+ final = camP*rotation_matrix+ subP
+
+
+ z = np.append(z,np.array([final]))
+ pred = listener.state_to_measurement_h(self.x, self.u) #TODO might need to check x
+ h=np.append(h,np.array([pred]))
+
+ H = add_block_vert(H, listener.get_H(self.x, self.u))
+
+ local_var = listener.get_R()
+
+ #local_x = np.array([local_var[0],0,0]) #making local x var vector
+ #local_y = np.array([0,local_var[1],0]) #making local y var vector
+ #local_z = np.array([0,0,local_var[2]]) #making local z var vector
+
+ world_var = local_var*rotation_matrix #converting local variance to world frame variance
+ R = add_block_diag(R, world_var) #TODO need to implement localizing of uncert
+
+ return z,h,H,R
+ """
+
+ for listener in (self.state_info, self.camera_info):
+ try:
+ z,h,H,R = read_listener(listener,z,h,H,R)
+ except TypeError as e:
+ print(e)
+ return
+
+ if R.shape[0] == 0:
+ return
+
+ #error in measurments vs predicted
+ y = z-h
+ y.shape = (y.shape[0],1)
+
+ #calc kalman gain
+ Ht = np.transpose(H)
+ S = np.dot(np.dot(H,self.P),Ht)+R
+ #print(S)
+ K = np.dot(np.dot(self.P,Ht),np.linalg.inv(S)) #TODO with "proper" rotation and stuff of the input data, we get a singular matrix, so its just using the raw data for now, which is technically correct for the fake omniscient node thing
+
+ KH = np.dot(K,H)
+ I = np.eye(KH.shape[0])
+
+ diff = np.dot(K,y)
+
+ #update state x and uncert P
+ self.x = self.x+diff
+ self.P = np.dot(I-KH,self.P)
+
+ def f(self, x, u):
+ #calculate next state from current state x and inputs u
+ #must be autograd-able
+
+ #update time and calc dt
+ t = self.get_t()
+ dt = t - self.last_prediction_t
+ self.last_prediction_t = t
+
+ #find new position
+ new_pos = np.array([x[IDx.SPx] + dt*x[IDx.SVx],
+ x[IDx.SPy] + dt*x[IDx.SVy],
+ x[IDx.SPz] + dt*x[IDx.SVz]])
+
+ #find new orientation in euler
+ new_orient = self.update_orientation_quaternion(x)
+ new_vel = np.array([x[IDx.SVx],x[IDx.SVy],x[IDx.SVz]])
+ new_ang_vel = np.array([x[IDx.Ax],x[IDx.Ay],x[IDx.Az]])
+
+
+ #find new object position (essentially the same spot since they dont move)
+ new_obj_pos = np.array(x[self.nState:x.shape[0]])
+
+
+ #new_obj_pos = np.zeros((x.shape[0]-self.nState,1))
+ #for i in range(self.nState,x.shape[0]):
+ # new_obj_pos[i-self.nState] = x.tolist()
+
+ return_val = np.vstack([new_pos,new_vel, new_orient, new_ang_vel, new_obj_pos])
+ return return_val
+
+ #copied from ekf.py
+ def update_orientation_quaternion(self, x):
+
+ dt = 1.0 / self.rate
+ # https://gamedev.stackexchange.com/questions/108920/applying-angular-velocity-to-quaternion
+ q_old = np.array([x[IDx.Ow],
+ x[IDx.Ox],
+ x[IDx.Oy],
+ x[IDx.Oz]])
+ try:
+ q_old.shape = (4,1)
+ except Exception as e:
+ pass
+
+ w = np.array([0,
+ x[IDx.Ax],
+ x[IDx.Ay],
+ x[IDx.Az]])[np.newaxis]
+ w = np.transpose(w)
+ try:
+ w.shape = (4,1)
+ except Exception as e:
+ pass
+
+ new_or = q_old + 0.5 * dt * w
+
+ try:
+ new_or.shape = (4,1)
+ except Exception as e:
+ pass
+ mag = np.linalg.norm(new_or)
+ new_or = new_or / mag
+ return new_or
+
+ def output(self):
+ var = np.array(np.diagonal(self.P))
+ var.shape = (self.n,1)
+
+ def fill_vector_value_variance(self,val, var, x, P, i0):
+ val.x = x[i0+0]
+ val.y = x[i0+1]
+ val.z = x[i0+2]
+
+ var.x = P[i0+0, i0+0]
+ var.y = P[i0+1, i0+1]
+ var.z = P[i0+2, i0+2]
+
+ def fill_vector_value_variance_objects(self,val, var, x, P, i0):
+
+ #get sub orientation in quat
+ sub_quat = np.array([x[IDx.Ow],x[IDx.Ox],x[IDx.Oy],x[IDx.Oz]])
+ #turn into euler
+ sub_euler = quat_to_euler(sub_quat)
+ """
+ #get object pos relative to camera
+ camP = np.array([x[i0],x[i0+1],x[i0+2]])
+ #get sub position
+ subP = np.array([x[IDx.Px],x[IDx.Py],x[IDx.Pz]])
+ """
+ rotation_matrix = RPY_Matrix(sub_euler[0], sub_euler[1], sub_euler[2])
+
+ #final = camP*rotation_matrix+ subP
+
+ #val = Vector3()
+
+ val.x = x[i0][0] #absolute position of objects
+ val.y = x[i0 + 1][0]
+ val.z = x[i0 + 2][0]
+
+ #print("val",x[i0][0])
+
+ # calculate varience of objects' position
+
+ v = np.array([P[i0],P[i0+1],P[i0+2]])
+ var_final = np.linalg.multi_dot([v.T,rotation_matrix])
+
+ #var = Vector3()
+
+ var.x = var_final[0][0]
+ var.y = var_final[1][0]
+ var.z = var_final[2][0]
+
+
+ def update_world_msg(self):
+ #position msg part
+ self.world_msg = WorldState()
+ self.fill_vector_value_variance(self.world_msg.position,
+ self.world_msg.pos_variance,
+ self.x, self.P, IDx.SPx)
+ #orientation msg part
+ quat = Quaternion()
+ quat.x = self.x[IDx.Ox]
+ quat.y = self.x[IDx.Oy]
+ quat.z = self.x[IDx.Oz]
+ quat.w = self.x[IDx.Ow]
+
+ self.world_msg.orientation_quat = quat
+ self.world_msg.orientation_quat_variance.w = self.P[IDx.Ow, IDx.Ow]
+ self.world_msg.orientation_quat_variance.x = self.P[IDx.Ox, IDx.Ox]
+ self.world_msg.orientation_quat_variance.y = self.P[IDx.Oy, IDx.Oy]
+ self.world_msg.orientation_quat_variance.z = self.P[IDx.Oz, IDx.Oz]
+
+ #obj position msg part
+ #print(self.x[self.nState:self.nState+(self.numObjects-1)*3])
+ self.world_msg.obj_positions = []
+ self.world_msg.obj_pos_variances = []
+ for i in range(0,self.numObjects):
+ self.world_msg.obj_positions.append(Point())
+ self.world_msg.obj_pos_variances.append(Vector3())
+ self.fill_vector_value_variance_objects(self.world_msg.obj_positions[i],
+ self.world_msg.obj_pos_variances[i],
+ self.x, self.P, self.nState + i*3)
+
+ def run(self):
+ while not rospy.is_shutdown():
+ self.rate_ctrl.sleep()
+
+ self.output()
+
+ self.prediction()
+ self.update()
+ self.update_world_msg()
+ self.world_pub.publish(self.world_msg)
diff --git a/world_positioning/src/worldP_indicies.py b/world_positioning/src/worldP_indicies.py
new file mode 100644
index 00000000..4b361e9d
--- /dev/null
+++ b/world_positioning/src/worldP_indicies.py
@@ -0,0 +1,24 @@
+class IDx:
+ #Sub Position
+ SPx = 0
+ SPy = 1
+ SPz = 2
+
+ #Sub Velocity
+ SVx = 3
+ SVy = 4
+ SVz = 5
+
+ # Orientation - Quaternion
+ Ow = 6
+ Ox = 7
+ Oy = 8
+ Oz = 9
+
+ # Angular Velocity
+ Ax = 10
+ Ay = 11
+ Az = 12
+
+
+
diff --git a/world_positioning/src/worldP_listener.py b/world_positioning/src/worldP_listener.py
new file mode 100644
index 00000000..3c4cf705
--- /dev/null
+++ b/world_positioning/src/worldP_listener.py
@@ -0,0 +1,345 @@
+import rospy
+import math
+import numpy as np
+import scipy.linalg
+
+import autograd.numpy as np
+from autograd import grad, jacobian, elementwise_grad
+
+from geometry_msgs.msg import Point, Vector3, Quaternion
+from aquadrone_msgs.msg import SubState, Vision_Array, Vision
+from aquadrone_math_utils.orientation_math import Yaw, Pitch, Roll, RPY_Matrix
+
+from worldP_indicies import IDx as IDx
+
def quat_to_euler(q):
    """Convert a unit quaternion (w, x, y, z) to Euler angles [roll, pitch, yaw].

    Formulas follow Wikipedia's "Conversion between quaternions and Euler
    angles" page (copied from state estimation). Uses numpy ops throughout so
    the function stays autograd-differentiable inside the EKF measurement
    jacobian.

    Args:
        q: length-4 array-like ordered (w, x, y, z).

    Returns:
        np.array([roll, pitch, yaw]) in radians.
    """
    w = q[0]
    x = q[1]
    y = q[2]
    z = q[3]

    # roll (x-axis rotation)
    sinr_cosp = +2.0 * (w * x + y * z)
    cosr_cosp = +1.0 - 2.0 * (x * x + y * y)
    roll = np.arctan2(sinr_cosp, cosr_cosp)

    # yaw (z-axis rotation)
    siny_cosp = +2.0 * (w * z + x * y)
    cosy_cosp = +1.0 - 2.0 * (y * y + z * z)
    yaw = np.arctan2(siny_cosp, cosy_cosp)

    # pitch (y-axis rotation); arcsin is undefined past +/-1, so clamp to
    # +/-90 degrees at gimbal lock. (Bug fix: the original returned
    # +/-math.pi here -- 180 degrees -- despite its own "use 90 degrees"
    # comment.)
    sinp = +2.0 * (w * y - z * x)
    if np.abs(sinp) >= 1:
        pitch = np.sign(sinp) * (math.pi / 2.0)
    else:
        pitch = np.arcsin(sinp)

    return np.array([roll, pitch, yaw])
+
class BaseListener(object):
    """Base class for EKF measurement sources feeding the world-position filter.

    Subclasses subscribe to a ROS topic, cache the latest reading, and expose
    it as a measurement vector z with covariance R plus a differentiable
    measurement model h(x, u).
    """

    def __init__(self):
        # Timestamp (s) of the most recent message, used for staleness checks.
        self.last_time = rospy.Time.now().to_sec()
        # Autograd jacobian of the measurement model, evaluated in get_H.
        self.calc_H = jacobian(self.state_to_measurement_h)
        # Maximum number of tracked objects carried in the state vector.
        self.numObjects = 10

    def is_valid(self):
        """Return True while the latest reading is newer than the timeout."""
        return rospy.Time.now().to_sec() - self.last_time < self.get_timeout_sec()

    def get_H(self, x, u):
        """Return the (p x n) measurement jacobian evaluated at state x, input u."""
        H = self.calc_H(x, u)
        H = np.reshape(H, (self.get_p(), x.shape[0]))
        return H

    # ---------------------------
    # Listener Specific Functions
    # ---------------------------

    def get_timeout_sec(self):
        # Amount of time (s) before old readings are considered outdated.
        raise NotImplementedError

    def get_p(self):
        # Number of elements in the measurement vector.
        raise NotImplementedError

    def get_measurement_z(self):
        # Latest measurement as a (p x 1) column vector.
        raise NotImplementedError

    def get_R(self):
        # Measurement covariance as a (p x p) matrix.
        raise NotImplementedError

    def state_to_measurement_h(self, x, u):
        # Predicted sensor reading for state x and control input u.
        # Must use autograd-compatible ops (see get_H / calc_H).
        # Bug fix: the original stub omitted (x, u) even though get_H and
        # every subclass call it with both arguments.
        raise NotImplementedError
+
class SubStateListener(BaseListener):
    """Listens to the sub's own state estimate and exposes it as an EKF measurement.

    Measurement vector (p = 13): position (3), velocity (3), orientation
    quaternion w,x,y,z (4), angular velocity (3).
    """

    def __init__(self):
        super(SubStateListener, self).__init__()
        self.state_sub = rospy.Subscriber("state_estimation", SubState, self.subState_cb)

        # Cached readings and their variances. Flat defaults until the first
        # callback, which replaces them with (n x 1) column vectors.
        self.position = np.array([0, 0, 0])
        self.pos_var = np.array([0, 0, 0])

        self.velocity = np.array([0, 0, 0])
        self.vel_var = np.array([0, 0, 0])

        # Identity quaternion (w, x, y, z) with unit variance before any message.
        self.orientation = np.array([1, 0, 0, 0])
        self.orient_var = np.array([1, 1, 1, 1])

        self.angular_vel = np.array([0, 0, 0])
        self.angular_vel_var = np.array([0, 0, 0])

    def get_timeout_sec(self):
        # Readings older than this (s) are treated as stale. Subject to change.
        return 1

    def get_p(self):
        # 3 pos + 3 vel + 4 quat + 3 angular vel.
        return 13

    def get_measurement_z(self):
        """Stack the cached readings into a (13 x 1) measurement column vector."""
        vec = np.zeros((self.get_p(), 1))
        for i in range(0, 3):
            vec[i] = self.position[i]
        for i in range(0, 3):
            vec[i + 3] = self.velocity[i]
        for i in range(0, 4):
            vec[i + 6] = self.orientation[i]
        for i in range(0, 3):
            vec[i + 10] = self.angular_vel[i]
        return vec

    def get_R(self):
        """Diagonal (13 x 13) covariance built from the cached per-axis variances."""
        vec = np.zeros((self.get_p(), self.get_p()))
        for i in range(0, 3):
            vec[i, i] = self.pos_var[i]
        for i in range(0, 3):
            vec[i + 3, i + 3] = self.vel_var[i]
        for i in range(0, 4):
            vec[i + 6, i + 6] = self.orient_var[i]
        for i in range(0, 3):
            vec[i + 10, i + 10] = self.angular_vel_var[i]
        return vec

    def state_to_measurement_h(self, x, u):
        # Direct readout of the first 13 state entries; u is unused but
        # required by the EKF listener interface.
        z = np.array([x[IDx.SPx],
                      x[IDx.SPy],
                      x[IDx.SPz],
                      x[IDx.SVx],
                      x[IDx.SVy],
                      x[IDx.SVz],
                      x[IDx.Ow],
                      x[IDx.Ox],
                      x[IDx.Oy],
                      x[IDx.Oz],
                      x[IDx.Ax],
                      x[IDx.Ay],
                      x[IDx.Az]])
        return z

    def subState_cb(self, msg):
        """Cache the latest SubState message as (n x 1) column vectors.

        Bug fix: removed a stray debug print of the position left in the
        callback; it spammed the console on every message.
        """
        # Position and its variance.
        self.position = np.array([msg.position.x,
                                  msg.position.y,
                                  msg.position.z]).reshape(3, 1)
        self.pos_var = np.array([msg.pos_variance.x,
                                 msg.pos_variance.y,
                                 msg.pos_variance.z]).reshape(3, 1)
        # Velocity and its variance.
        self.velocity = np.array([msg.velocity.x,
                                  msg.velocity.y,
                                  msg.velocity.z]).reshape(3, 1)
        self.vel_var = np.array([msg.vel_variance.x,
                                 msg.vel_variance.y,
                                 msg.vel_variance.z]).reshape(3, 1)
        # Orientation quaternion (w, x, y, z) and its variance.
        self.orientation = np.array([msg.orientation_quat.w,
                                     msg.orientation_quat.x,
                                     msg.orientation_quat.y,
                                     msg.orientation_quat.z]).reshape(4, 1)
        self.orient_var = np.array([msg.orientation_quat_variance.w,
                                    msg.orientation_quat_variance.x,
                                    msg.orientation_quat_variance.y,
                                    msg.orientation_quat_variance.z]).reshape(4, 1)
        # Angular velocity and its variance.
        self.angular_vel = np.array([msg.ang_vel.x,
                                     msg.ang_vel.y,
                                     msg.ang_vel.z]).reshape(3, 1)
        self.angular_vel_var = np.array([msg.ang_vel_variance.x,
                                         msg.ang_vel_variance.y,
                                         msg.ang_vel_variance.z]).reshape(3, 1)

        self.last_time = rospy.Time.now().to_sec()
+
class ZedCamListener(BaseListener):
    """Listens to ZED camera vision data and exposes tracked-object positions
    as EKF measurements.

    Measurement vector (p = 3 * numObjects): x, y, z per tracked object,
    expressed relative to the sub.
    """

    def __init__(self):
        super(ZedCamListener, self).__init__()
        self.cam_sub = rospy.Subscriber("Vision_Data", Vision_Array, self.cam_cb)

        # Flattened (3 * numObjects x 1) object positions and variances.
        self.obj_pos = np.zeros((3 * self.numObjects, 1))
        self.obj_pos_var = np.zeros((3 * self.numObjects, 1))
        # Identifiers of every object seen so far, in first-seen order.
        self.obj_ident_list = []

    def get_timeout_sec(self):
        # Camera data goes stale quickly.
        return 0.1

    def get_p(self):
        # Three coordinates per tracked object.
        return 3 * self.numObjects

    def get_measurement_z(self):
        """Return the cached object positions as a (p x 1) column vector."""
        vec = np.zeros((self.get_p(), 1))
        for i in range(0, 3 * self.numObjects):
            vec[i] = self.obj_pos[i]
        return vec

    def get_R(self):
        """Diagonal (p x p) covariance from the cached per-coordinate variances."""
        var = np.zeros((self.get_p(), self.get_p()))
        for i in range(0, 3 * self.numObjects):
            var[i, i] = self.obj_pos_var[i]
        return var

    def quat_to_euler(self, q):
        """Convert a unit quaternion (w, x, y, z) to [roll, pitch, yaw].

        NOTE(review): duplicates the module-level quat_to_euler; kept inline
        so this fix stands on its own. Bug fix: the gimbal-lock branch
        returned +/-math.pi (180 degrees) for pitch where the intent -- per
        the original comment -- was +/-90 degrees.
        """
        w = q[0]
        x = q[1]
        y = q[2]
        z = q[3]

        # roll (x-axis rotation)
        sinr_cosp = +2.0 * (w * x + y * z)
        cosr_cosp = +1.0 - 2.0 * (x * x + y * y)
        roll = np.arctan2(sinr_cosp, cosr_cosp)

        # yaw (z-axis rotation)
        siny_cosp = +2.0 * (w * z + x * y)
        cosy_cosp = +1.0 - 2.0 * (y * y + z * z)
        yaw = np.arctan2(siny_cosp, cosy_cosp)

        # pitch (y-axis rotation); clamp to +/-90 degrees at gimbal lock.
        sinp = +2.0 * (w * y - z * x)
        if np.abs(sinp) >= 1:
            pitch = np.sign(sinp) * (math.pi / 2.0)
        else:
            pitch = np.arcsin(sinp)

        return np.array([roll, pitch, yaw])

    def state_to_measurement_h(self, x, u):
        """Predicted camera measurement: each object's offset from the sub.

        State entries are copied element-wise first so autograd boxes survive
        the array rebuild; u is unused but required by the listener interface.
        """
        x_temp = []
        for i in range(x.shape[0]):
            x_temp.append(x[i])
        x_process = np.array(x_temp)

        # Sub pose from the state vector.
        sub_pos = np.array([x_process[IDx.SPx], x_process[IDx.SPy], x_process[IDx.SPz]])
        sub_quat = np.array([x_process[IDx.Ow], x_process[IDx.Ox],
                             x_process[IDx.Oy], x_process[IDx.Oz]])
        # World -> body rotation built from negated Euler angles.
        sub_euler = quat_to_euler(sub_quat)
        rotation_matrix = RPY_Matrix(-sub_euler[0], -sub_euler[1], -sub_euler[2])

        ret_temp = []
        for i in range(0, 3 * self.numObjects, 3):
            # Object world position lives after the 13-entry sub state.
            raw = np.array([x_process[13 + i], x_process[13 + i + 1], x_process[13 + i + 2]])

            # Full body-frame transform; computed but deliberately unused --
            # the measurement below bypasses rotation for now (WIP).
            transformed = np.linalg.multi_dot([rotation_matrix, raw])
            relative_not_rot = transformed + sub_pos

            # Plain world-frame offset from the sub (rotation bypass).
            test = raw - sub_pos
            ret_temp.append(test[0])
            ret_temp.append(test[1])
            ret_temp.append(test[2])

        return np.array(ret_temp)

    def cam_cb(self, msg):
        """Cache the latest vision readings and track newly-seen identifiers."""
        # Update/create the identifier list.
        if self.obj_ident_list == []:
            self.obj_ident_list = [v.identifier for v in msg.data]
        else:
            for v in msg.data:
                if v.identifier not in self.obj_ident_list:
                    self.obj_ident_list.append(v.identifier)

        # Copy up to numObjects detections into the flat position vector.
        for i in range(0, self.numObjects):
            if i == len(msg.data):  # stop before indexing past the message
                break
            self.obj_pos[3 * i] = msg.data[i].obj_data.x
            self.obj_pos[3 * i + 1] = msg.data[i].obj_data.y
            self.obj_pos[3 * i + 2] = msg.data[i].obj_data.z
        self.obj_pos.shape = (3 * self.numObjects, 1)

        # TODO: message covariance is not wired through yet; variances stay zero.
        for i in range(0, self.numObjects * 3):
            self.obj_pos_var[i] = 0
        self.obj_pos_var.shape = (3 * self.numObjects, 1)

        self.last_time = rospy.Time.now().to_sec()