Hi,
I'm trying to use hector_mapping on my custom robot to produce a map; this works fine without any odometry information from my odom node.
Here's the launch file (I didn't write it, but it works pretty well).
The problem is when I try to use it with my odometry information, published from my odom node (which has the same structure as the one from the odometry tutorial, but is customised to take my encoder data and my equations):
#include <ros/ros.h>
#include <tf/transform_broadcaster.h>
#include <nav_msgs/Odometry.h>
#include <std_msgs/Float32.h>
#include <cmath>
#define RovWid 0.25
#define Pi 3.14159265358979
#define TicksPerRev 600
#define WheelDia 0.13
float left,right; //declare global variables to hold incoming data
void CallbackLeft(const std_msgs::Float32::ConstPtr& msg)
{
//ROS_INFO("Left ticks [%f]", msg->data);
left = msg->data;
}
void CallbackRight(const std_msgs::Float32::ConstPtr& msg)
{
//ROS_INFO("Right ticks [%f]", msg->data);
right = msg->data;
}
int main(int argc, char** argv){
ros::init(argc, argv, "odometry_publisher");
ros::NodeHandle n;
ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 50);
ros::Subscriber sub1 = n.subscribe("ticks_left", 100, CallbackLeft);
ros::Subscriber sub2 = n.subscribe("ticks_right", 100, CallbackRight);
tf::TransformBroadcaster odom_broadcaster;
double vx = 0.0;
double vy = 0.0;
double vth = 0.0;
float DeltaLeft = 0;
float DeltaRight = 0;
float PreviousRight = 0;
float PreviousLeft = 0;
float Theta = 0;
float X = 0;
float Y = 0;
float Circum, DisPerTick, expr1,right_minus_left;
ros::Time current_time, last_time;
current_time = ros::Time::now();
last_time = ros::Time::now();
ros::Rate r(5);
ROS_INFO("Node started");
Circum = Pi * WheelDia;
DisPerTick = Circum / TicksPerRev;
while(n.ok()){
ros::spinOnce(); // check for incoming messages
current_time = ros::Time::now();
DeltaRight = (right - PreviousRight) * DisPerTick;
DeltaLeft = (left - PreviousLeft) * DisPerTick;
PreviousRight = right;
PreviousLeft = left;
if (DeltaLeft == DeltaRight){
X += DeltaLeft * cos(Theta);
Y += DeltaLeft * sin(Theta);
}
else{
expr1 = RovWid * 2 * (DeltaRight + DeltaLeft)/ 2.0 / (DeltaRight - DeltaLeft);
right_minus_left = DeltaRight - DeltaLeft;
X += expr1 * (sin(right_minus_left / (RovWid *2) + Theta) - sin(Theta));
Y -= expr1 * (cos(right_minus_left / (RovWid *2) + Theta) - cos(Theta));
Theta += right_minus_left / (RovWid *2);
Theta = Theta - ((2 * Pi) * floor( Theta / (2 * Pi)));
}
ROS_INFO("X [%f]", X);
ROS_INFO("Y [%f]", Y);
ROS_INFO("Theta [%f]", Theta);
ROS_INFO("\n");
geometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromYaw(Theta);
geometry_msgs::TransformStamped odom_trans;
odom_trans.header.stamp = current_time;
odom_trans.header.frame_id = "odom";
odom_trans.child_frame_id = "base_link";
odom_trans.transform.translation.x = X;
odom_trans.transform.translation.y = Y;
odom_trans.transform.translation.z = 0.0;
odom_trans.transform.rotation = odom_quat;
odom_broadcaster.sendTransform(odom_trans);
nav_msgs::Odometry odom;
odom.header.stamp = current_time;
odom.header.frame_id = "odom";
odom.pose.pose.position.x = X;
odom.pose.pose.position.y = Y;
odom.pose.pose.position.z = 0.0;
odom.pose.pose.orientation = odom_quat;
odom.child_frame_id = "base_link";
odom.twist.twist.linear.x = 0;
odom.twist.twist.linear.y = 0;
odom.twist.twist.angular.z = 0;
odom_pub.publish(odom);
last_time = current_time;
r.sleep();
}
}
This publishes odometry information just fine. The problem is when I try to incorporate the two.
Following the hector_slam robot setup, I change the base frame and odom frame to:
but I get the following timeout:
[ INFO] [1490208346.381940938]: lookupTransform base_frame to laser timed out. Could not transform laser scan into base_frame.
This stops the SLAM node from creating anything.
When I change the base frame back to base_link, as it was in the original, I get the following error:
[ERROR] [1490208025.858563554]: Transform failed during publishing of map_odom transform: Lookup would require extrapolation at time 1490208024.946150226, but only time 1490208024.845273926 is in the buffer, when looking up transform from frame [base_link] to frame [odom]
This error still allows for mapping, but makes no use of my odometry info.
No doubt the problem is me not setting up the required transform, but I'm genuinely at a loss as to what to do about it. Help would be greatly appreciated. At the very least, I hope someone finds my code useful.
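A minimal sketch of what the first error is asking for: something has to publish a fixed base frame -> laser transform so hector_mapping can relate the scan frame to the robot base. The frame names and the laser offsets below are assumptions and need to match the actual URDF/launch setup; a static_transform_publisher node in the launch file achieves the same thing.

#!/usr/bin/env python
# Sketch only: broadcast a fixed base_link -> laser transform so hector_mapping
# (or any tf consumer) can look up the scanner pose relative to the robot base.
# The frame names and the 0.1 m / 0.2 m offsets are placeholders.
import rospy
import tf

if __name__ == '__main__':
    rospy.init_node('base_to_laser_broadcaster')
    br = tf.TransformBroadcaster()
    rate = rospy.Rate(20)
    while not rospy.is_shutdown():
        br.sendTransform((0.1, 0.0, 0.2),        # translation (x, y, z) in metres
                         (0.0, 0.0, 0.0, 1.0),   # rotation quaternion (x, y, z, w)
                         rospy.Time.now(),
                         "laser",                # child frame (the scanner)
                         "base_link")            # parent frame (the robot base)
        rate.sleep()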
↧
hector slam launch with odom and hokuyo
↧
sendTransform() takes exactly 6 arguments (2 given)
I was trying to run [this tutorial](http://wiki.ros.org/urdf/Tutorials/Using%20urdf%20with%20robot_state_publisher) in Python, but I got the above error when I tried to run it.
My Python conversion of the tutorial program is:
#!/usr/bin/env python
import rospy
from sensor_msgs.msg import JointState
from std_msgs.msg import Header
import tf
import geometry_msgs.msg
import math
def talker():
    pub = rospy.Publisher('joint_states', JointState, queue_size=1)
    rospy.init_node('state_publisher')
    broadcaster = tf.TransformBroadcaster()
    rate = rospy.Rate(30)  # 10hz
    M_PI = 3.145
    degree = M_PI/180
    # robot state
    tilt = 0
    tinc = degree
    swivel = 0
    angle = 0
    height = 0
    hinc = 0.005
    # message declarations
    t = geometry_msgs.msg.TransformStamped()
    hello_str = JointState()
    t.header.frame_id = "odom"
    t.child_frame_id = "axis"
    while not rospy.is_shutdown():
        # update joint_state
        hello_str.header.stamp = rospy.Time.now()
        hello_str.name = ['swivel', 'tilt', 'periscope']
        hello_str.position = swivel
        hello_str.velocity = tilt
        hello_str.effort = height
        t.header.stamp = rospy.Time.now()
        t.transform.translation.x = math.cos(angle)*2
        t.transform.translation.y = math.sin(angle)*2
        t.transform.translation.z = .7
        #t.transform.rotation = tf.createQuaternionMsgFromYaw(angle+M_PI/2)
        t.transform.rotation = tf.transformations.quaternion_from_euler(0, 0, angle)
        # send the joint state and transform
        pub.publish(hello_str)
        broadcaster.sendTransform(t)
        # Create new robot state
        tilt += tinc
        if (tilt < -.5 or tilt > 0):
            tinc *= -1
        height += hinc
        if (height > .2 or height < 0):
            hinc *= -1
        swivel += degree
        angle += degree/4
        rate.sleep()

if __name__ == '__main__':
    try:
        talker()
    except rospy.ROSInterruptException:
        pass
How can I fix this? I am using ROS Indigo.
Even if I change that line to:
broadcaster.sendTransform((0.5,1.0,0),tf.transformations.quaternion_from_euler(0, 0, angle+M_PI/2),rospy.Time.now(),"odom","axis")
It's not working. Then it shows this error:
field position must be a list or tuple type
[state_pub-2] process has died [pid 11654, exit co
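For comparison, the Python broadcaster in tf does not accept a TransformStamped: its sendTransform expects translation, rotation, time, child frame and parent frame as five separate arguments (plus self, which is why the interpreter counts six), and the rotation must be a 4-element sequence. The JointState position/velocity/effort fields likewise need to be lists (e.g. [swivel, tilt, height]), which is the likely cause of the "field position must be a list or tuple type" error. A minimal sketch with placeholder values:

import math
import rospy
import tf
from tf.transformations import quaternion_from_euler

rospy.init_node('state_publisher')
broadcaster = tf.TransformBroadcaster()
angle = 0.0
# sendTransform(translation, rotation, time, child, parent)
broadcaster.sendTransform((math.cos(angle) * 2, math.sin(angle) * 2, 0.7),  # translation (x, y, z)
                          quaternion_from_euler(0, 0, angle),               # rotation (x, y, z, w)
                          rospy.Time.now(),
                          "axis",   # child frame
                          "odom")   # parent frame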
↧
Duplicate tf tree operations offline in C++?
What is the most expedient way to duplicate the sort of information the tf tree provides, but without having to call sendTransform or do lookups on a tf listener object? One big thing I don't require is any time stamping; I just want to build a tree, query it, then change it and query again, getting results only for the most recent transform hierarchy.
I want to build up a transform tree using the same information (and ideally the same data types) I'd otherwise put into a sendTransform and then be able to query it for what the transform from X to Y is and get the same return value datatypes.
Could I use the listener object, but insert transforms directly into it so that they are immediately available for lookup without having to broadcast them?
The main concern is speed, which requires parallelism: I want to create and use any number of trees simultaneously, with no sharing between them and no delay before transforms are available after defining frame relationships.
I know I could probably work something out myself in any of several ways, using any of several matrix operation libraries and datatypes, and wrap the whole thing in a tf-like interface, but I'd prefer a general, already available approach that is as close to tf as possible.
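One option worth checking: tf2's buffer can be used stand-alone, with transforms inserted directly and looked up immediately, no broadcasting or listening involved; in C++ that is tf2::BufferCore (or tf2_ros::Buffer), which takes the same geometry_msgs TransformStamped type you would otherwise hand to sendTransform. A rough sketch of the idea against the Python tf2_ros bindings; the set_transform / lookup_transform method names are my recollection and should be verified against your tf2 version:

import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped

rospy.init_node('offline_tf_sketch')

buffer = tf2_ros.Buffer()              # stand-alone buffer, not wired to /tf at all

ts = TransformStamped()
ts.header.stamp = rospy.Time(0)
ts.header.frame_id = "base_link"       # parent frame
ts.child_frame_id = "tool"             # child frame
ts.transform.translation.x = 0.5
ts.transform.rotation.w = 1.0

buffer.set_transform(ts, "offline_authority")                        # insert directly, no broadcast
result = buffer.lookup_transform("base_link", "tool", rospy.Time(0)) # query immediately
print(result.transform.translation)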
↧
robot_localization ukf not publishing map->odom
I am trying to use the ukf node in robot_localization, but this node is not publishing the transform from the map frame to the odom frame, and the following error message appears:
Could not obtain transform from odom to map. Error was "map" passed to lookupTransform argument target_frame does not exist.
My config file is the following:
frequency: 30
sensor_timeout: 0.1
two_d_mode: true
transform_time_offset: 0.0
transform_timeout: 0.0
print_diagnostics: true
debug: false
debug_out_file: ~/.ros/robot/rDebug.log

map_frame: map
odom_frame: odom
base_link_frame: base_link
world_frame: map

odom0: odom
odom0_config: [true,  true,  false,
               false, false, true,
               false, false, false,
               false, false, false,
               false, false, false]
odom0_queue_size: 2
odom0_nodelay: false
odom0_differential: false
odom0_relative: false
odom0_pose_rejection_threshold: 5
odom0_twist_rejection_threshold: 1

pose0: pose
pose0_config: [true,  true,  false,
               false, false, true,
               false, false, false,
               false, false, false,
               false, false, false]
pose0_differential: true
pose0_relative: false
pose0_queue_size: 5
pose0_rejection_threshold: 2
pose0_nodelay: false

use_control: true
control_timeout: 0.2
control_config: [true, false, false, false, false, true]
acceleration_limits: [1.3, 0.0, 0.0, 0.0, 0.0, 3.4]
deceleration_limits: [1.3, 0.0, 0.0, 0.0, 0.0, 4.5]
acceleration_gains: [0.8, 0.0, 0.0, 0.0, 0.0, 0.9]
deceleration_gains: [1.0, 0.0, 0.0, 0.0, 0.0, 1.0]

process_noise_covariance: [0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0.06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0.06, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0.025, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0.025, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015]

initial_estimate_covariance: [1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0,
                              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9]

alpha: 0.001
kappa: 0
beta: 2
My inputs are odometry and a PoseWithCovarianceStamped. The frame_ids in the messages do not have slashes. odometry/filtered is not being published either. All other transformations are okay, including odom->base_link.
What am I doing wrong?
↧
Converting Python tf code to C++
Are there any non-trivial examples for using the tf package to calculate quaternions in C++ for an Arduino?
I'm trying to convert this Python code for publishing IMU data, to the equivalent C++, so I can run it on an Arduino using the rosserial_arduino package:
import rospy
import tf
from sensor_msgs.msg import Imu
from std_msgs.msg import Header
imu_msg = Imu()
imu_msg.header = Header()
imu_msg.header.stamp = rospy.Time.now()
imu_msg.header.frame_id = '/base_link'
quaternion = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
imu_msg.orientation.x = quaternion[0]
imu_msg.orientation.y = quaternion[1]
imu_msg.orientation.z = quaternion[2]
imu_msg.orientation.w = quaternion[3]
imu_msg.linear_acceleration.x = accel[0]
imu_msg.linear_acceleration.y = accel[1]
imu_msg.linear_acceleration.z = accel[2]
imu_pub.publish(imu_msg)
However, even though there are several [general-purpose C++ examples](http://wiki.ros.org/tf/Tutorials/Writing%20a%20tf%20broadcaster%20%28C%2B%2B%29) and rosserial_arduino examples [for using Tf](http://wiki.ros.org/rosserial_arduino/Tutorials/Time%20and%20TF), I can't find anything for calculating quaternions on the Arduino.
All the C++ code I've found seems to require defining:
tf::Transform transform;
However, there doesn't seem to be any tf.h header available for the Arduino, and this gives me the compilation error:
error: 'Transform' in namespace 'tf' does not name a type
↧
Python version of tf functions
Hi,
I am trying to transform a velocity from one frame to another. I want to use the transformVector3 function as mentioned in this [link](http://wiki.ros.org/tf/Overview/Using%20Published%20Transforms). Since it requires a stamped Vector3, I am trying to convert the datatype using the function [tf::vector3StampedMsgToTF](http://docs.ros.org/indigo/api/tf/html/c++/namespacetf.html#a091bf046c418be5e6604c1b025e1ff41). I am writing my node in Python, but I cannot find the Python version of the above tf functions anywhere.
Any help will be much appreciated.
P.S. Does using tf2 avoid the explicit conversion of a geometry message to tf datatypes?
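For what it's worth, the Python listener (tf.TransformListener, via TransformerROS) exposes transformVector3, transformPoint, transformPose and friends, and these take the geometry_msgs messages directly, so the explicit vector3StampedMsgToTF conversion from the C++ API isn't needed. (With tf2, importing tf2_geometry_msgs registers the conversions so Buffer.transform() also accepts geometry_msgs types.) A small sketch, assuming frames "odom" and "base_link" exist in the tree:

import rospy
import tf
from geometry_msgs.msg import Vector3Stamped

rospy.init_node('velocity_transformer')
listener = tf.TransformListener()
listener.waitForTransform("base_link", "odom", rospy.Time(0), rospy.Duration(4.0))

v = Vector3Stamped()
v.header.frame_id = "odom"
v.header.stamp = rospy.Time(0)   # 0 = use the latest available transform
v.vector.x = 1.0                 # velocity expressed in the odom frame

v_in_base = listener.transformVector3("base_link", v)
print(v_in_base.vector)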
↧
Is it necessary to broadcast both odometry and tf data?
Most [odometry examples](http://wiki.ros.org/navigation/Tutorials/RobotSetup/Odom) I've read show reporting of both odometry and tf. Is this necessary? If you're broadcasting the robot's base_link transform via tf, doesn't that accomplish the same thing as odometry messages?
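Partly, but not entirely: the tf transform only carries the pose, while nav_msgs/Odometry also carries the twist and the covariances, and consumers such as robot_localization and move_base's local planner read those from the topic rather than from tf, which is why the tutorials publish both from the same estimate. A minimal Python sketch of doing both (frame names and state values are placeholders):

import rospy
import tf
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Quaternion
from tf.transformations import quaternion_from_euler

rospy.init_node('odom_publisher_sketch')
odom_pub = rospy.Publisher('odom', Odometry, queue_size=50)
br = tf.TransformBroadcaster()

x, y, th, vx, vth = 1.0, 0.5, 0.3, 0.2, 0.1   # placeholder pose and velocities
now = rospy.Time.now()
q = quaternion_from_euler(0, 0, th)

# 1) the transform: pose only
br.sendTransform((x, y, 0.0), q, now, "base_link", "odom")

# 2) the Odometry message: pose plus twist (and covariance if known)
odom = Odometry()
odom.header.stamp = now
odom.header.frame_id = "odom"
odom.child_frame_id = "base_link"
odom.pose.pose.position.x = x
odom.pose.pose.position.y = y
odom.pose.pose.orientation = Quaternion(*q)
odom.twist.twist.linear.x = vx
odom.twist.twist.angular.z = vth
odom_pub.publish(odom)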
↧
rviz process died on kinetic + debian, publisherupdate/tf connection refused error
Hi,
I'm trying out my robot configuration on Kinetic + Debian Jessie. I have also installed the ROS-I packages, including industrial-core, and I've built industrial-moveit from source (not sure I needed that package too; I built it after I saw these errors, and building it didn't solve them).
When I launch the demo launch file all seems to work. I get a warning about deprecated messages, which is logical since I'm trying to use my Indigo configs, and I need to see if and where I need to change them for Kinetic.
[ WARN] [1492246179.456638579]: Deprecation warning: parameter 'allowed_execution_duration_scaling' moved into namespace 'trajectory_execution'. Please, adjust file trajectory_execution.launch.xml!
[ WARN] [1492246179.457501942]: Deprecation warning: parameter 'allowed_goal_duration_margin' moved into namespace 'trajectory_execution'. Please, adjust file trajectory_execution.launch.xml!
Then, after I get the message "you can start planning now", RViz crashes with the following message:
[rviz_xw6600_2762_3121412043575825610-5] process has died [pid 2819, exit code -11, cmd /opt/ros/kinetic/lib/rviz/rviz -d /home/machinekit/catkin_ws/src/matilda_moveit_config/launch/moveit.rviz __name:=rviz_xw6600_2762_3121412043575825610 __log:=/home/machinekit/.ros/log/74be51e0-21b8-11e7-b05d-001f2900ec6f/rviz_xw6600_2762_3121412043575825610-5.log]. log file: /home/machinekit/.ros/log/74be51e0-21b8-11e7-b05d-001f2900ec6f/rviz_xw6600_2762_3121412043575825610-5*.log
From master.log in the mentioned log directory I see the following:
[rosmaster.master][INFO] 2017-04-15 11:02:00,238: publisherUpdate[/tf] -> http://xw6600:34005/ []
[rosmaster.master][INFO] 2017-04-15 11:02:00,239: publisherUpdate[/tf] -> http://xw6600:34005/ []: sec=0.00, exception=[Errno 111] Connection refused
[rosmaster.threadpool][ERROR] 2017-04-15 11:02:00,240: Traceback (most recent call last):
File "/opt/ros/kinetic/lib/python2.7/dist-packages/rosmaster/threadpool.py", line 218, in run
result = cmd(*args)
File "/opt/ros/kinetic/lib/python2.7/dist-packages/rosmaster/master_api.py", line 210, in publisher_update_task
ret = xmlrpcapi(api).publisherUpdate('/master', topic, pub_uris)
File "/usr/lib/python2.7/xmlrpclib.py", line 1233, in __call__
return self.__send(self.__name, args)
File "/usr/lib/python2.7/xmlrpclib.py", line 1591, in __request
verbose=self.__verbose
File "/usr/lib/python2.7/xmlrpclib.py", line 1273, in request
return self.single_request(host, handler, request_body, verbose)
File "/usr/lib/python2.7/xmlrpclib.py", line 1301, in single_request
self.send_content(h, request_body)
File "/usr/lib/python2.7/xmlrpclib.py", line 1448, in send_content
connection.endheaders(request_body)
File "/usr/lib/python2.7/httplib.py", line 1035, in endheaders
self._send_output(message_body)
File "/usr/lib/python2.7/httplib.py", line 879, in _send_output
self.send(msg)
File "/usr/lib/python2.7/httplib.py", line 841, in send
self.connect()
File "/usr/lib/python2.7/httplib.py", line 822, in connect
self.timeout, self.source_address)
File "/usr/lib/python2.7/socket.py", line 571, in create_connection
raise err
error: [Errno 111] Connection refused
[rosmaster.master][INFO] 2017-04-15 11:02:00,240: publisherUpdate[/tf_static] -> http://xw6600:34005/ []
[rosmaster.master][INFO] 2017-04-15 11:02:00,240: publisherUpdate[/tf_static] -> http://xw6600:34005/ []: sec=0.00, exception=[Errno 111] Connection refused
[rosmaster.threadpool][ERROR] 2017-04-15 11:02:00,241: Traceback (most recent call last):
File "/opt/ros/kinetic/lib/python2.7/dist-packages/rosmaster/threadpool.py", line 218, in run
result = cmd(*args)
File "/opt/ros/kinetic/lib/python2.7/dist-packages/rosmaster/master_api.py", line 210, in publisher_update_task
ret = xmlrpcapi(api).publisherUpdate('/master', topic, pub_uris)
File "/usr/lib/python2.7/xmlrpclib.py", line 1233, in __call__
return self.__send(self.__name, args)
File "/usr/lib/python2.7/xmlrpclib.py", line 1591, in __request
verbose=self.__verbose
File "/usr/lib/python2.7/xmlrpclib.py", line 1273, in request
return self.single_request(host, handler, request_body, verbose)
File "/usr/lib/python2.7/xmlrpclib.py", line 1301, in single_request
self.send_content(h, request_body)
File "/usr/lib/python2.7/xmlrpclib.py", line 1448, in send_content
connection.endheaders(request_body)
File "/usr/lib/python2.7/httplib.py", line 1035, in endheaders
self._send_output(message_body)
File "/usr/lib/python2.7/httplib.py", line 879, in _send_output
self.send(msg)
File "/usr/lib/python2.7/httplib.py", line 841, in send
self.connect()
File "/usr/lib/python2.7/httplib.py", line 822, in connect
self.timeout, self.source_address)
File "/usr/lib/python2.7/socket.py", line 571, in create_connection
raise err
error: [Errno 111] Connection refused
Any thoughts on how to get to the root cause and some pointers on how to solve this would be highly appreciated.
Best Regards,
Bas
↧
frames not shown using view_frame node but in RViz
Hi,
I am simulating a mobile robot in Gazebo and visualizing it in RViz. I have attached camera and laser links with the corresponding Gazebo plugins and can successfully see them in RViz and Gazebo. I can see all the frames of the TF tree in RViz, including the laser link and camera link. However, whenever I use `rosrun tf view_frames` to see the transform tree, I notice that the laser and camera links are missing from it. The URDF clearly describes the parent and child links for the laser and camera joints. The `robot_state_publisher` and `joint_state_publisher` nodes are also running. So my question is: why can't I see the laser joint and camera joint in the `frames.pdf` generated by `rosrun tf view_frames`, while in RViz those transforms are shown OK and are correct?
Thank you in advance.
↧
transform (x,y,z) coordinate from kinect to /map frame using tf
Hi, I'm tracking an object using a Kinect and trying to publish the tf transform of that object in the "/map" frame. I'm able to return the object's x, y and depth data using OpenCV. However, I'm struggling with the tf stuff. I've written a function which is meant to update the transform:
void updatetransform(double x, double y, double z)
{
r = -1.5708;
p = 0;
y = -3.1415; // note: this overwrites the y translation argument before setOrigin() below uses it
static tf::TransformBroadcaster br;
transform.setOrigin(tf::Vector3(x, y, z));
quaternion.setRPY(r,p,y); //where r p y are fixed
transform.setRotation(quaternion);
br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "kinect2_ir_optical_frame","drone/base_link"));
ROS_INFO("Sent Drone Transform at %f %f %f", x, y, z);
}
and in my launch file
But when I run RViz, using "/map" as the fixed frame, the drone/base_link tf isn't shown on the screen. I've attached a picture of what it looks like at this [link](http://imgur.com/a/MZEmV). My x, y and depth values from the Kinect are okay, but the transform is wrong. Am I doing something wrong here?
Also here's my tf_tree which looks about right to me. [Link here](http://imgur.com/a/hPuDq)
↧
robot_localization producing 180° rotation jumps only when static publishing map->odom
I have configured `world_frame=odom`. When I statically publish TF map->odom my output of ekf_localization_node is jumping a lot. I've looked all over for the problem but I am stuck!
Here's an 8 second streamable video of the problem seen in rviz:
https://drive.google.com/file/d/0B6qINUvvDwDfMzdWUG1CWWNlMm8/view?usp=sharing
When I comment out the static map->odom transform publisher, the problem goes away and I see good, stable, filtered odom.
My data bag of the problem
https://drive.google.com/file/d/0B6qINUvvDwDfOGxYdVFvUlFMNzg/view?usp=sharing
My ekf_localization_node.launch
[false, false, false,
 true,  true,  true,
 false, false, false,
 false, false, false,
 false, false, false]

[true,  true,  false,
 false, false, false,
 false, false, false,
 false, false, true,
 false, false, false]
My rtabmap rgbd_odometry.launch
My roswtf
dan@ubuntu:~/URC$ roswtf
Loaded plugin tf.tfwtf
No package or stack in context
================================================================================
Static checks summary:
No errors or warnings
================================================================================
Beginning tests of your ROS graph. These may take awhile...
analyzing graph...
... done analyzing graph
running graph rules...
... done running graph rules
running tf checks, this will take a second...
... tf checks complete
Online checks summary:
Found 2 warning(s).
Warnings are things that may be just fine, but are sometimes at fault
WARNING The following node subscriptions are unconnected:
* /ekf_localization_local:
* /set_pose
* /rqt_gui_py_node_8452:
* /statistics
WARNING The following nodes are unexpectedly connected:
* /rgbd_odometry/zed_odom->/rviz_1492655609198267406 (/tf)
* /rgbd_odometry/zed_odom->/roswtf_9931_1492660614427 (/tf)
* /robot_base_to_camera_link->/rviz_1492655609198267406 (/tf_static)
* /map_to_odom->/rviz_1492655609198267406 (/tf_static)
* /camera_link_to_zed_actual_frame->/rviz_1492655609198267406 (/tf_static)
My bad TF (when publishing static map->odom transform)
$ rosrun tf tf_echo robot_base odom
At time 1492662864.848
- Translation: [0.001, -0.017, -0.038]
- Rotation: in Quaternion [0.639, -0.283, -0.638, 0.324]
in RPY (radian) [1.541, 0.684, -1.535]
in RPY (degree) [88.289, 39.178, -87.969]
My good TF (when NOT publishing static map->odom transform)
$ rosrun tf tf_echo robot_base odom
At time 1492661532.036
- Translation: [0.016, -0.004, 0.000]
- Rotation: in Quaternion [-0.000, -0.000, 0.001, 1.000]
in RPY (radian) [-0.000, -0.000, 0.003]
in RPY (degree) [-0.000, -0.000, 0.154]
I'm using Ubuntu 16.04 on a TX1.
Here's my version of robot_localization
commit 3c7521fb109fcd0fe6090ae8ae407b031838e80d
Merge: fe2f10e cd86690
Author: Tom Moore
Date: Mon Mar 27 19:45:55 2017 +0100
Merge pull request #355 from clearpathrobotics/fix_gains_defaults
Fix acceleration and deceleration gains default logic
I appreciate any help! Thanks!
Fixed typo: my problem transform IS map -> odom
↧
navsat_transform_node: Tf has two or more unconnected trees.
Hi,
I'm new to ROS.
I use the robot_localization package for fusing IMU, GPS and odometry,
but I have a problem with navsat_transform_node.
When I launch the launch file I receive this warning:
[ WARN] [1492703521.573344934, 1492523889.516523566]: Could not obtain transform from imu0 to base_link. Error was Could not find a connection between 'base_link' and 'imu0' because they are not part of the same tree.Tf has two or more unconnected trees.
The topic odometry/gps is empty; no messages are published.
I don't understand how I can fix this.
launch file:
https://www.dropbox.com/s/anjvbz7n5w6eqa0/ekf_filter.launch?dl=0
Bag:
https://www.dropbox.com/s/6lwmhr3abc2e67s/u5-fuori10hzlungocoperto.bag?dl=0
Thanks in advance
↧
mapviz - transforming local xy coordinate frame to tile map lat/lon coordinate frame
I am running a tile map plugin for mapviz. As I move my mouse over the map I can see the GPS coordinates changing in the UI. I also used swri_transform_util to give my robot a lat/lon xy origin. Now I am trying to use the point click publisher to get lat/lon coordinates when a place on the map is clicked. However, I am not sure what the tile map frame is. I can get my robot's local xy position when I click on the map, but I'm not sure how to transform it to lat/lon coordinates.
↧
Slam gmapping and hector
Hello!
I'm currently trying to implement SLAM using Gazebo.
I already have the laser tf published, as well as the tf between the wheels and the base_link, and Gazebo publishes the odometry and I think it publishes the tf too.
So I'll put the questions in order:
1- Why is the map so horrible using gmapping while it's fairly good using hector_slam?
2- My current tf tree using hector is this:

Do I need the transform odom->base_footprint->base_link for slam_gmapping to work, and why would I need it either way?
3- I don't really understand how the odom tf is published. I used the robot_state_publisher node and I know it publishes the wheel tfs, but does it also publish the odom tf, or is it Gazebo that does it?
Thanks in advance!
↧
Which library can I access XmlRpcValue.h file?
Hello ROS users,
While installing the geometry package for tf, during catkin_make I receive this error:
[ 21%] Built target roscpp_generate_messages_cpp
[ 21%] Built target roscpp_generate_messages_py
[ 22%] Built target test_roscpp_generate_messages_eus
[ 25%] Built target test_roscpp_generate_messages_py
[ 27%] Built target test_roscpp_generate_messages_nodejs
[ 30%] Built target test_roscpp_generate_messages_lisp
[ 32%] Built target test_roscpp_generate_messages_cpp
[ 33%] Built target test_rosmaster_generate_messages_cpp
[ 36%] Built target test_rosmaster_generate_messages_lisp
[ 37%] Built target test_rosmaster_generate_messages_nodejs
[ 37%] Built target test_rosservice_generate_messages_lisp
[ 37%] Built target test_rosservice_generate_messages_eus
[ 38%] Built target test_rosmaster_generate_messages_eus
[ 38%] Built target test_rosservice_generate_messages_cpp
[ 38%] Built target test_rosservice_generate_messages_nodejs
[ 40%] Built target topic_tools_generate_messages_cpp
[ 41%] Built target test_rosmaster_generate_messages_py
[ 41%] Built target test_rosservice_generate_messages_py
[ 41%] Built target topic_tools_generate_messages_nodejs
[ 42%] Built target topic_tools_generate_messages_py
[ 43%] Built target topic_tools_generate_messages_lisp
[ 43%] Built target topic_tools_generate_messages_eus
[ 45%] Built target test_rosbag_generate_messages_eus
[ 45%] Built target test_rosbag_generate_messages_nodejs
[ 46%] Built target test_rosbag_generate_messages_lisp
[ 47%] Built target test_rosbag_generate_messages_py
[ 48%] Built target test_rosbag_generate_messages_cpp
[ 50%] Built target test_rostopic_generate_messages_cpp
[ 50%] Built target test_rostopic_generate_messages_nodejs
[ 50%] Built target test_rostopic_generate_messages_eus
[ 51%] Built target test_rostopic_generate_messages_py
[ 52%] Built target test_rostopic_generate_messages_lisp
[ 52%] Built target tf_generate_messages_py
[ 52%] Built target tf_generate_messages_lisp
[ 52%] Built target tf_generate_messages_cpp
[ 53%] Built target tf_generate_messages_eus
[ 53%] Built target tf_generate_messages_nodejs
[ 53%] Built target amcl_sensors
[ 53%] Built target robot_pose_ekf_generate_messages_cpp
[ 53%] Built target robot_pose_ekf_generate_messages_lisp
[ 53%] Built target robot_pose_ekf_generate_messages_py
[ 53%] Built target robot_pose_ekf_generate_messages_nodejs
[ 53%] Built target robot_pose_ekf_generate_messages_eus
[ 53%] Built target hector_mapping_generate_messages_py
[ 53%] Built target hector_mapping_generate_messages_nodejs
[ 53%] Built target hector_mapping_generate_messages_cpp
[ 53%] Built target hector_mapping_generate_messages_lisp
[ 53%] Built target costmap_2d_generate_messages_cpp
[ 53%] Built target hector_mapping_generate_messages_eus
[ 53%] Built target costmap_2d_generate_messages_nodejs
[ 55%] Built target costmap_2d_generate_messages_eus
[ 55%] Built target costmap_2d_generate_messages_py
[ 55%] Built target costmap_2d_generate_messages_lisp
[ 55%] Built target base_local_planner_generate_messages_cpp
[ 55%] Built target base_local_planner_generate_messages_nodejs
[ 55%] Built target base_local_planner_generate_messages_lisp
[ 55%] Built target base_local_planner_generate_messages_eus
[ 55%] Built target base_local_planner_generate_messages_py
[ 55%] Built target navfn_generate_messages_cpp
[ 55%] Built target navfn_generate_messages_nodejs
[ 56%] Built target navfn_generate_messages_py
[ 56%] Built target navfn_generate_messages_eus
[ 57%] Built target rtabmap_ros_generate_messages_lisp
[ 58%] Built target rtabmap_ros_generate_messages_py
[ 58%] Built target navfn_generate_messages_lisp
[ 60%] Built target rtabmap_ros_generate_messages_cpp
[ 61%] Built target rtabmap_ros_generate_messages_eus
[ 61%] Built target scanmatcher
[ 62%] Built target rtabmap_ros_generate_messages_nodejs
[ 62%] Built target hector_nav_msgs_generate_messages
[ 62%] Built target rosconsole
[ 62%] Built target octomap_msgs_generate_messages
[ 62%] Built target tf2_msgs_generate_messages
[ 62%] Built target test_roslib_comm_generate_messages
[ 62%] Built target tf2_msgs_gencpp
[ 62%] Built target roscpp_generate_messages
[ 62%] Built target test_rosmaster_generate_messages
[ 62%] Built target test_roscpp_generate_messages
[ 65%] Built target test_rospy_generate_messages_py
[ 67%] Built target test_rospy_generate_messages_eus
[ 67%] Built target test_rosservice_generate_messages
[ 68%] Built target test_rospy_generate_messages_cpp
[ 70%] Built target test_rospy_generate_messages_lisp
[ 71%] Built target test_rospy_generate_messages_nodejs
[ 71%] Built target topic_tools_generate_messages
[ 71%] Built target test_rosbag_generate_messages
[ 71%] Built target tf_generate_messages
[ 71%] Built target test_rostopic_generate_messages
[ 71%] Built target tf_gencpp
[ 71%] Built target costmap_2d_generate_messages
[ 71%] Built target hector_mapping_generate_messages
[ 71%] Built target robot_pose_ekf_generate_messages
[ 71%] Built target navfn_generate_messages
[ 71%] Built target base_local_planner_generate_messages
[ 71%] Built target rtabmap_ros_generate_messages
[ 71%] Built target gridfastslam
[ 71%] Built target test_rospy_generate_messages
[ 71%] Built target tf2
[ 71%] Built target tf2_py
[ 75%] Built target roscpp
[ 75%] Built target laser_scan_sparsifier
[ 75%] Built target voxel_grid
[ 75%] Built target message_filters
[ 75%] Built target rosout
[ 75%] Built target laser_scan_splitter
[ 75%] Built target topic_tools
[ 75%] Built target geotiff_writer
[ 75%] Built target laser_scan_sparsifier_node
[ 75%] Built target DepthImageToLaserScan
[ 75%] Built target laser_scan_sparsifier_nodelet
[ 75%] Built target laser_scan_splitter_node
[ 75%] Built target laser_scan_splitter_nodelet
[ 75%] Built target tf2_ros
[ 75%] Built target mux
[ 75%] Built target map_to_image_node
[ 76%] Built target demux
[ 76%] Built target drop
[ 76%] Built target relay
[ 76%] Built target throttle
[ 76%] Built target switch_mux
[ 77%] Built target DepthImageToLaserScanROS
[ 77%] Built target rosbag
[ 77%] Built target tf2_ros_buffer_server
[ 77%] Built target tf2_ros_static_transform_publisher
[ 77%] Built target tf
[ 77%] Built target pytf_py
[ 77%] Built target pointcloud_to_laserscan
[ 77%] Built target geotiff_node
[ 77%] Built target play
[ 77%] Built target record
[ 77%] Built target ZEDWrapper
[ 77%] Built target geotiff_saver
[ 77%] Built target hector_geotiff_plugins
[ 77%] Built target DepthImageToLaserScanNodelet
[ 77%] Building CXX object geometry/tf/CMakeFiles/tf_change_notifier.dir/src/change_notifier.cpp.o
[ 77%] Built target testBroadcaster
[ 77%] Built target tf_empty_listener
[ 77%] Built target static_transform_publisher
[ 77%] Built target depthimage_to_laserscan
[ 77%] Built target tf_monitor
[ 77%] Built target testListener
[ 77%] Built target tf_echo
[ 78%] Built target fake_localization
[ 78%] Built target transform_listener_unittest
[ 78%] Built target slam_gmapping
[ 80%] Built target imu_attitude_to_tf_node
[ 80%] Built target slam_gmapping_nodelet
[ 80%] Built target slam_gmapping_replay
Scanning dependencies of target hector_trajectory_server
Scanning dependencies of target map_server_image_loader
Scanning dependencies of target map_server-map_saver
[ 80%] Built target pose_and_orientation_to_imu_node
[ 80%] Linking CXX executable /home/mars-lab/catkin_ws/devel/lib/map_server/rtest
[ 81%] Built target hector_map_server
[ 81%] Building CXX object hector_slam/hector_trajectory_server/CMakeFiles/hector_trajectory_server.dir/src/hector_trajectory_server.cpp.o
Scanning dependencies of target ncd_parser
Scanning dependencies of target amcl
[ 81%] Building CXX object navigation/map_server/CMakeFiles/map_server_image_loader.dir/src/image_loader.cpp.o
Scanning dependencies of target laser_ortho_projector
[ 81%] Building CXX object scan_tools/ncd_parser/CMakeFiles/ncd_parser.dir/src/ncd_parser.cpp.o
[ 81%] Building CXX object navigation/map_server/CMakeFiles/map_server-map_saver.dir/src/map_saver.cpp.o
[ 81%] Building CXX object navigation/amcl/CMakeFiles/amcl.dir/src/amcl_node.cpp.o
[ 81%] Building CXX object scan_tools/laser_ortho_projector/CMakeFiles/laser_ortho_projector.dir/src/laser_ortho_projector.cpp.o
[ 81%] Built target rtest
Scanning dependencies of target laser_scan_matcher
[ 81%] Building CXX object scan_tools/laser_scan_matcher/CMakeFiles/laser_scan_matcher.dir/src/laser_scan_matcher.cpp.o
/home/mars-lab/catkin_ws/src/geometry/tf/src/change_notifier.cpp:34:25: fatal error: XmlRpcValue.h: No such file or directory
compilation terminated.
geometry/tf/CMakeFiles/tf_change_notifier.dir/build.make:62: recipe for target 'geometry/tf/CMakeFiles/tf_change_notifier.dir/src/change_notifier.cpp.o' failed
make[2]: *** [geometry/tf/CMakeFiles/tf_change_notifier.dir/src/change_notifier.cpp.o] Error 1
CMakeFiles/Makefile2:37905: recipe for target 'geometry/tf/CMakeFiles/tf_change_notifier.dir/all' failed
make[1]: *** [geometry/tf/CMakeFiles/tf_change_notifier.dir/all] Error 2
make[1]: *** Waiting for unfinished jobs....
[ 81%] Linking CXX executable /home/mars-lab/catkin_ws/devel/lib/map_server/map_saver
[ 81%] Built target map_server-map_saver
[ 82%] Linking CXX shared library /home/mars-lab/catkin_ws/devel/lib/libmap_server_image_loader.so
[ 82%] Built target map_server_image_loader
[ 82%] Linking CXX executable /home/mars-lab/catkin_ws/devel/lib/ncd_parser/ncd_parser
[ 82%] Built target ncd_parser
[ 82%] Linking CXX executable /home/mars-lab/catkin_ws/devel/lib/hector_trajectory_server/hector_trajectory_server
[ 82%] Built target hector_trajectory_server
[ 82%] Linking CXX executable /home/mars-lab/catkin_ws/devel/lib/amcl/amcl
[ 82%] Built target amcl
[ 82%] Linking CXX shared library /home/mars-lab/catkin_ws/devel/lib/liblaser_ortho_projector.so
[ 82%] Built target laser_ortho_projector
[ 82%] Linking CXX shared library /home/mars-lab/catkin_ws/devel/lib/liblaser_scan_matcher.so
[ 82%] Built target laser_scan_matcher
Makefile:138: recipe for target 'all' failed
make: *** [all] Error 2
Invoking "make -j8 -l8" failed
I know it is because a library is missing; I installed the xmlrpc library, but I'm not sure why it still gives the error. Does anyone know which library this file belongs to?
P.S. I have run sudo apt-get upgrade and sudo apt-get update as well.
Thanks for the help.
↧
Using static_transform_publisher for laser tf with slam
Hello!
I want to use SLAM with a mobile robot, but I'm not sure if I can use static_transform_publisher to publish the tf between the laser and the base_link.
Is that good practice? So far, I don't think the laser scans look right with this static transform.
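Publishing a fixed base_link -> laser transform with static_transform_publisher (or a static broadcaster in code) is common practice; if the scans look wrong, the translation/rotation arguments are the usual suspect. A minimal Python sketch of the equivalent static broadcaster, with placeholder offsets that would need to match the real mounting position:

import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped

rospy.init_node('laser_static_tf')
broadcaster = tf2_ros.StaticTransformBroadcaster()

ts = TransformStamped()
ts.header.stamp = rospy.Time.now()
ts.header.frame_id = "base_link"    # parent frame
ts.child_frame_id = "laser"         # child frame (the scanner)
ts.transform.translation.x = 0.10   # placeholder offsets in metres
ts.transform.translation.z = 0.20
ts.transform.rotation.w = 1.0       # identity rotation

broadcaster.sendTransform(ts)       # latched; published once and kept available
rospy.spin()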
Thanks
↧
Axis conversion between OSVR and ROS
I'm using the [`vrpn_client_ros`](http://wiki.ros.org/vrpn_client_ros) package to connect an OSVR headset to ROS. However, the OSVR uses [a different axis convention than ROS](http://resource.osvr.com/docs/OSVR-Core/TopicWritingClientApplication.html) (much like camera optical frames).
In OSVR, X is right, Y is up, and Z is near.
In ROS, X is forward, Y is left, and Z is up.
This is leading to very strange behavior when I compare orientation between the two. Roll, pitch, and yaw on the OSVR seem to change depending on which direction it's facing, where pitch when facing north becomes yaw when facing west, and negative pitch when facing south. Moreover, it's very clear that rotation axes are very different between the headset IMU and camera odometry.
How can I convert the OSVR axes into ROS axes such that pose and twist from the OSVR headset IMU are equivalent to pose and twist from my camera odometry?
I have some code that swaps the axes to no avail. I've tried several combinations in desperation, but I always get the same rotation behavior.
namespace osvr_axis_conversion {
void osvrPoseCallback(const geometry_msgs::PoseStampedConstPtr &msg) { // osvr to ros
geometry_msgs::PoseStamped outputMsg;
outputMsg.header = msg->header;
outputMsg.pose.orientation.x = msg->pose.orientation.y;
outputMsg.pose.orientation.y = msg->pose.orientation.z;
outputMsg.pose.orientation.z = msg->pose.orientation.x;
outputMsg.pose.orientation.w = msg->pose.orientation.w;
osvrPosePub.publish(outputMsg);
ros::spinOnce();
}
void osvrTwistCallback(const geometry_msgs::TwistStampedConstPtr &msg) { // osvr to ros
geometry_msgs::TwistStamped outputMsg;
//outputMsg = *msg;
//outputMsg.twist = msg->twist;
outputMsg.header = msg->header;
outputMsg.twist.angular.x = msg->twist.angular.y;
outputMsg.twist.angular.y = msg->twist.angular.z;
outputMsg.twist.angular.z = msg->twist.angular.x;
osvrTwistPub.publish(outputMsg);
ros::spinOnce();
}
void odometryCallback(const nav_msgs::OdometryConstPtr &msg) { // ros to osvr
// Do the reverse of the above!
nav_msgs::Odometry outputMsg;
outputMsg = *msg;
outputMsg.pose.pose.position.x = msg->pose.pose.position.z;
outputMsg.pose.pose.position.y = msg->pose.pose.position.x;
outputMsg.pose.pose.position.z = msg->pose.pose.position.y;
outputMsg.pose.pose.orientation.x = msg->pose.pose.orientation.z;
outputMsg.pose.pose.orientation.y = msg->pose.pose.orientation.x;
outputMsg.pose.pose.orientation.z = msg->pose.pose.orientation.y;
outputMsg.pose.pose.orientation.w = msg->pose.pose.orientation.w;
outputMsg.twist.twist.linear.x = msg->twist.twist.linear.z;
outputMsg.twist.twist.linear.y = msg->twist.twist.linear.x;
outputMsg.twist.twist.linear.z = msg->twist.twist.linear.y;
outputMsg.twist.twist.angular.x = msg->twist.twist.angular.z;
outputMsg.twist.twist.angular.y = msg->twist.twist.angular.x;
outputMsg.twist.twist.angular.z = msg->twist.twist.angular.y;
// IMPORTANT: We're not handling the covariance here, so it's wrong
// in the output! This is being sent to the OSVR driver which doesn't
// use the covariance data.
odometryPub.publish(outputMsg);
ros::spinOnce();
}
}
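For what it's worth, a plain component swap cannot work here because the OSVR->ROS change of axes involves sign flips: with X right / Y up / Z near on one side and X forward / Y left / Z up on the other, the vector mapping is x_ros = -z_osvr, y_ros = -x_osvr, z_ros = y_osvr. That permutation is a proper rotation, so the quaternion's x/y/z components remap the same way while w stays unchanged. A hedged sketch of the idea (it assumes OSVR reports the pose in a world frame that uses the same axis convention as the device, and it ignores covariance and twist), written in Python for brevity:

# Sketch of the OSVR -> ROS axis relabeling for a pose message.
from geometry_msgs.msg import PoseStamped

def osvr_pose_to_ros(msg):
    out = PoseStamped()
    out.header = msg.header
    # position: (x, y, z)_ros = (-z, -x, y)_osvr
    out.pose.position.x = -msg.pose.position.z
    out.pose.position.y = -msg.pose.position.x
    out.pose.position.z = msg.pose.position.y
    # orientation: the vector part transforms like a vector, the scalar part is unchanged
    out.pose.orientation.x = -msg.pose.orientation.z
    out.pose.orientation.y = -msg.pose.orientation.x
    out.pose.orientation.z = msg.pose.orientation.y
    out.pose.orientation.w = msg.pose.orientation.w
    return out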
↧
Transform from map to odom for robot using rtabmap
I built a custom robot using URDF and I'm trying to view the position of the robot using the "odom" published from rtabmap. I managed to create a transform from odom to base_footprint but when I run rtabmap I get the following warning:
**Could not get transform from base_footprint to kinect2_rgb_optical_frame**
I've tried to create the transforms within rgbd_mapping_kinect2.launch by changing my **fixed frame** to **base_footprint**
and adding the following transforms:
My Kinect input for my robot model looks like this:
and my tf Tree looks like :http://imgur.com/a/6hTf1
As you can see, **kinect2_base_link** isn't connected to **kinect2_link**. Could you please help with this?
↧
Quaternion transformations in Python
I'm trying to place some Markers in RViz, using a node written in Python. To this end, I need to create a `geometry_msgs.mgs.Pose` with an `orientation` Quaternion.
But I can't for the life of me find the utility and conversion functions that I need for Quaternions. There are *some* in `tf.transformations`, but those produce `numpy` Quaternions, which I would have to manually split up and throw into the `geometry_msgs.msg.Quaternion` constructor.
Considering that Quaternions are of little use without a set of functions to work with them, my question is: **Where are the proper conversion functions for Quaternions?** That is, functions to create and modify `geometry_msgs.msg.Quaternion` or to convert between a `numpy` Quaternion (4-tuple) and `geometry_msgs.msg.Quaternion` (object with x,y,z,w attributes).
My environment is Ubuntu 12.04 Precise, with ROS fuerte (using fuerte is currently mandatory for this project; I'm already looking into upgrading, but that's not at all easy).
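In case it helps anyone with the same problem: tf.transformations.quaternion_from_euler returns a plain 4-element sequence in (x, y, z, w) order, and the generated message constructors accept positional arguments, so the conversion is a one-liner. A small sketch:

import math
from geometry_msgs.msg import Pose, Point, Quaternion
from tf.transformations import quaternion_from_euler, euler_from_quaternion

q = quaternion_from_euler(0.0, 0.0, math.pi / 2.0)   # roll, pitch, yaw -> (x, y, z, w)
pose = Pose(position=Point(1.0, 2.0, 0.0),
            orientation=Quaternion(*q))

# And back again: message -> (roll, pitch, yaw)
rpy = euler_from_quaternion([pose.orientation.x, pose.orientation.y,
                             pose.orientation.z, pose.orientation.w])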
↧
Initialising /odom
Hi,
I read this discussion on answers.ros:
http://answers.ros.org/question/235228/how-is-the-orientation-of-frame-odom-initialized/
What I understand is that "/odom" takes its initial orientation from the IMU. So if I publish a certain orientation, as a quaternion, to "sensor_msgs::Imu", I should be able to initialise the "odom" frame to start in that direction.
I did the following:
void Initialise(sensor_msgs::Imu &msg_value)
{
msg_value.header.stamp = ros::Time::now();
msg_value.header.frame_id = "/odom";
msg_value.angular_velocity.x = 0.0;
msg_value.angular_velocity.y = 0.0;
msg_value.angular_velocity.z = 0.0;
msg_value.linear_acceleration.x = 0.0;
msg_value.linear_acceleration.y = 0.0;
msg_value.linear_acceleration.z = 0.0;
msg_value.orientation.x = 0.0;
msg_value.orientation.y = 1.0;
msg_value.orientation.z = 0.0;
msg_value.orientation.w = 0.0;
}
int main(int argc, char** argv)
{
blah....
geometry_msgs::Twist msg;
geometry_msgs::PoseWithCovarianceStamped msg_setpose;
nav_msgs::Odometry nav_msg;
sensor_msgs::JointState jt_msg;
gazebo_msgs::SetModelState set_model_state;
gazebo_msgs::ModelState model_state;
sensor_msgs::Imu msg_imu;
ros::NodeHandle nh;
ros::init(argc, argv, "random_commands");
ros::Publisher pub_imu=nh.advertise<sensor_msgs::Imu>("imu/data", 100);
Initialise(msg_imu);
pub_imu.publish(msg_imu);
ros::Rate rate(10);
srand(time(0));
while(ros::ok()) {
//give speed
//check distance
....blah..
rate.sleep();
}
ros::spin();
return 0;
}
All headers have been included and the code compiles.
I subscribe to "nav_msgs::Odometry" (which I think uses "geometry_msgs::Pose") to get the odometry data for position and orientation. Speed is being fed in the x direction through the "cmd_vel" topic.
What I expected to see:
I expected the odom frame to be initialised as per the IMU data. So I expected the robot to turn by 90 degrees in the odom frame and start moving as speed came in, and the odometry data to show an increase in the 'y' direction. But this does not seem to be the case.
The ekf_localization node is part of the Jackal ROS package, I think. What am I doing wrong? Is my understanding wrong?
Please help.
Regards,
rsmitha.
↧