From c5d5c1fae365694e4314eb19645e1830f8b253c7 Mon Sep 17 00:00:00 2001
From: Konstantinos Chatzilygeroudis
Date: Thu, 1 Feb 2024 13:57:43 +0200
Subject: [PATCH] Fix CI + update docs

---
 .github/workflows/ci_linux.yml |  2 +-
 docs/faq/index.html            | 97 +++++++++++++++++++++++++--------
 docs/search/search_index.json  |  2 +-
 docs/sitemap.xml               | 10 ++--
 docs/sitemap.xml.gz            | Bin 199 -> 198 bytes
 src/examples/python/ci.py      | 45 +++++++++++++++
 6 files changed, 127 insertions(+), 29 deletions(-)
 create mode 100644 src/examples/python/ci.py

diff --git a/.github/workflows/ci_linux.yml b/.github/workflows/ci_linux.yml
index 8ab5d581..7b221b60 100644
--- a/.github/workflows/ci_linux.yml
+++ b/.github/workflows/ci_linux.yml
@@ -115,6 +115,6 @@ jobs:
           if [ "$BUILD_PYTHON" = "ON" ]; then
             python_dist_dir=$(python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True, prefix=''))")
             export PYTHONPATH=/home/runner/.dart_install/$python_dist_dir
-            python ${{github.workspace}}/src/examples/python/arm.py
+            python ${{github.workspace}}/src/examples/python/ci.py
           fi

diff --git a/docs/faq/index.html b/docs/faq/index.html
index bfb128aa..bd6df190 100644
--- a/docs/faq/index.html
+++ b/docs/faq/index.html
@@ -546,8 +546,8 @@
[docs/faq/index.html navigation hunks (@@ -546,8 +546,8 @@ above, @@ -605,6 +605,19 @@, @@ -798,8 +811,8 @@, @@ -857,6 +870,19 @@): in both navigation menus the entry "Is there a way to change the joint or link (body) properties (e.g., actuation, mass)?" is retitled to "Is there a way to change the joint properties (e.g., actuation, friction)?", and new sub-entries are added under "How can I spawn multiple robots in parallel?"; the surrounding HTML markup is not recoverable from this extraction.]
@@ -1764,7 +1790,7 @@

-    Is there a way to change the joint or link (body) properties (e.g., actuation, mass)?
+    Is there a way to change the joint properties (e.g., actuation, friction)?

     There are 6 types of actuators available; you can set the same actuator to multiple joints at once, or you can set each joint separately:

@@ -2138,6 +2164,7 @@ through @@ -2196,11 +2242,15 @@ (FAQ body: these hunks add the "How can I spawn multiple robots in parallel?" section after "RGB_D sensor"; its rendered content follows)

    How can I spawn multiple robots in parallel?

    Robot Pool (only in C++)

    The best way to do so is to create a Robot pool. With a robot pool you:

    • Minimize the overhead of loading robots (it happens only once!) or cloning robots (it never happens)
    • Make sure that your robots are "clean" once released from each thread
    • Focus on the important stuff rather than handling robots and threads

    Let's see a more practical example:

    • First we need to include the proper header:

      #include <robot_dart/robot_pool.hpp>

    • Then we create a creator function and the pool object:

      namespace pool {
          // This function should load one robot: here we load Talos
          inline std::shared_ptr<robot_dart::Robot> robot_creator()
          {
              return std::make_shared<robot_dart::robots::Talos>();
          }
          // To create the object we need to pass the robot_creator function and the number of maximum parallel threads
          robot_dart::RobotPool robot_pool(robot_creator, NUM_THREADS);
      } // namespace pool

    The creator function is responsible for loading your robot. It should basically look like standalone code that loads or creates a robot.

    • Next, we create a few threads that utilize the robots (in your code you might be using OpenMP or TBB):

      // for the example, we run NUM_THREADS threads of eval_robot()
      std::vector<std::thread> threads(NUM_THREADS * 2); // *2 to see some reuse
      for (size_t i = 0; i < threads.size(); ++i)
          threads[i] = std::thread(eval_robot, i); // eval_robot is the function that uses the robot

    • An example evaluation function:

      inline void eval_robot(int i)
      {
          // We get one available robot
          auto robot = pool::robot_pool.get_robot();
          std::cout << "Robot " << i << " got [" << robot->skeleton() << "]" << std::endl;
          // --- some robot_dart code ---
          simulate_robot(robot);
          // --- do something with the result
          std::cout << "End of simulation " << i << std::endl;
          // CRITICAL: free your robot!
          pool::robot_pool.free_robot(robot);
          std::cout << "Robot " << i << " freed!" << std::endl;
      }

    I need to simulate many worlds with camera sensors in parallel. How can I do this?

    On magnum_contexts.cpp you can find an example showcasing the use of many worlds with camera sensors in parallel. The main takeaway is that we need to pre-allocate OpenGL contexts so that each thread can take one and use it to render its world.

    // Load robot from URDF
    auto global_robot = std::make_shared<robot_dart::robots::Iiwa>();

    std::vector<std::thread> workers;

@@ -2263,7 +2313,10 @@

    for (size_t i = 0; i < workers.size(); i++) {
        workers[i].join();
    }

    I do not know how to use waf. How can I detect RobotDART from CMake?

    You need to use waf to build RobotDART, but when installing the library a CMake module is installed. Thus it is possible to use RobotDART in your code using CMake. You can find a complete example at cmake/example. In short, the CMake file would look like this:

    cmake_minimum_required(VERSION 3.10 FATAL_ERROR)
    diff --git a/docs/search/search_index.json b/docs/search/search_index.json
    index 80b84016..1ae75267 100644
    --- a/docs/search/search_index.json
    +++ b/docs/search/search_index.json
    @@ -1 +1 @@
    -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#robotdart","title":"RobotDART","text":"

    RobotDART is a C++11 robot simulator (with optional Python bindings) built on top of the DART physics engine. The RobotDART simulator is intended to be used by Robotics and Machine Learning researchers who want to write controllers or test learning algorithms without the delays and overhead that usually come with other simulators (e.g., Gazebo, Coppelia-sim).

    For this reason, the simulator runs headless by default, and it is possible to render the scene (e.g., through a camera sensor) without opening a graphics window. All of RobotDART's code is thread-safe (including graphics and camera sensors), so users can run their code in parallel jobs on multicore computers.

    In a few words, RobotDART combines:

    • a physics engine (DART)
    • an optional graphics engine (Magnum)
    • a few sensor classes (IMU, force/torque sensors, cameras, etc.)
    • a curated URDF library
    • ... and a few useful features to make the life of roboticists/researchers easier

    "},{"location":"#main-features","title":"Main Features","text":"
    • Modern C++ code that makes it easy to develop environments and applications
    • Fast and reliable simulation of robotic mechanisms and their interactions (through the DART physics engine)
    • A structured Robot class that enables unified creation and access to all important values: in RobotDART you can load any robot description file (URDF, SDF, SKEL, and MuJoCo files) with the same command, and all robot measurements can be queried without using any DART code
    • A generic RobotControl class that enables fast prototyping of any type of controller
    • A generic Sensor class that allows the creation of any kind of sensor
    • A growing list of already implemented sensors, that includes 6-axis ForceTorque, IMU, RGB, and RGB-D sensors
    • A simulation class (RobotDARTSimu) that handles multiple robots and sensors, and allows for step-by-step simulation
    • A growing list of supported robots along with edited and optimized models to be used with RobotDART (see the robots page for details and examples):
      • PAL Talos humanoid
      • Franka Emika Panda
      • KUKA LBR Iiwa (14kg version)
      • IIT iCub humanoid (without hands)
      • Unitree A1 quadruped robot
      • Dynamixel-based 6-legged robot
      • A simple arm for educational purposes
      • and you can use any URDF
    • A custom graphical interface built on top of Magnum that allows generic customization
    • Support for windowless OpenGL context creation (even in parallel threads!) to allow for camera sensor usage even in parallel jobs running on clusters
    • Support for video recording in simulation time (i.e., not affected by delays of simulator and/or graphics) for visualization or debugging purposes
    • Full-featured Python bindings for fast prototyping
    • RobotDART runs on any Linux distribution and Mac OS
    "},{"location":"#what-robotdart-is-not","title":"What RobotDART is not","text":"
    • RobotDART is primarily intended to be non-interactive (run a simulation, record/view the result),
    • Interaction is limited to changing the view and your own code. There is no GUI for adding objects or interactively building an environment,
    • RobotDART is not optimized for wheeled robots,
    • RobotDART is not optimized for simulating complex (e.g., mountain-like) terrains.
    "},{"location":"faq/","title":"FAQ","text":""},{"location":"faq/#frequently-asked-questions","title":"Frequently Asked Questions","text":"

    This page provides a user guide to the library through Frequently Asked Questions (FAQ).

    "},{"location":"faq/#what-is-a-minimal-working-example-of-robotdart","title":"What is a minimal working example of RobotDART?","text":"

    You can find a minimal working example at hello_world.cpp. This example creates a world where a hexapod robot is placed just above a floor and left to fall. The robot has no actuation, and the graphics configuration is the simplest possible. Let's break it down.

    • We first include the appropriate files:
    C++Python
    #include <robot_dart/robot_dart_simu.hpp>\n#ifdef GRAPHIC\n#include <robot_dart/gui/magnum/graphics.hpp>\n#endif\n
    import RobotDART as rd\n
    • We then load our hexapod robot:
    C++Python
    auto robot = std::make_shared<robot_dart::Robot>(\"pexod.urdf\");\n
    robot = rd.Robot(\"pexod.urdf\");\n
    • We need to place it above the floor to avoid collision (we can use RobotDART's helpers ;)):
    C++Python
    robot->set_base_pose(robot_dart::make_tf({0., 0., 0.2}));\n
    robot.set_base_pose([0., 0., 0., 0., 0., 0.2])\n
    • We can now create the simulation object and add the robot and the floor:
    C++Python
    robot_dart::RobotDARTSimu simu(0.001); // dt=0.001, 1KHz simulation\nsimu.add_floor();\nsimu.add_robot(robot);\n
    simu = rd.RobotDARTSimu(0.001); # dt=0.001, 1KHz simulation\nsimu.add_floor();\nsimu.add_robot(robot);\n
    • If needed or wanted, we can add a graphics component to visualize the scene:
    C++Python
    auto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>();\nsimu.set_graphics(graphics);\ngraphics->look_at({0.5, 3., 0.75}, {0.5, 0., 0.2});\n
    graphics = rd.gui.Graphics()\nsimu.set_graphics(graphics)\ngraphics.look_at([0.5, 3., 0.75], [0.5, 0., 0.2])\n
    • Once everything is configured, we can run our simulation for a few seconds:
    C++Python
    simu.run(10.);\n
    simu.run(10.)\n
    • Here's how it looks:

    "},{"location":"faq/#what-robots-are-supported-in-robotdart","title":"What robots are supported in RobotDART?","text":"

    RobotDART supports any robot that can be described by a URDF, SDF, SKEL or MJCF file. Nevertheless, we have a curated list of robots with edited and optimized models to be used with RobotDART (see the robots page for details and examples).

    "},{"location":"faq/#how-can-i-load-my-own-urdfsdfskelmjcf-file","title":"How can I load my own URDF/SDF/SKEL/MJCF file?","text":"

    See the robots page for details.
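    As a minimal sketch (mirroring the hello_world example above; the file path is illustrative, not a file shipped with the library — the same one-argument constructor handles URDF, SDF, SKEL, and MJCF descriptions):

    import RobotDART as rd

    # load a custom robot description; path is illustrative
    robot = rd.Robot("path/to/my_robot.urdf")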

    "},{"location":"faq/#how-do-i-enable-graphics-in-my-code","title":"How do I enable graphics in my code?","text":"

    To enable graphics in your code, you need to do the following:

    • Install Magnum. See the installation page for details.
    • Create and set a graphics object in the simulation object. Here's an example:
    C++Python
    auto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>();\nsimu.set_graphics(graphics);\ngraphics->look_at({0.5, 3., 0.75}, {0.5, 0., 0.2});\n
    graphics = rd.gui.Graphics()\nsimu.set_graphics(graphics)\ngraphics.look_at([0.5, 3., 0.75], [0.5, 0., 0.2])\n
    "},{"location":"faq/#i-want-to-have-multiple-camera-sensors-is-it-possible","title":"I want to have multiple camera sensors. Is it possible?","text":"

    Having multiple camera sensors is indeed possible. We can add as many cameras as we wish alongside the main camera defined in How do I record a video:

    C++Python
    // Add camera\nauto camera = std::make_shared<robot_dart::sensor::Camera>(graphics->magnum_app(), 256, 256);\n
    # Add camera\ncamera = rd.sensor.Camera(graphics.magnum_app(), 32, 32)\n
    "},{"location":"faq/#how-do-i-record-a-video","title":"How do I record a video?","text":"

    In order to record a video of what the main or any other camera \"sees\", you need to call the function record_video(path) of the graphics class:

    C++Python
    graphics->record_video(\"talos_dancing.mp4\");\n
    graphics.record_video(\"talos_dancing.mp4\")\n

    Or the camera class:

    C++Python
    // cameras can also record video\ncamera->record_video(\"video-camera.mp4\");\n
    # cameras can also record video\ncamera.record_video(\"video-camera.mp4\")\n
    "},{"location":"faq/#how-can-i-position-a-camera-to-the-environment","title":"How can I position a camera to the environment?","text":"

    In order to position a camera inside the world, we need to use the look_at method of the camera/graphics object:

    C++Python
    // set the position of the camera, and the position where the main camera is looking at\nEigen::Vector3d cam_pos = {-0.5, -3., 0.75};\nEigen::Vector3d cam_looks_at = {0.5, 0., 0.2};\ncamera->look_at(cam_pos, cam_looks_at);\n
    # set the position of the camera, and the position where the main camera is looking at\ncam_pos = [-0.5, -3., 0.75]\ncam_looks_at = [0.5, 0., 0.2]\ncamera.look_at(cam_pos, cam_looks_at)\n
    "},{"location":"faq/#how-can-i-attach-a-camera-to-a-moving-link","title":"How can I attach a camera to a moving link?","text":"

    Cameras can be easily attached to a moving link:

    C++Python
    Eigen::Isometry3d tf;\ntf = Eigen::AngleAxisd(3.14, Eigen::Vector3d{1., 0., 0.});\ntf *= Eigen::AngleAxisd(1.57, Eigen::Vector3d{0., 0., 1.});\ntf.translation() = Eigen::Vector3d(0., 0., 0.1);\ncamera->attach_to_body(robot->body_node(\"iiwa_link_ee\"), tf); // cameras are looking towards -z by default\n
    tf = dartpy.math.Isometry3()\nrot =  dartpy.math.AngleAxis(3.14, [1., 0., 0.])\nrot = rot.multiply( dartpy.math.AngleAxis(1.57, [0., 0., 1.])).to_rotation_matrix()\ntf.set_translation([0., 0., 0.1])\ncamera.attach_to_body(robot.body_node(\"iiwa_link_ee\"), tf) # cameras are looking towards -z by default\n
    "},{"location":"faq/#how-can-i-manipulate-the-camera-object","title":"How can I manipulate the camera object?","text":"

    Every camera has its own parameters, i.e., a near plane, a far plane, a field of view (FOV), and a width and height (which define the aspect ratio). You can manipulate each one separately:

    C++Python
    camera->camera().set_far_plane(5.f);\ncamera->camera().set_near_plane(0.01f);\ncamera->camera().set_fov(60.0f);\n
    camera.camera().set_far_plane(5.)\ncamera.camera().set_near_plane(0.01)\ncamera.camera().set_fov(60.0)\n

    or all at once:

    C++Python
    camera->camera().set_camera_params(5., // far plane\n0.01f, // near plane\n60.0f, // field of view\n600, // width\n400 // height\n);\n
    camera.camera().set_camera_params(5., #far plane\n0.01, #near plane\n60.0, # field of view\n600, # width\n400) #height\n

    You can find a complete example at cameras.cpp.

    "},{"location":"faq/#how-can-i-interact-with-the-camera","title":"How can I interact with the camera?","text":"

    We can translate the camera with the WASD keys, zoom in and out using the mouse wheel, and rotate the camera by holding the left mouse button and moving the mouse.

    "},{"location":"faq/#what-do-the-numbers-in-the-status-bar-mean","title":"What do the numbers in the status bar mean?","text":"

    The status bar looks like this:

    Here, simulation time gives us the total simulated time (in seconds), and wall time gives us the total real time (in seconds) that has passed since we started simulating. The next number, X.Xx, gives us the real-time factor: for example, 1.1x means that the simulation runs 1.1 times faster than real-time, whereas 0.7x means that the simulation runs slower than real-time. The value it: XX ms reports how long the last iteration took (in milliseconds). The last part indicates whether the simulation tries to adhere to real-time or not: sync means that RobotDART will slow down the simulation so that it runs in real-time, whereas no-sync means that RobotDART will run the simulation as fast as possible.
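    For intuition, the real-time factor is just the simulated time divided by the elapsed wall time; a minimal sketch in plain Python (illustrative numbers, not RobotDART API):

    # how the status-bar real-time factor relates to the two clocks
    sim_time = 12.0   # total simulated time so far (seconds)
    wall_time = 10.9  # real time elapsed since the simulation started (seconds)
    real_time_factor = sim_time / wall_time
    print(f"{real_time_factor:.1f}x")  # prints "1.1x": faster than real-time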

    "},{"location":"faq/#how-can-i-alter-the-graphics-scene-eg-change-lighting-conditions","title":"How can I alter the graphics scene (e.g., change lighting conditions)?","text":"

    When creating a graphics object, you can pass a GraphicsConfiguration object that changes the default values:

    C++Python
    robot_dart::gui::magnum::GraphicsConfiguration configuration;\n// We can change the width/height of the window (or camera image dimensions)\nconfiguration.width = 1280;\nconfiguration.height = 960;\nconfiguration.title = \"Graphics Tutorial\"; // We can set a title for our window\n// We can change the configuration for shadows\nconfiguration.shadowed = true;\nconfiguration.transparent_shadows = true;\nconfiguration.shadow_map_size = 1024;\n// We can also alter some specifications for the lighting\nconfiguration.max_lights = 3; // maximum number of lights for our scene [default=3]\nconfiguration.specular_strength = 0.25; // strength of the specular component\n// Some extra configuration for the main camera\nconfiguration.draw_main_camera = true;\nconfiguration.draw_debug = true;\nconfiguration.draw_text = true;\n// We can also change the background color [default=black]\nconfiguration.bg_color = Eigen::Vector4d{1.0, 1.0, 1.0, 1.0};\n// Create the graphics object with the configuration as parameter\nauto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>(configuration);\n
    configuration = rd.gui.GraphicsConfiguration()\n# We can change the width/height of the window (or camera image dimensions)\nconfiguration.width = 1280\nconfiguration.height = 960\nconfiguration.title = \"Graphics Tutorial\"  # We can set a title for our window\n# We can change the configuration for shadows\nconfiguration.shadowed = True\nconfiguration.transparent_shadows = True\nconfiguration.shadow_map_size = 1024\n# We can also alter some specifications for the lighting\nconfiguration.max_lights = 3  # maximum number of lights for our scene\nconfiguration.specular_strength = 0.25  # strength of the specular component\n#  Some extra configuration for the main camera\nconfiguration.draw_main_camera = True\nconfiguration.draw_debug = True\nconfiguration.draw_text = True\n# We can also change the background color [default = black]\nconfiguration.bg_color = [1., 1., 1., 1.]\n# create the graphics object with the configuration as a parameter\ngraphics = rd.gui.Graphics(configuration)\n

    You can disable or enable shadows on the fly as well:

    C++Python
    // Disable shadows\ngraphics->enable_shadows(false, false);\nsimu.run(1.);\n// Enable shadows only for non-transparent objects\ngraphics->enable_shadows(true, false);\nsimu.run(1.);\n// Enable shadows for transparent objects as well\ngraphics->enable_shadows(true, true);\nsimu.run(1.);\n
    # Disable shadows\ngraphics.enable_shadows(False, False)\nsimu.run(1.)\n# Enable shadows only for non-transparent objects\ngraphics.enable_shadows(True, False)\nsimu.run(1.)\n# Enable shadows for transparent objects as well\ngraphics.enable_shadows(True, True)\nsimu.run(1.)\n

    You can also add your own lights. The application by default creates 2 light sources and the maximum number of lights is 3 (you can change this once before the creation of the graphics object via the GraphicsConfiguration object). So usually before you add your lights, you have to clear the default lights:

    C++Python
    // Clear Lights\ngraphics->clear_lights();\n
    # Clear Lights\ngraphics.clear_lights()\n

    Then you must create a custom light material:

    C++Python
    // Create Light material\nMagnum::Float shininess = 1000.f;\nMagnum::Color4 white = {1.f, 1.f, 1.f, 1.f};\n// ambient, diffuse, specular\nauto custom_material = robot_dart::gui::magnum::gs::Material(white, white, white, shininess);\n
    # Create Light material\nshininess = 1000.\nwhite = magnum.Color4(1., 1., 1., 1.)\n# ambient, diffuse, specular\ncustom_material = rd.gui.Material(white, white, white, shininess)\n

    Now you can add one or more of the following lights:

    Point Light:

    C++Python
    // Create point light\nMagnum::Vector3 position = {0.f, 0.f, 2.f};\nMagnum::Float intensity = 1.f;\nMagnum::Vector3 attenuation_terms = {1.f, 0.f, 0.f};\nauto point_light = robot_dart::gui::magnum::gs::create_point_light(position, custom_material, intensity, attenuation_terms);\ngraphics->add_light(point_light);\n
    # Create point light\nposition = magnum.Vector3(0., 0., 2.)\nintensity = 1.\nattenuation_terms = magnum.Vector3(1., 0., 0.)\npoint_light = rd.gui.create_point_light(position, custom_material, intensity, attenuation_terms)\ngraphics.add_light(point_light)\n

    Spot Light:

    C++Python
    // Create spot light\nMagnum::Vector3 position = {0.f, 0.f, 1.f};\nMagnum::Vector3 direction = {-1.f, -1.f, -1.f};\nMagnum::Float intensity = 1.f;\nMagnum::Vector3 attenuation_terms = {1.f, 0.f, 0.f};\nMagnum::Float spot_exponent = M_PI;\nMagnum::Float spot_cut_off = M_PI / 8;\nauto spot_light = robot_dart::gui::magnum::gs::create_spot_light(position, custom_material, direction, spot_exponent, spot_cut_off, intensity, attenuation_terms);\ngraphics->add_light(spot_light);\n
    # Create spot light\nposition = magnum.Vector3(0., 0., 1.)\ndirection = magnum.Vector3(-1, -1, -1)\nintensity = 1.\nattenuation_terms = magnum.Vector3(1., 0., 0.)\nspot_exponent = np.pi\nspot_cut_off = np.pi / 8\nspot_light = rd.gui.create_spot_light(position, custom_material, direction, spot_exponent, spot_cut_off, intensity, attenuation_terms)\ngraphics.add_light(spot_light)\n

    Directional Light:

    C++Python
    // Create directional light\nMagnum::Vector3 direction = {-1.f, -1.f, -1.f};\nauto directional_light = robot_dart::gui::magnum::gs::create_directional_light(direction, custom_material);\ngraphics->add_light(directional_light);\n
    # Create directional light\ndirection = magnum.Vector3(-1, -1, -1)\ndirectional_light = rd.gui.create_directional_light(direction, custom_material)\ngraphics.add_light(directional_light)\n
    "},{"location":"faq/#i-want-to-visualize-a-target-configuration-of-my-robot-is-this-possible","title":"I want to visualize a target configuration of my robot, is this possible?","text":"

    Yes, this is possible. RobotDART gives you the ability to create a clone of your robot that has no physics and no contacts, just visuals:

    C++Python
    // Add a ghost robot; only visuals, no dynamics, no collision\nauto ghost = robot->clone_ghost();\nsimu.add_robot(ghost);\n
    # Add a ghost robot; only visuals, no dynamics, no collision\nghost = robot.clone_ghost()\nsimu.add_robot(ghost)\n
    "},{"location":"faq/#how-can-i-control-my-robot","title":"How can I control my robot ?","text":"

    PD control

    C++Python
    // add a PD-controller to the arm\n// set desired positions\nEigen::VectorXd ctrl = robot_dart::make_vector({0., M_PI / 4., 0., -M_PI / 4., 0., M_PI / 2., 0., 0.});\n// add the controller to the robot\nauto controller = std::make_shared<robot_dart::control::PDControl>(ctrl);\nrobot->add_controller(controller);\ncontroller->set_pd(300., 50.);\n
    # add a PD-controller to the arm\n# set desired positions\nctrl = [0., np.pi / 4., 0., -np.pi / 4., 0., np.pi / 2., 0., 0.]\n# add the controller to the robot\ncontroller = rd.PDControl(ctrl)\nrobot.add_controller(controller)\ncontroller.set_pd(300., 50.)\n

    Simple control

    C++Python
    auto controller1 = std::make_shared<robot_dart::control::SimpleControl>(ctrl);\nrobot->add_controller(controller1);\n
    controller1 = rd.SimpleControl(ctrl)\nrobot.add_controller(controller1)\n

    Robot control

    C++Python
    class MyController : public robot_dart::control::RobotControl {\npublic:\nMyController() : robot_dart::control::RobotControl() {}\nMyController(const Eigen::VectorXd& ctrl, bool full_control) : robot_dart::control::RobotControl(ctrl, full_control) {}\nMyController(const Eigen::VectorXd& ctrl, const std::vector<std::string>& dof_names) : robot_dart::control::RobotControl(ctrl, dof_names) {}\nvoid configure() override\n{\n_active = true;\n}\nEigen::VectorXd calculate(double) override\n{\nauto robot = _robot.lock();\nEigen::VectorXd cmd = 100. * (_ctrl - robot->positions(_controllable_dofs));\nreturn cmd;\n}\nstd::shared_ptr<robot_dart::control::RobotControl> clone() const override\n{\nreturn std::make_shared<MyController>(*this);\n}\n};\n
    class MyController(rd.RobotControl):\ndef __init__(self, ctrl, full_control):\nrd.RobotControl.__init__(self, ctrl, full_control)\ndef __init__(self, ctrl, controllable_dofs):\nrd.RobotControl.__init__(self, ctrl, controllable_dofs)\ndef configure(self):\nself._active = True\ndef calculate(self, t):\ntarget = np.array([self._ctrl])\ncmd = 100*(target-self.robot().positions(self._controllable_dofs))\nreturn cmd[0]\n# TO-DO: This is NOT working at the moment!\ndef clone(self):\nreturn MyController(self._ctrl, self._controllable_dofs)\n
    "},{"location":"faq/#is-there-a-way-to-control-the-simulation-timestep","title":"Is there a way to control the simulation timestep?","text":"

    When creating a RobotDARTSimu object you choose the simulation timestep:

    C++Python
    // choose time step of 0.001 seconds\nrobot_dart::RobotDARTSimu simu(0.001);\n
    # choose time step of 0.001 seconds\nsimu = rd.RobotDARTSimu(0.001)\n

    which can later be modified by:

    C++Python
    // set timestep to 0.005 and update control frequency(bool)\nsimu.set_timestep(0.005, true);\n
    # set timestep to 0.005 and update control frequency(bool)\nsimu.set_timestep(0.005, True)\n
    "},{"location":"faq/#i-want-to-simulate-a-mars-environment-is-it-possible-to-change-the-gravitational-force-of-the-simulation-environment","title":"I want to simulate a mars environment, is it possible to change the gravitational force of the simulation environment?","text":"

    Yes, you can modify the 3-dimensional gravitational force vector of the simulation:

    C++Python
    // Set gravitational force of mars\nEigen::Vector3d mars_gravity = {0., 0., -3.721};\nsimu.set_gravity(mars_gravity);\n
    # set gravitational force of mars\nmars_gravity = [0., 0., -3.721]\nsimu.set_gravity(mars_gravity)\n
    "},{"location":"faq/#which-collision-detectors-are-available-what-are-their-differences-how-can-i-choose-between-them","title":"Which collision detectors are available? What are their differences? How can I choose between them?","text":"DART FCL ODE Bullet Support only basic shapes Full-featured collision detector fully integrated by DART External collision detector of ODE External collision detector of Bullet This is building along with DART This is a required dependency of DART Needs an external library Needs an external library Very fast for small scenes Accurate detailed collisions, but not very fast Fast collision detection (the integration is not complete) Fast and accurate collision detection (works well for wheels as well)

    We can easily select one of the available collision detectors using the simulator object:

    C++Python
    simu.set_collision_detector(\"fcl\"); // collision_detector can be \"dart\", \"fcl\", \"ode\" or \"bullet\" (case does not matter)\n
    simu.set_collision_detector(\"fcl\") # collision_detector can be \"dart\", \"fcl\", \"ode\" or \"bullet\" (case does not matter)\n
    "},{"location":"faq/#my-robot-does-not-self-collide-how-can-i-change-this","title":"My robot does not self-collide. How can I change this?","text":"

    One possible cause may be that self-collision is disabled. You can check and change this:

    C++Python
    if (!robot->self_colliding()) {\nstd::cout << \"Self collision is not enabled\" << std::endl;\n// set self collisions to true and adjacent collisions to false\nrobot->set_self_collision(true, false);\n}\n
    if(not robot.self_colliding()):\nprint(\"Self collision is not enabled\")\n# set self collisions to true and adjacent collisions to false\nrobot.set_self_collision(True, False)\n
    "},{"location":"faq/#how-can-i-compute-kinematicdynamic-properties-of-my-robot-eg-jacobians-mass-matrix","title":"How can I compute kinematic/dynamic properties of my robot (e.g., Jacobians, Mass Matrix)?","text":"

    Kinematic Properties:

    C++Python
    // Get Joint Positions(Angles)\nauto joint_positions = robot->positions();\n// Get Joint Velocities\nauto joint_vels = robot->velocities();\n// Get Joint Accelerations\nauto joint_accs = robot->accelerations();\n// Get link_name(str) Transformation matrix with respect to the world frame.\nauto eef_tf = robot->body_pose(link_name);\n// Get translation vector from transformation matrix\nauto eef_pos = eef_tf.translation();\n// Get rotation matrix from transformation matrix\nauto eef_rot = eef_tf.rotation();\n// Get link_name 6d pose vector [logmap(eef_tf.linear()), eef_tf.translation()]\nauto eef_pose_vec = robot->body_pose_vec(link_name);\n// Get link_name 6d velocity vector [angular, cartesian]\nauto eef_vel = robot->body_velocity(link_name);\n// Get link_name 6d acceleration vector [angular, cartesian]\nauto eef_acc = robot->body_acceleration(link_name);\n// Jacobian targeting the origin of link_name(str)\nauto jacobian = robot->jacobian(link_name);\n// Jacobian time derivative\nauto jacobian_deriv = robot->jacobian_deriv(link_name);\n// Center of Mass Jacobian\nauto com_jacobian = robot->com_jacobian();\n// Center of Mass Jacobian Time Derivative\nauto com_jacobian_deriv = robot->com_jacobian_deriv();\n
    # Get Joint Positions(Angles)\njoint_positions = robot.positions()\n# Get Joint Velocities\njoint_vels = robot.velocities()\n# Get Joint Accelerations\njoint_accs = robot.accelerations()\n# Get link_name(str) Transformation matrix with respect to the world frame.\neef_tf = robot.body_pose(link_name)\n# Get translation vector from transformation matrix\neef_pos = eef_tf.translation()\n# Get rotation matrix from transformation matrix\neef_rot = eef_tf.rotation()\n# Get link_name 6d pose vector [logmap(eef_tf.linear()), eef_tf.translation()]\neef_pose_vec = robot.body_pose_vec(link_name)\n# Get link_name 6d velocity vector [angular, cartesian]\neef_vel = robot.body_velocity(link_name)\n# Get link_name 6d acceleration vector [angular, cartesian]\neef_acc = robot.body_acceleration(link_name)\n# Jacobian targeting the origin of link_name(str)\njacobian = robot.jacobian(link_name)\n# Jacobian time derivative\njacobian_deriv = robot.jacobian_deriv(link_name)\n# Center of Mass Jacobian\ncom_jacobian = robot.com_jacobian()\n# Center of Mass Jacobian Time Derivative\ncom_jacobian_deriv = robot.com_jacobian_deriv()\n

    Dynamic Properties:

    C++Python
    // Get Joint Forces\nauto joint_forces = robot->forces();\n// Get link's mass\nauto eef_mass = robot->body_mass(link_name);\n// Mass Matrix of robot\nauto mass_matrix = robot->mass_matrix();\n// Inverse of Mass Matrix\nauto inv_mass_matrix = robot->inv_mass_matrix();\n// Augmented Mass matrix\nauto aug_mass_matrix = robot->aug_mass_matrix();\n// Inverse of Augmented Mass matrix\nauto inv_aug_mass_matrix = robot->inv_aug_mass_matrix();\n// Coriolis Force vector\nauto coriolis = robot->coriolis_forces();\n// Gravity Force vector\nauto gravity = robot->gravity_forces();\n// Combined vector of Coriolis Force and Gravity Force\nauto coriolis_gravity = robot->coriolis_gravity_forces();\n// Constraint Force Vector\nauto constraint_forces = robot->constraint_forces();\n
    # Get Joint Forces\njoint_forces = robot.forces()\n# Get link's mass\neef_mass = robot.body_mass(link_name)\n# Mass Matrix of robot\nmass_matrix = robot.mass_matrix()\n# Inverse of Mass Matrix\ninv_mass_matrix = robot.inv_mass_matrix()\n# Augmented Mass matrix\naug_mass_matrix = robot.aug_mass_matrix()\n# Inverse of Augmented Mass matrix\ninv_aug_mass_matrix = robot.inv_aug_mass_matrix()\n# Coriolis Force vector\ncoriolis = robot.coriolis_forces()\n# Gravity Force vector\ngravity = robot.gravity_forces()\n# Combined vector of Coriolis Force and Gravity Force\ncoriolis_gravity = robot.coriolis_gravity_forces()\n# NOT IMPLEMENTED\n# # Constraint Force Vector\n# constraint_forces = robot.constraint_forces()\n
    "},{"location":"faq/#is-there-a-way-to-change-the-joint-or-link-body-properties-eg-actuation-mass","title":"Is there a way to change the joint or link (body) properties (e.g., actuation, mass)?","text":"

    There are 6 types of actuators available; you can set the same actuator to multiple joints at once, or you can set each joint separately:

    C++Python
    // Set all DoFs to same actuator\nrobot->set_actuator_types(\"servo\"); // actuator types can be \"servo\", \"torque\", \"velocity\", \"passive\", \"locked\", \"mimic\"\n// You can also set actuator types separately\nrobot->set_actuator_type(\"torque\", \"iiwa_joint_5\");\n
    # Set all DoFs to same actuator\n# actuator types can be \"servo\", \"torque\", \"velocity\", \"passive\", \"locked\", \"mimic\"\nrobot.set_actuator_types(\"servo\")\n# You can also set actuator types separately\nrobot.set_actuator_type(\"torque\", \"iiwa_joint_5\")\n

    To enable position and velocity limits for the actuators:

    C++Python
    // Enforce joint position and velocity limits\nrobot->set_position_enforced(true);\n
    # Enforce joint position and velocity limits\nrobot.set_position_enforced(True)\n

    Every DOF's limits (position, velocity, acceleration, force) can be modified:

    C++Python
    // Modify Position Limits\nEigen::VectorXd pos_upper_lims(7);\npos_upper_lims << 2.096, 2.096, 2.096, 2.096, 2.096, 2.096, 2.096;\nrobot->set_position_upper_limits(pos_upper_lims, dof_names);\nrobot->set_position_lower_limits(-pos_upper_lims, dof_names);\n// Modify Velocity Limits\nEigen::VectorXd vel_upper_lims(7);\nvel_upper_lims << 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5;\nrobot->set_velocity_upper_limits(vel_upper_lims, dof_names);\nrobot->set_velocity_lower_limits(-vel_upper_lims, dof_names);\n// Modify Force Limits\nEigen::VectorXd force_upper_lims(7);\nforce_upper_lims << 150, 150, 150, 150, 150, 150, 150;\nrobot->set_force_upper_limits(force_upper_lims, dof_names);\nrobot->set_force_lower_limits(-force_upper_lims, dof_names);\n// Modify Acceleration Limits\nEigen::VectorXd acc_upper_lims(7);\nacc_upper_lims << 1500, 1500, 1500, 1500, 1500, 1500, 1500;\nrobot->set_acceleration_upper_limits(acc_upper_lims, dof_names);\nrobot->set_acceleration_lower_limits(-acc_upper_lims, dof_names);\n
    # Modify Position Limits\npos_upper_lims = np.array([2.096, 2.096, 2.096, 2.096, 2.096, 2.096, 2.096])\nrobot.set_position_upper_limits(pos_upper_lims, dof_names)\nrobot.set_position_lower_limits(-1*pos_upper_lims, dof_names)\n# Modify Velocity Limits\nvel_upper_lims = np.array([1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5])\nrobot.set_velocity_upper_limits(vel_upper_lims, dof_names)\nrobot.set_velocity_lower_limits(-1*vel_upper_lims, dof_names)\n# Modify Force Limits\nforce_upper_lims = np.array([150, 150, 150, 150, 150, 150, 150])\nrobot.set_force_upper_limits(force_upper_lims, dof_names)\nrobot.set_force_lower_limits(-1*force_upper_lims, dof_names)\n# Modify Acceleration Limits\nacc_upper_lims = np.array([1500, 1500, 1500, 1500, 1500, 1500, 1500])\nrobot.set_acceleration_upper_limits(acc_upper_lims, dof_names)\nrobot.set_acceleration_lower_limits(-1*acc_upper_lims, dof_names)\n

    You can also modify the damping coefficients, coulomb frictions and spring stiffness of every DOF:

    C++Python
    // Modify Damping Coefficients\nstd::vector<double> damps = {0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4};\nrobot->set_damping_coeffs(damps, dof_names);\n// Modify Coulomb Frictions\nstd::vector<double> cfrictions = {0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001};\nrobot->set_coulomb_coeffs(cfrictions, dof_names);\n// Modify  Spring Stiffness\nstd::vector<double> stiffnesses = {0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001};\nrobot->set_spring_stiffnesses(stiffnesses, dof_names);\n
    # Modify Damping Coefficients\ndamps = [0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4]\nrobot.set_damping_coeffs(damps, dof_names)\n# Modify Coulomb Frictions\ncfrictions = [0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001]\nrobot.set_coulomb_coeffs(cfrictions, dof_names)\n# Modify  Spring Stiffness\nstiffnesses = [0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001]\nrobot.set_spring_stiffnesses(stiffnesses, dof_names)\n
    "},{"location":"faq/#what-are-the-supported-sensors-how-can-i-use-an-imu","title":"What are the supported sensors? How can I use an IMU?","text":"

    Sensors in RobotDART can be added only through the simulator object. All of the sensors can be added without being attached to any body or joint, but some of them can operate only when attached to something (e.g., ForceTorque sensors).

    "},{"location":"faq/#torque-sensor","title":"Torque sensor","text":"

    Torque sensors can be added to every joint of the robot:

    C++Python
    // Add torque sensors to the robot\nint ct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::Torque>> tq_sensors(robot->num_dofs());\nfor (const auto& joint : robot->dof_names())\ntq_sensors[ct++] = simu.add_sensor<robot_dart::sensor::Torque>(robot, joint, 1000);\n
    # Add torque sensors to the robot\ntq_sensors = np.empty(robot.num_dofs(), dtype=rd.sensor.Torque)\nct = 0\nfor joint in robot.dof_names():\nsimu.add_sensor(rd.sensor.Torque(robot, joint, 1000))\ntq_sensors[ct] = simu.sensors()[-1]\nct += 1\n

    Torque sensors measure the torque \(\tau \in \rm I\!R^n\) of the attached joint (where \(n\) is the number of DOFs of the joint):

    C++Python
    // vector that contains the torque measurement for every joint (scalar)\nEigen::VectorXd torques_measure(robot->num_dofs());\nfor (const auto& tq_sens : tq_sensors)\ntorques_measure.block<1, 1>(ct++, 0) = tq_sens->torques();\n
    # vector that contains the torque measurement for every joint (scalar)\ntorques_measure = np.empty(robot.num_dofs())\nct = 0\nfor tq_sens in tq_sensors:\ntorques_measure[ct] = tq_sens.torques()\nct += 1\n
    "},{"location":"faq/#force-torque-sensor","title":"Force-Torque sensor","text":"

    Force-Torque sensors can be added to every joint of the robot:

    C++Python
    // Add force-torque sensors to the robot\nct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::ForceTorque>> f_tq_sensors(robot->num_dofs());\nfor (const auto& joint : robot->dof_names())\nf_tq_sensors[ct++] = simu.add_sensor<robot_dart::sensor::ForceTorque>(robot, joint, 1000, \"parent_to_child\");\n
    # Add force-torque sensors to the robot\nf_tq_sensors = np.empty(robot.num_dofs(), dtype=rd.sensor.ForceTorque)\nct = 0\nfor joint in robot.dof_names():\nsimu.add_sensor(rd.sensor.ForceTorque(\nrobot, joint, 1000, \"parent_to_child\"))\nf_tq_sensors[ct] = simu.sensors()[-1]\nprint(f_tq_sensors)\nct += 1\n

    Force-Torque sensors measure the force \(\boldsymbol{F} \in \rm I\!R^3\), the torque \(\boldsymbol{\tau} \in \rm I\!R^3\) and the wrench \(\boldsymbol{\mathcal{F}} =\begin{bmatrix} \tau, F\end{bmatrix}\in \rm I\!R^6\) of the attached joint:

    C++Python
    //  matrix that contains the torque measurement for every joint (3d vector)\nEigen::MatrixXd ft_torques_measure(robot->num_dofs(), 3);\n//  matrix that contains the force measurement for every joint (3d vector)\nEigen::MatrixXd ft_forces_measure(robot->num_dofs(), 3);\n//  matrix that contains the wrench measurement for every joint (6d vector)[torque, force]\nEigen::MatrixXd ft_wrench_measure(robot->num_dofs(), 6);\nct = 0;\nfor (const auto& f_tq_sens : f_tq_sensors) {\nft_torques_measure.block<1, 3>(ct, 0) = f_tq_sens->torque();\nft_forces_measure.block<1, 3>(ct, 0) = f_tq_sens->force();\nft_wrench_measure.block<1, 6>(ct, 0) = f_tq_sens->wrench();\nct++;\n}\n
    #  matrix that contains the torque measurement for every joint (3d vector)\nft_torques_measure = np.empty([robot.num_dofs(), 3])\n#  matrix that contains the force measurement for every joint (3d vector)\nft_forces_measure = np.empty([robot.num_dofs(), 3])\n#  matrix that contains the wrench measurement for every joint (6d vector)[torque, force]\nft_wrench_measure = np.empty([robot.num_dofs(), 6])\nct = 0\nfor f_tq_sens in f_tq_sensors:\nft_torques_measure[ct, :] = f_tq_sens.torque()\nft_forces_measure[ct, :] = f_tq_sens.force()\nft_wrench_measure[ct, :] = f_tq_sens.wrench()\nct += 1\n
    "},{"location":"faq/#imu-sensor","title":"IMU sensor","text":"

    IMU sensors can be added to every link of the robot:

    C++Python
    // Add IMU sensors to the robot\nct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::IMU>> imu_sensors(robot->num_bodies());\nfor (const auto& body_node : robot->body_names()) {\n// _imu(std::make_shared<sensor::IMU>(sensor::IMUConfig(body_node(\"head\"), frequency))),\nimu_sensors[ct++] = simu.add_sensor<robot_dart::sensor::IMU>(robot_dart::sensor::IMUConfig(robot->body_node(body_node), 1000));\n}\n
    # Add IMU sensors to the robot\nct = 0\nimu_sensors = np.empty(robot.num_bodies(), dtype=rd.sensor.IMU)\nfor body_node in robot.body_names():\nsimu.add_sensor(rd.sensor.IMU(\nrd.sensor.IMUConfig(robot.body_node(body_node), 1000)))\nimu_sensors[ct] = simu.sensors()[-1]\nct += 1\n

    IMU sensors measure the angular position vector \\(\\boldsymbol{\\theta} \\in \\rm I\\!R^3\\), the angular velocity \\(\\boldsymbol{\\omega} \\in \\rm I\\!R^3\\) and the linear acceleration \\(\\boldsymbol{\\alpha} \\in \\rm I\\!R^3\\) of the attached link:

    C++Python
    Eigen::MatrixXd imu_angular_positions_measure(robot->num_bodies(), 3);\nEigen::MatrixXd imu_angular_velocities_measure(robot->num_bodies(), 3);\nEigen::MatrixXd imu_linear_acceleration_measure(robot->num_bodies(), 3);\nct = 0;\nfor (const auto& imu_sens : imu_sensors) {\nimu_angular_positions_measure.block<1, 3>(ct, 0) = imu_sens->angular_position_vec();\nimu_angular_velocities_measure.block<1, 3>(ct, 0) = imu_sens->angular_velocity();\nimu_linear_acceleration_measure.block<1, 3>(ct, 0) = imu_sens->linear_acceleration();\nct++;\n}\n
    imu_angular_positions_measure = np.empty([robot.num_bodies(), 3])\nimu_angular_velocities_measure = np.empty([robot.num_bodies(), 3])\nimu_linear_acceleration_measure = np.empty([robot.num_bodies(), 3])\nct = 0\nfor imu_sens in imu_sensors:\nimu_angular_positions_measure[ct,:] = imu_sens.angular_position_vec()\nimu_angular_velocities_measure[ct, :] = imu_sens.angular_velocity()\nimu_linear_acceleration_measure[ct,:] = imu_sens.linear_acceleration()\nct += 1\n
    "},{"location":"faq/#rgb-sensor","title":"RGB sensor","text":"

    Any camera can be used as an RGB sensor:

    C++Python
    // a nested std::vector (w*h*3) of the last image taken can be retrieved\nauto rgb_image = camera->image();\n
    # a nested array (w*h*3) of the last image taken can be retrieved\nrgb_image = camera.image()\n

    We can easily save the image and/or transform it to grayscale:

    C++Python
    // a nested std::vector (w*h*3) of the last image taken can be retrieved\nauto rgb_image = camera->image();\n
    # a nested array (w*h*3) of the last image taken can be retrieved\nrgb_image = camera.image()\n
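    The snippet above only retrieves the image. As a sketch of the save/grayscale step: save_png_image is the same helper used for the depth images below, while the numpy-based grayscale conversion is our own illustrative assumption (it presumes the image converts to a (height, width, 3) array), not a RobotDART API:

    import numpy as np
    import RobotDART as rd

    # save the last RGB image (save_png_image is also used for depth images below)
    rd.gui.save_png_image("camera-rgb.png", rgb_image)
    # illustrative grayscale conversion, assuming rgb_image converts to an array
    gray = np.asarray(rgb_image).mean(axis=2).astype(np.uint8)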
    "},{"location":"faq/#rgb_d-sensor","title":"RGB_D sensor","text":"

    Any camera can also be configured to record depth:

    C++Python
    camera->camera().record(true, true); // cameras are recording color images by default, enable depth images as well for this example\n
    # cameras are recording color images by default, enable depth images as well for this example\ncamera.camera().record(True, True)\n

    We can then read the RGB and depth images:

    C++Python
    // get the depth image from a camera\n// with a version for visualization or bigger differences in the output\nauto rgb_d_image = camera->depth_image();\n// and the raw values that can be used along with the camera parameters to transform the image to point-cloud\nauto rgb_d_image_raw = camera->raw_depth_image();\n
    # get the depth image from a camera\n# with a version for visualization or bigger differences in the output\nrgb_d_image = camera.depth_image()\n# and the raw values that can be used along with the camera parameters to transform the image to point-cloud\nrgb_d_image_raw = camera.raw_depth_image()\n

    We can save the depth images as well:

    C++Python
    robot_dart::gui::save_png_image(\"camera-depth.png\", rgb_d_image);\nrobot_dart::gui::save_png_image(\"camera-depth-raw.png\", rgb_d_image_raw);\n
    rd.gui.save_png_image(\"camera-depth.png\", rgb_d_image)\nrd.gui.save_png_image(\"camera-depth-raw.png\", rgb_d_image_raw)\n
    "},{"location":"faq/#how-can-i-spawn-multiple-robots-in-parallel","title":"How can I spawn multiple robots in parallel?","text":"

    The best way to do so is to create a Robot pool. With a robot pool you:

    • Minimize the overhead of loading robots (it happens only once!) or cloning robots (it never happens)
    • Make sure that your robots are \"clean\" once released from each thread
    • Focus on the important stuff rather than handling robots and threads

    Let's see a more practical example:

    • First we need to include the proper header:

      #include <robot_dart/robot_pool.hpp>\n
    • Then we create a creator function and the pool object:

      namespace pool {\n// This function should load one robot: here we load Talos\ninline std::shared_ptr<robot_dart::Robot> robot_creator()\n{\nreturn std::make_shared<robot_dart::robots::Talos>();\n}\n// To create the object we need to pass the robot_creator function and the number of maximum parallel threads\nrobot_dart::RobotPool robot_pool(robot_creator, NUM_THREADS);\n} // namespace pool\n

    The creator function is responsible for loading your robot. It should basically look like standalone code that loads or creates a robot.

    • Next, we create a few threads that utilize the robots (in your code you might be using OpenMP or TBB):

      // for the example, we run NUM_THREADS threads of eval_robot()\nstd::vector<std::thread> threads(NUM_THREADS * 2); // *2 to see some reuse\nfor (size_t i = 0; i < threads.size(); ++i)\nthreads[i] = std::thread(eval_robot, i); // eval_robot is the function that uses the robot\n
    • An example evaluation function:

      inline void eval_robot(int i)\n{\n// We get one available robot\nauto robot = pool::robot_pool.get_robot();\nstd::cout << \"Robot \" << i << \" got [\" << robot->skeleton() << \"]\" << std::endl;\n/// --- some robot_dart code ---\nsimulate_robot(robot);\n// --- do something with the result\nstd::cout << \"End of simulation \" << i << std::endl;\n// CRITICAL : free your robot !\npool::robot_pool.free_robot(robot);\nstd::cout << \"Robot \" << i << \" freed!\" << std::endl;\n}\n
    "},{"location":"faq/#i-need-to-simulate-many-worlds-with-camera-sensors-in-parallel-how-can-i-do-this","title":"I need to simulate many worlds with camera sensors in parallel. How can I do this?","text":"

    On magnum_contexts.cpp you can find an example showcasing the use of many worlds with camera sensors in parallel. The main takeaway is that we need to pre-allocate OpenGL contexts so that each thread can take one and use it to render its world.

    // Load robot from URDF\nauto global_robot = std::make_shared<robot_dart::robots::Iiwa>();\nstd::vector<std::thread> workers;\n// Set maximum number of parallel GL contexts (this is GPU-dependent)\nrobot_dart::gui::magnum::GlobalData::instance()->set_max_contexts(4);\n// We want 15 parallel simulations with different GL context each\nsize_t N_workers = 15;\nstd::mutex mutex;\nsize_t id = 0;\n// Launch the workers\nfor (size_t i = 0; i < N_workers; i++) {\nworkers.push_back(std::thread([&] {\nmutex.lock();\nsize_t index = id++;\nmutex.unlock();\n// Get the GL context -- this is a blocking call\n// will wait until one GL context is available\n// get_gl_context(gl_context); // this call will not sleep between failed queries\nget_gl_context_with_sleep(gl_context, 20); // this call will sleep 20ms between each failed query\n// Do the simulation\nauto robot = global_robot->clone();\nrobot_dart::RobotDARTSimu simu(0.001);\nEigen::VectorXd ctrl = robot_dart::make_vector({0., M_PI / 3., 0., -M_PI / 4., 0., 0., 0.});\nauto controller = std::make_shared<robot_dart::control::PDControl>(ctrl);\nrobot->add_controller(controller);\ncontroller->set_pd(300., 50.);\n// Magnum graphics\nrobot_dart::gui::magnum::GraphicsConfiguration configuration = robot_dart::gui::magnum::WindowlessGraphics::default_configuration();\nconfiguration.width = 1024;\nconfiguration.height = 768;\nauto graphics = std::make_shared<robot_dart::gui::magnum::WindowlessGraphics>(configuration);\nsimu.set_graphics(graphics);\n// Position the camera differently for each thread to visualize the difference\ngraphics->look_at({0.4 * index, 3.5 - index * 0.1, 2.}, {0., 0., 0.25});\n// record images from main camera/graphics\n// graphics->set_recording(true); // WindowlessGLApplication records images by default\nsimu.add_robot(robot);\nsimu.run(6);\n// Save the image for verification\nrobot_dart::gui::save_png_image(\"camera_\" + std::to_string(index) + \".png\",\ngraphics->image());\n// Release the GL context for another thread to use\nrelease_gl_context(gl_context);\n}));\n}\n// Wait for all the workers\nfor (size_t i = 0; i < workers.size(); i++) {\nworkers[i].join();\n}\n

    "},{"location":"faq/#i-do-not-know-how-to-use-waf-how-can-i-detect-robotdart-from-cmake","title":"I do not know how to use waf. How can I detect RobotDART from CMake?","text":"

    You need to use waf to build RobotDART, but when installing the library a CMake module is installed. Thus it is possible to use RobotDART in your code using CMake. You can find a complete example at cmake/example. In short, the CMake file would look like this:

    cmake_minimum_required(VERSION 3.10 FATAL_ERROR)\nproject(robot_dart_example)\n# we ask for Magnum because we want to build the graphics\nfind_package(RobotDART REQUIRED OPTIONAL_COMPONENTS Magnum)\nadd_executable(robot_dart_example example.cpp)\ntarget_link_libraries(robot_dart_example\nRobotDART::Simu\n)\nif(RobotDART_Magnum_FOUND)\nadd_executable(robot_dart_example_graphics example.cpp)\ntarget_link_libraries(robot_dart_example_graphics\nRobotDART::Simu\nRobotDART::Magnum\n)\nendif()\n
    "},{"location":"faq/#where-can-i-find-complete-working-examples-for-either-c-or-python","title":"Where can I find complete working examples for either c++ or python?","text":"

    C++ examples

    Python examples

    "},{"location":"install/","title":"Manual Installation","text":""},{"location":"install/#manual-installation-of-robotdart","title":"Manual Installation of RobotDART","text":""},{"location":"install/#dependencies","title":"Dependencies","text":""},{"location":"install/#required","title":"Required","text":"
    • Ubuntu (it should work on versions >= 14.04) or OSX
    • Eigen3 (needed by DART)
    • Boost (needed by DART)
    • DART, http://dartsim.github.io/
    "},{"location":"install/#optional","title":"Optional","text":"
    • Magnum (for graphics), https://github.com/mosra/magnum
    "},{"location":"install/#installation-of-the-dependencies","title":"Installation of the dependencies","text":""},{"location":"install/#installing-boost-and-eigen3","title":"Installing Boost and Eigen3","text":"

    For Ubuntu-based distributions we should use the following commands to install Eigen3 and Boost:

    sudo apt-get update\nsudo apt-get install libeigen3-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev\n

    For OSX with brew:

    brew install eigen3\nbrew install boost\n
    "},{"location":"install/#installing-dart","title":"Installing DART","text":"

    In order to use RobotDART, you need to install DART (from source).

    For Ubuntu systems, please follow the detailed installation instructions on the DART documentation website. Make sure that you don't forget to add the PPAs as detailed here. What is more, you can enable the -DART_ENABLE_SIMD flag in the CMake configuration. In addition, you need the following optional dependency: DART Parsers. Lastly, it is recommended to use either the master branch or the v6.12.1 tag (and not the one provided in DART's documentation). In short, you should do the following:

    Ubuntu <= 14.04 only

    sudo apt-add-repository ppa:libccd-debs/ppa\nsudo apt-add-repository ppa:fcl-debs/ppa\n

    For all Ubuntu distributions

    sudo apt-add-repository ppa:dartsim/ppa\nsudo apt-get update\n\nsudo apt-get install build-essential cmake pkg-config git\nsudo apt-get install libeigen3-dev libassimp-dev libccd-dev libfcl-dev libboost-regex-dev libboost-system-dev\n\nsudo apt-get install libtinyxml-dev libtinyxml2-dev\nsudo apt-get install liburdfdom-dev liburdfdom-headers-dev\n\ncd /path/to/tmp/folder\ngit clone git://github.com/dartsim/dart.git\ncd dart\ngit checkout v6.12.1\n\nmkdir build\ncd build\ncmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\n

    If you want to install DART somewhere else than /usr/local, you should use the -DCMAKE_INSTALL_PREFIX=/path/to/install/folder cmake argument.

    For OSX systems with homebrew, please follow the detailed installation instructions on the DART documentation website. You need to follow the same procedure as for Ubuntu systems. In short you should do the following:

    brew install eigen\nbrew install assimp\nbrew install libccd\nbrew install dartsim/dart/fcl04\nbrew install boost\n\nbrew install tinyxml\nbrew install tinyxml2\nbrew install urdfdom\n\ncd /path/to/tmp/folder\ngit clone git://github.com/dartsim/dart.git\ncd dart\ngit checkout v6.12.1\n\nmkdir build\ncd build\ncmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\n
    "},{"location":"install/#installing-magnum","title":"Installing Magnum","text":"

    Magnum depends on Corrade and we are going to use a few plugins and extras from the library. We are also going to use Glfw and Glx for the back-end. Follow the instructions below:

    #installation of Glfw and OpenAL\n# Ubuntu\nsudo apt-get install libglfw3-dev libglfw3 libopenal-dev libassimp-dev\n# Mac OSX\nbrew install glfw3 openal-soft assimp\n\n# installation of Corrade\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/corrade.git\ncd corrade\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release ..\nmake -j\nsudo make install\n\n# installation of Magnum\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum.git\ncd magnum\nmkdir build && cd build\n# Ubuntu\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_AUDIO=ON -DWITH_DEBUGTOOLS=ON -DWITH_GL=ON -DWITH_MESHTOOLS=ON -DWITH_PRIMITIVES=ON -DWITH_SCENEGRAPH=ON -DWITH_SHADERS=ON -DWITH_TEXT=ON -DWITH_TEXTURETOOLS=ON -DWITH_TRADE=ON -DWITH_GLFWAPPLICATION=ON -DWITH_WINDOWLESSEGLAPPLICATION=ON -DWITH_OPENGLTESTER=ON -DWITH_ANYAUDIOIMPORTER=ON -DWITH_ANYIMAGECONVERTER=ON -DWITH_ANYIMAGEIMPORTER=ON -DWITH_ANYSCENEIMPORTER=ON -DWITH_MAGNUMFONT=ON -DWITH_OBJIMPORTER=ON -DWITH_TGAIMPORTER=ON -DWITH_WAVAUDIOIMPORTER=ON -DTARGET_HEADLESS=ON .. # this will enable almost all features of Magnum that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\n# Mac OSX\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_AUDIO=ON -DWITH_DEBUGTOOLS=ON -DWITH_GL=ON -DWITH_MESHTOOLS=ON -DWITH_PRIMITIVES=ON -DWITH_SCENEGRAPH=ON -DWITH_SHADERS=ON -DWITH_TEXT=ON -DWITH_TEXTURETOOLS=ON -DWITH_TRADE=ON -DWITH_GLFWAPPLICATION=ON -DWITH_WINDOWLESSCGLAPPLICATION=ON -DWITH_OPENGLTESTER=ON -DWITH_ANYAUDIOIMPORTER=ON -DWITH_ANYIMAGECONVERTER=ON -DWITH_ANYIMAGEIMPORTER=ON -DWITH_ANYSCENEIMPORTER=ON -DWITH_MAGNUMFONT=ON -DWITH_OBJIMPORTER=ON -DWITH_TGAIMPORTER=ON -DWITH_WAVAUDIOIMPORTER=ON .. # this will enable almost all features of Magnum that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\nmake -j\nsudo make install\n\n# installation of Magnum Plugins\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum-plugins.git\ncd magnum-plugins\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_ASSIMPIMPORTER=ON -DWITH_DDSIMPORTER=ON -DWITH_JPEGIMPORTER=ON -DWITH_OPENGEXIMPORTER=ON -DWITH_PNGIMPORTER=ON -DWITH_TINYGLTFIMPORTER=ON -DWITH_STBTRUETYPEFONT=ON .. # this will enable quite a few Magnum Plugins that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\nmake -j\nsudo make install\n\n# installation of Magnum DART Integration (DART needs to be installed) and Eigen Integration\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum-integration.git\ncd magnum-integration\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_DART=ON -DWITH_EIGEN=ON ..\nmake -j\nsudo make install\n
    "},{"location":"install/#compilation-and-running-the-examples","title":"Compilation and running the examples","text":"

The compilation of the library is straightforward:

    • retrieve the code, for instance with git clone https://github.com/resibots/robot_dart.git
    • cd /path/to/repo/root
    • ./waf configure
    • ./waf

    To build the examples, execute this: ./waf examples

    Now you can run the examples. For example, to run the arm example you need to type the following: ./build/arm (or ./build/arm_plain to run it without graphics).

    "},{"location":"install/#installing-the-library","title":"Installing the library","text":"

To install the library (assuming that you have already compiled it), you only need to run:

    • ./waf install

By default the library will be installed in /usr/local/lib (sudo ./waf install may be needed for this) and a static library will be generated. You can change the defaults as follows:

    • ./waf configure --prefix=/path/to/install/dir --shared
    • ./waf install

In short, with --prefix you can change the directory where the library will be installed, and if --shared is present a shared library will be created instead of a static one.

    "},{"location":"install/#compiling-the-python-bindings","title":"Compiling the python bindings","text":"

For the python bindings of robot_dart, we need numpy, pybind11, and the python bindings of DART (dartpy) to be installed.

numpy can be installed with pip or through standard packages. For pybind11, please follow the instructions on the DART website (focus on the pybind11 part; for the other parts, follow our instructions above).

    For the python bindings of DART, do:

cd dart\n\nmkdir build\ncd build\ncmake -DDART_BUILD_DARTPY=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\nsudo make install-dartpy # not needed for DART >= v6.12.0\n

    Then the compilation of robot_dart is almost identical as before:

    • retrieve the code, for instance with git clone https://github.com/resibots/robot_dart.git
    • cd /path/to/repo/root
    • ./waf configure --python (--python enables the python bindings)
    • ./waf
    • Install the library (including the python bindings) as before (no change is needed)
    • Depending on your installation directory you might need to update your PYTHONPATH, e.g. export PYTHONPATH=$PYTHONPATH:/usr/local/lib/python3.8/site-packages/

To run the python examples (you need to have graphics enabled, i.e., the Magnum library installed), run:

    • cd /path/to/repo/root
    • python src/python/example.py or python src/python/example_parallel.py
    "},{"location":"install/#common-issues-with-python-bindings","title":"Common Issues with Python bindings","text":"

One of the most common issues with the python bindings is that the DART bindings might be compiled and installed for python 3 while the robot_dart ones are compiled for python 2. To force the usage of python 3 for robot_dart, use python3 ./waf instead of just ./waf in all the commands above.

    "},{"location":"quick_install/","title":"Installation","text":""},{"location":"quick_install/#scripts-for-quick-installation-of-robotdart","title":"Scripts for Quick Installation of RobotDART","text":"

On this page we provide standalone scripts for installing RobotDART for Ubuntu (20.04) and OSX. The scripts will install all the required dependencies and RobotDART. Notably, all dependencies that need to be compiled from source, as well as RobotDART itself, will be installed inside the /opt folder. This way, one can rest assured that their system stays clean.

    "},{"location":"quick_install/#ubuntu-2004","title":"Ubuntu 20.04","text":"

    To quickly install RobotDART on Ubuntu 20.04, we just need to run ./scripts/install_ubuntu.sh from the root of the repo. In more detail:

    • git clone https://github.com/resibots/robot_dart.git
    • cd robot_dart
    • ./scripts/install_ubuntu.sh

    This will install everything needed! Once the script is successfully executed, one should add the following to their ~/.bashrc or ~/.zshrc file:

    export PATH=/opt/magnum/bin:$PATH\nexport LD_LIBRARY_PATH=/opt/dart/lib:/opt/magnum/lib:/opt/robot_dart/lib:$LD_LIBRARY_PATH\nexport PYTHONPATH=/opt/dart/lib/python3/dist-packages:/opt/robot_dart/lib/python3.8/site-packages:$PYTHONPATH\n
    "},{"location":"quick_install/#osx","title":"OSX","text":"

    Coming soon

    "},{"location":"robots/","title":"Supported robots","text":""},{"location":"robots/#supported-robots","title":"Supported robots","text":"

Every robot is defined by a URDF, which will be installed in $PREFIX/share/utheque. All robots have pre-defined \"robot classes\" that define sensors and other properties; for your custom/new robots, you will have to add the sensors/properties via the generic robot class (or create a new robot class).

    The URDF files are loaded using the following rules (see utheque::path()):

    • First check in the current directory
    • If not found, check in current_directory/utheque
    • If not found, check in $ROBOT_DART_PATH/utheque
• If not found, check in the RobotDART installation path (e.g., /usr/share/utheque or $HOME/share/utheque)
    • Otherwise, report failure

utheque is a separate header-only library that is installed together with RobotDART (or can be installed on its own), so that libraries which do not want to depend on RobotDART can still use the curated URDF files.
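For example, resolving a URDF to its full path is a one-liner. A minimal sketch (the utheque/utheque.hpp header name and the std::string return type are assumptions to double-check; only utheque::path() itself is referenced above):

C++
#include <utheque/utheque.hpp>\n\n// resolve \"talos/talos.urdf\" using the lookup rules listed above;\n// the returned string is the full path of the first location that contains the file\nstd::string full_path = utheque::path(\"talos/talos.urdf\");\nauto robot = std::make_shared<robot_dart::Robot>(full_path);\n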

    "},{"location":"robots/#talos-pal-robotics","title":"Talos (PAL Robotics)","text":"

    Talos is a humanoid robot made by PAL Robotics.

    • Datasheet: [pdf]
    • 32 degrees of freedom (6 for each leg, 7 for each arm, 2 for the waist, 2 for the neck, 1 for each gripper)
    • 175 cm / 95 kg
    • IMU in the torso
• Torque sensors in all joints except the head, wrists and grippers (22 torque sensors in total)
    • 1 force/torque sensor in each ankle
    • 1 force/torque sensor in each wrist

    We have two URDF files:

    • robots/talos/talos.urdf :
      • accurate (simplified but made of polygons) collision meshes
      • mimic joints for the gripper
  • Not compatible with the DART collision detector (you need to use the FCL collision detector, which is shipped with DART)
      • URDF: [talos.urdf]
      • Example: [talos.cpp]

    Load Talos

    C++
    auto robot = std::make_shared<robot_dart::robots::Talos>();\n
    Python
    robot = rd.Talos()\n
• robots/talos/talos_fast.urdf:
      • no collision except for the feet, which are approximated by boxes
      • grippers are fixed (no movement is allowed)
      • compatible with the DART collision detector
      • URDF: [talos_fast.urdf]
      • Example: [talos_fast.cpp]

talos_fast.urdf is faster because it makes it possible to use the DART collision detector (and has much simpler collision shapes). You should prefer it unless you want to use the grippers (e.g., for manipulation) or are working on self-collisions.

    Load Talos Fast

    C++
    // load talos fast\nauto robot = std::make_shared<robot_dart::robots::TalosFastCollision>();\n// Set actuator types to VELOCITY (for speed)\nrobot->set_actuator_types(\"velocity\");\ndouble dt = 0.001;\nrobot_dart::RobotDARTSimu simu(dt);\n// we can use the DART (fast) collision detector\nsimu.set_collision_detector(\"dart\");\n
    Python
    robot = rd.TalosFastCollision()\n

Please note that the mesh files (.glb) require assimp 5.x (and not assimp 4.x, which is usually shipped with ROS). If you cannot load the URDF, please check your assimp version.

    "},{"location":"robots/#panda-franka-emika","title":"Panda (Franka Emika)","text":"

The Panda is a modern manipulator made by Franka Emika. It is commonly found in many robotics labs.

    • Datasheet: [pdf]
    • 7 degrees of freedom
    • Can be controlled in torque
    • 18 kg
    • workspace: 855 mm (horizontal), 1190 mm (vertical)
    • URDF: [franka.urdf]
• Example: [franka.cpp]. The URDF includes the gripper.

    Load Franka

    C++
    auto robot = std::make_shared<robot_dart::robots::Franka>();\n
    Python
    robot = rd.Franka()\n
    "},{"location":"robots/#lbr-iiwa-kuka","title":"LBR iiwa (KUKA)","text":"

    The LBR iiwa is manufactured by KUKA. It is similar to the Panda and is also very common in robotics labs.

    • Datasheet: [pdf]
• We implement the 14 kg payload version
    • 29.5 kg
    • 7 degrees of freedom
    • URDF: [iiwa.urdf]
    • Example: [iiwa.cpp]

    Load Iiwa

    C++
    auto robot = std::make_shared<robot_dart::robots::Iiwa>();\n
    Python
    robot = rd.Iiwa()\n
    "},{"location":"robots/#icub-iit","title":"iCub (IIT)","text":"

The iCub is an open-source humanoid robot made by the Istituto Italiano di Tecnologia. There are currently 42 iCubs in the world, in many versions.

    • Datasheet (rev 2.3) [pdf]
    • 6 force/torque sensors (upper arms, upper legs, ankles)
    • IMU in the head
• We do not simulate the skin
• We do not simulate the hands
• Our model is close to Inria's iCub, but it has not been checked in detail.
    • URDF: [icub.urdf]
• Example: [icub.cpp]

    Please note that the mesh files (.glb) require assimp 5.x (and not assimp4.x usually shipped with ROS). If you cannot load the URDF, please check your assimp version.

    Load iCub

    C++
    auto robot = std::make_shared<robot_dart::robots::ICub>();\n// Set actuator types to VELOCITY motors so that they stay in position without any controller\nrobot->set_actuator_types(\"velocity\");\nrobot_dart::RobotDARTSimu simu(0.001);\nsimu.set_collision_detector(\"fcl\");\n
    Python
    robot = rd.ICub()\n# Set actuator types to VELOCITY motors so that they stay in position without any controller\nrobot.set_actuator_types(\"velocity\")\nsimu = rd.RobotDARTSimu(0.001)\nsimu.set_collision_detector(\"fcl\")\n

    Print IMU sensor measurements

    C++
    std::cout << \"Angular    Position: \" << robot->imu().angular_position_vec().transpose().format(fmt) << std::endl;\nstd::cout << \"Angular    Velocity: \" << robot->imu().angular_velocity().transpose().format(fmt) << std::endl;\nstd::cout << \"Linear Acceleration: \" << robot->imu().linear_acceleration().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print(\"Angular    Position: \",  robot.imu().angular_position_vec().transpose())\nprint(\"Angular    Velocity: \",  robot.imu().angular_velocity().transpose())\nprint(\"Linear Acceleration: \",  robot.imu().linear_acceleration().transpose())\nprint(\"=================================\" )\n

    Print Force-Torque sensor measurements

    C++
    std::cout << \"FT ( force): \" << robot->ft_foot_left().force().transpose().format(fmt) << std::endl;\nstd::cout << \"FT (torque): \" << robot->ft_foot_left().torque().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print(\"FT ( force): \",  robot.ft_foot_left().force().transpose())\nprint(\"FT (torque): \",  robot.ft_foot_left().torque().transpose())\nprint(\"=================================\")\n
    "},{"location":"robots/#unitree-a1","title":"Unitree A1","text":"

A1 is a quadruped robot made by Unitree Robotics.

    • IMU in the torso
    • We do not simulate the foot pressure sensors (yet)
    • One can easily add a depth camera on the head
    • URDF: [a1.urdf]
• Example: [a1.cpp]

    Load A1

    C++
    auto robot = std::make_shared<robot_dart::robots::A1>();\n
    Python
    robot = rd.A1()\n

    Print IMU sensor measurements

    C++
    std::cout << \"Angular    Position: \" << robot->imu().angular_position_vec().transpose().format(fmt) << std::endl;\nstd::cout << \"Angular    Velocity: \" << robot->imu().angular_velocity().transpose().format(fmt) << std::endl;\nstd::cout << \"Linear Acceleration: \" << robot->imu().linear_acceleration().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print( \"Angular    Position: \", robot.imu().angular_position_vec().transpose())\nprint( \"Angular    Velocity: \", robot.imu().angular_velocity().transpose())\nprint( \"Linear Acceleration: \", robot.imu().linear_acceleration().transpose())\nprint( \"=================================\")\n

    Add a depth camera on the head

    How can I attach a camera to a moving link?
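A minimal sketch of doing this for the A1, combining the camera snippets from the FAQ (the body name \"trunk\" is a placeholder assumption; check a1.urdf for the actual head link):

C++
// assumes a simulation with a graphics object set and the A1 robot added\nauto camera = std::make_shared<robot_dart::sensor::Camera>(graphics->magnum_app(), 256, 256);\ncamera->camera().record(true, true); // record depth images in addition to color\nEigen::Isometry3d tf = Eigen::Isometry3d::Identity();\ntf.translation() = Eigen::Vector3d(0.1, 0., 0.); // small offset in front of the link\ncamera->attach_to_body(robot->body_node(\"trunk\"), tf); // cameras look towards -z by default\nsimu.add_sensor(camera);\n// after simu.run(...), read camera->depth_image() or camera->raw_depth_image()\n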

Please note that the mesh files (.glb) require assimp 5.x (and not assimp 4.x, which is usually shipped with ROS). If you cannot load the URDF, please check your assimp version.

    "},{"location":"robots/#dynamixel-based-hexapod-robot-inria-and-others","title":"Dynamixel-based hexapod robot (Inria and others)","text":"

This hexapod is a simple 6-legged robot based on Dynamixel actuators. It is similar to the robot used in the paper 'Robots that can adapt like animals' (Cully et al., 2015).

    • 6 legs, 3 degrees of freedom for each leg (18 degrees of freedom)
    • simple URDF (no meshes)
    • URDF: [pexod.urdf]
    • Example: [hexapod.cpp]

    Load Hexapod

    C++
    auto robot = std::make_shared<robot_dart::robots::Hexapod>();\n
    Python
    robot = rd.Hexapod()\n

    Load Pexod

    C++
    auto robot = std::make_shared<robot_dart::Robot>(\"pexod.urdf\");\n
    Python
    robot = rd.Robot(\"pexod.urdf\");\n
    "},{"location":"robots/#simple-arm","title":"Simple arm","text":"
    • A simple arm for educational or debugging purposes
    • 5 degrees of freedom
    • simple URDF (no meshes)
    • URDF: [arm.urdf]
    • Example: [arm.cpp]

    Load Simple Arm

    C++
    auto robot = std::make_shared<robot_dart::robots::Arm>();\n
    Python
    robot = rd.Arm()\n
    "},{"location":"robots/#loading-custom-robots","title":"Loading Custom Robots","text":"

RobotDART gives you the ability to load custom robots that are defined in URDF, SDF, SKEL or MJCF files. For example, you can load a URDF model using:

    Load custom Robot

    C++
        auto your_robot = std::make_shared<robot_dart::Robot>(\"path/to/model.urdf\");\n
    Python
        your_robot = robot_dart.Robot(\"path/to/model.urdf\")\n

    Load custom Robot with packages (e.g STL, DAE meshes)

    C++
    std::vector<std::pair<std::string, std::string>> your_model_packages = {{\"model\", \"path/to/model/dir\"}};\nauto your_robot = std::make_shared<robot_dart::Robot>(\"path/to/model.urdf\", your_model_packages, \"packages\");\n
    Python
        your_model_packages = [(\"model\", \"path/to/model/dir\")]\nyour_robot = robot_dart.Robot(\"path/to/model.urdf\", your_model_packages)\n
    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#robotdart","title":"RobotDART","text":"

RobotDART is a C++11 robot simulator (with optional Python bindings) built on top of the DART physics engine. The RobotDART simulator is intended to be used by robotics and machine learning researchers who want to write controllers or test learning algorithms without the delays and overhead that usually come with other simulators (e.g., Gazebo, CoppeliaSim).

For this reason, the simulator runs headless by default, with the possibility of rendering the scene (e.g., through a camera sensor) without opening a graphics window. All of RobotDART's code is thread-safe (including graphics and camera sensors), enabling users to run their code in parallel jobs on multicore computers.

    In a few words, RobotDART combines:

    • a physics engine (DART)
    • an optional graphics engine (Magnum)
    • a few sensor classes (IMU, force/torque sensors, cameras, etc.)
    • a curated URDF library
    • ... and a few useful features to make the life of roboticists/researchers easier

    "},{"location":"#main-features","title":"Main Features","text":"
    • Modern C++ code that makes it easy to develop environments and applications
    • Fast and reliable simulation of robotic mechanisms and their interactions (through the DART physics engine)
    • A structured Robot class that enables a unified creation and access to all important values: in RobotDART you can load any robot description file (URDF, SDF, SKEL, and MuJoCo files) with the same command, and all robot measurements can be queried without using any DART code
    • A generic RobotControl class that enables fast prototyping of any type of controller
    • A generic Sensor class that allows the creation of any kind of sensor
    • A growing list of already implemented sensors, that includes 6-axis ForceTorque, IMU, RGB, and RGB-D sensors
    • A simulation class (RobotDARTSimu) that handles multiple robots and sensors, and allows for step-by-step simulation
    • A growing list of supported robots along with edited and optimized models to be used with RobotDART (see the robots page for details and examples):
      • PAL Talos humanoid
      • Franka Emika Panda
      • KUKA LBR Iiwa (14kg version)
      • IIT iCub humanoid (without hands)
      • Unitree A1 quadruped robot
      • Dynamixel-based 6-legged robot
      • A simple arm for educational purposes
      • and you can use any URDF
    • A custom graphical interface built on top of Magnum that allows generic customization
    • Support for windowless OpenGL context creation (even in parallel threads!) to allow for camera sensor usage even in parallel jobs running on clusters
    • Support for video recording in simulation time (i.e., not affected by delays of simulator and/or graphics) for visualization or debugging purposes
    • Full-featured Python bindings for fast prototyping
    • RobotDART runs on any Linux distribution and Mac OS
    "},{"location":"#what-robotdart-is-not","title":"What RobotDART is not","text":"
    • RobotDART is primarily intended to be non-interactive (run a simulation, record/view the result),
• Interaction is limited to changing the view and your own code. There is no GUI for adding objects or interactively building an environment,
    • RobotDART is not optimized for wheeled robots,
    • RobotDART is not optimized for simulating complex (e.g., mountain-like) terrains.
    "},{"location":"faq/","title":"FAQ","text":""},{"location":"faq/#frequently-asked-questions","title":"Frequently Asked Questions","text":"

This page provides a user guide to the library through Frequently Asked Questions (FAQ).

    "},{"location":"faq/#what-is-a-minimal-working-example-of-robotdart","title":"What is a minimal working example of RobotDART?","text":"

You can find a minimal working example at hello_world.cpp. This example creates a world where a hexapod robot is placed just above a floor and left to fall. The robot has no actuation, and we use the simplest graphics configuration. Let's break it down.

    • We first include the appropriate files:
    C++Python
    #include <robot_dart/robot_dart_simu.hpp>\n#ifdef GRAPHIC\n#include <robot_dart/gui/magnum/graphics.hpp>\n#endif\n
    import RobotDART as rd\n
    • We then load our hexapod robot:
    C++Python
    auto robot = std::make_shared<robot_dart::Robot>(\"pexod.urdf\");\n
    robot = rd.Robot(\"pexod.urdf\");\n
    • We need to place it above the floor to avoid collision (we can use RobotDART's helpers ;)):
    C++Python
    robot->set_base_pose(robot_dart::make_tf({0., 0., 0.2}));\n
    robot.set_base_pose([0., 0., 0., 0., 0., 0.2])\n
    • We can now create the simulation object and add the robot and the floor:
    C++Python
    robot_dart::RobotDARTSimu simu(0.001); // dt=0.001, 1KHz simulation\nsimu.add_floor();\nsimu.add_robot(robot);\n
    simu = rd.RobotDARTSimu(0.001); # dt=0.001, 1KHz simulation\nsimu.add_floor();\nsimu.add_robot(robot);\n
    • If needed or wanted, we can add a graphics component to visualize the scene:
    C++Python
    auto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>();\nsimu.set_graphics(graphics);\ngraphics->look_at({0.5, 3., 0.75}, {0.5, 0., 0.2});\n
    graphics = rd.gui.Graphics()\nsimu.set_graphics(graphics)\ngraphics.look_at([0.5, 3., 0.75], [0.5, 0., 0.2])\n
    • Once everything is configured, we can run our simulation for a few seconds:
    C++Python
    simu.run(10.);\n
    simu.run(10.)\n
    • Here's how it looks:

    "},{"location":"faq/#what-robots-are-supported-in-robotdart","title":"What robots are supported in RobotDART?","text":"

    RobotDART supports any robot that can be described by a URDF, SDF, SKEL or MJCF file. Nevertheless, we have a curated list of robots with edited and optimized models to be used with RobotDART (see the robots page for details and examples).

    "},{"location":"faq/#how-can-i-load-my-own-urdfsdfskelmjcf-file","title":"How can I load my own URDF/SDF/SKEL/MJCF file?","text":"

    See the robots page for details.

    "},{"location":"faq/#how-do-i-enable-graphics-in-my-code","title":"How do I enable graphics in my code?","text":"

    To enable graphics in your code, you need to do the following:

    • Install Magnum. See the installation page for details.
    • Create and set a graphics object in the simulation object. Here's an example:
    C++Python
    auto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>();\nsimu.set_graphics(graphics);\ngraphics->look_at({0.5, 3., 0.75}, {0.5, 0., 0.2});\n
    graphics = rd.gui.Graphics()\nsimu.set_graphics(graphics)\ngraphics.look_at([0.5, 3., 0.75], [0.5, 0., 0.2])\n
    "},{"location":"faq/#i-want-to-have-multiple-camera-sensors-is-it-possible","title":"I want to have multiple camera sensors. Is it possible?","text":"

Having multiple camera sensors is indeed possible. We can add as many cameras as we wish, alongside the main camera defined in \"How do I record a video?\":

    C++Python
    // Add camera\nauto camera = std::make_shared<robot_dart::sensor::Camera>(graphics->magnum_app(), 256, 256);\n
    # Add camera\ncamera = rd.sensor.Camera(graphics.magnum_app(), 32, 32)\n
    "},{"location":"faq/#how-do-i-record-a-video","title":"How do I record a video?","text":"

    In order to record a video of what the main or any other camera \"sees\", you need to call the function record_video(path) of the graphics class:

    C++Python
    graphics->record_video(\"talos_dancing.mp4\");\n
    graphics.record_video(\"talos_dancing.mp4\")\n

    Or the camera class:

    C++Python
    // cameras can also record video\ncamera->record_video(\"video-camera.mp4\");\n
    # cameras can also record video\ncamera.record_video(\"video-camera.mp4\")\n
    "},{"location":"faq/#how-can-i-position-a-camera-to-the-environment","title":"How can I position a camera to the environment?","text":"

In order to position a camera inside the world, we need to use the look_at method of the camera/graphics object:

    C++Python
    // set the position of the camera, and the position where the main camera is looking at\nEigen::Vector3d cam_pos = {-0.5, -3., 0.75};\nEigen::Vector3d cam_looks_at = {0.5, 0., 0.2};\ncamera->look_at(cam_pos, cam_looks_at);\n
    # set the position of the camera, and the position where the main camera is looking at\ncam_pos = [-0.5, -3., 0.75]\ncam_looks_at = [0.5, 0., 0.2]\ncamera.look_at(cam_pos, cam_looks_at)\n
    "},{"location":"faq/#how-can-i-attach-a-camera-to-a-moving-link","title":"How can I attach a camera to a moving link?","text":"

    Cameras can be easily attached to a moving link:

    C++Python
    Eigen::Isometry3d tf;\ntf = Eigen::AngleAxisd(3.14, Eigen::Vector3d{1., 0., 0.});\ntf *= Eigen::AngleAxisd(1.57, Eigen::Vector3d{0., 0., 1.});\ntf.translation() = Eigen::Vector3d(0., 0., 0.1);\ncamera->attach_to_body(robot->body_node(\"iiwa_link_ee\"), tf); // cameras are looking towards -z by default\n
    tf = dartpy.math.Isometry3()\nrot =  dartpy.math.AngleAxis(3.14, [1., 0., 0.])\nrot = rot.multiply( dartpy.math.AngleAxis(1.57, [0., 0., 1.])).to_rotation_matrix()\ntf.set_translation([0., 0., 0.1])\ncamera.attach_to_body(robot.body_node(\"iiwa_link_ee\"), tf) # cameras are looking towards -z by default\n
    "},{"location":"faq/#how-can-i-manipulate-the-camera-object","title":"How can I manipulate the camera object?","text":"

Every camera has its own parameters, i.e., a near plane, a far plane, a field of view (FOV), and a width and a height (which define the aspect ratio). You can manipulate each one separately:

    C++Python
    camera->camera().set_far_plane(5.f);\ncamera->camera().set_near_plane(0.01f);\ncamera->camera().set_fov(60.0f);\n
    camera.camera().set_far_plane(5.)\ncamera.camera().set_near_plane(0.01)\ncamera.camera().set_fov(60.0)\n

    or all at once:

    C++Python
    camera->camera().set_camera_params(5., // far plane\n0.01f, // near plane\n60.0f, // field of view\n600, // width\n400 // height\n);\n
    camera.camera().set_camera_params(5., #far plane\n0.01, #near plane\n60.0, # field of view\n600, # width\n400) #height\n

    You can find a complete example at cameras.cpp.

    "},{"location":"faq/#how-can-i-interact-with-the-camera","title":"How can I interact with the camera?","text":"

We can translate the camera with the WASD keys, zoom in and out using the mouse wheel, and rotate the camera by holding the left mouse button and moving the mouse.

    "},{"location":"faq/#what-do-the-numbers-in-the-status-bar-mean","title":"What do the numbers in the status bar mean?","text":"

    The status bar looks like this:

Here, simulation time gives us the total simulated time (in seconds), while wall time gives us the total real time (in seconds) that has passed since we started simulating. The next number X.Xx gives us the real-time factor: for example, 1.1x means that the simulation runs 1.1 times faster than real-time, whereas 0.7x means that the simulation runs slower than real-time. The value it: XX ms reports how long the last iteration took (in milliseconds). The last part tells us whether the simulation tries to adhere to real-time or not: sync means that RobotDART will slow down the simulation so that it matches real-time, whereas no-sync means that RobotDART will run the simulation as fast as possible.

    "},{"location":"faq/#how-can-i-alter-the-graphics-scene-eg-change-lighting-conditions","title":"How can I alter the graphics scene (e.g., change lighting conditions)?","text":"

    When creating a graphics object, you can pass a GraphicsConfiguration object that changes the default values:

    C++Python
    robot_dart::gui::magnum::GraphicsConfiguration configuration;\n// We can change the width/height of the window (or camera image dimensions)\nconfiguration.width = 1280;\nconfiguration.height = 960;\nconfiguration.title = \"Graphics Tutorial\"; // We can set a title for our window\n// We can change the configuration for shadows\nconfiguration.shadowed = true;\nconfiguration.transparent_shadows = true;\nconfiguration.shadow_map_size = 1024;\n// We can also alter some specifications for the lighting\nconfiguration.max_lights = 3; // maximum number of lights for our scene [default=3]\nconfiguration.specular_strength = 0.25; // strength of the specular component\n// Some extra configuration for the main camera\nconfiguration.draw_main_camera = true;\nconfiguration.draw_debug = true;\nconfiguration.draw_text = true;\n// We can also change the background color [default=black]\nconfiguration.bg_color = Eigen::Vector4d{1.0, 1.0, 1.0, 1.0};\n// Create the graphics object with the configuration as parameter\nauto graphics = std::make_shared<robot_dart::gui::magnum::Graphics>(configuration);\n
configuration = rd.gui.GraphicsConfiguration()\n# We can change the width/height of the window (or camera image dimensions)\nconfiguration.width = 1280\nconfiguration.height = 960\nconfiguration.title = \"Graphics Tutorial\"  # We can set a title for our window\n# We can change the configuration for shadows\nconfiguration.shadowed = True\nconfiguration.transparent_shadows = True\nconfiguration.shadow_map_size = 1024\n# We can also alter some specifications for the lighting\nconfiguration.max_lights = 3  # maximum number of lights for our scene [default=3]\nconfiguration.specular_strength = 0.25  # strength of the specular component\n#  Some extra configuration for the main camera\nconfiguration.draw_main_camera = True\nconfiguration.draw_debug = True\nconfiguration.draw_text = True\n# We can also change the background color [default = black]\nconfiguration.bg_color = [1., 1., 1., 1.]\n# Create the graphics object with the configuration as a parameter\ngraphics = rd.gui.Graphics(configuration)\n

    You can disable or enable shadows on the fly as well:

    C++Python
    // Disable shadows\ngraphics->enable_shadows(false, false);\nsimu.run(1.);\n// Enable shadows only for non-transparent objects\ngraphics->enable_shadows(true, false);\nsimu.run(1.);\n// Enable shadows for transparent objects as well\ngraphics->enable_shadows(true, true);\nsimu.run(1.);\n
    # Disable shadows\ngraphics.enable_shadows(False, False)\nsimu.run(1.)\n# Enable shadows only for non-transparent objects\ngraphics.enable_shadows(True, False)\nsimu.run(1.)\n# Enable shadows for transparent objects as well\ngraphics.enable_shadows(True, True)\nsimu.run(1.)\n

    You can also add your own lights. The application by default creates 2 light sources and the maximum number of lights is 3 (you can change this once before the creation of the graphics object via the GraphicsConfiguration object). So usually before you add your lights, you have to clear the default lights:

    C++Python
    // Clear Lights\ngraphics->clear_lights();\n
    # Clear Lights\ngraphics.clear_lights()\n

    Then you must create a custom light material:

    C++Python
    // Create Light material\nMagnum::Float shininess = 1000.f;\nMagnum::Color4 white = {1.f, 1.f, 1.f, 1.f};\n// ambient, diffuse, specular\nauto custom_material = robot_dart::gui::magnum::gs::Material(white, white, white, shininess);\n
# Create Light material\nshininess = 1000.\nwhite = magnum.Color4(1., 1., 1., 1.)\n# ambient, diffuse, specular\ncustom_material = rd.gui.Material(white, white, white, shininess)\n

Now you can add one or more of the following lights:

    Point Light:

    C++Python
    // Create point light\nMagnum::Vector3 position = {0.f, 0.f, 2.f};\nMagnum::Float intensity = 1.f;\nMagnum::Vector3 attenuation_terms = {1.f, 0.f, 0.f};\nauto point_light = robot_dart::gui::magnum::gs::create_point_light(position, custom_material, intensity, attenuation_terms);\ngraphics->add_light(point_light);\n
    # Create point light\nposition = magnum.Vector3(0., 0., 2.)\nintensity = 1.\nattenuation_terms = magnum.Vector3(1., 0., 0.)\npoint_light = rd.gui.create_point_light(position, custom_material, intensity, attenuation_terms)\ngraphics.add_light(point_light)\n

    Spot Light:

    C++Python
    // Create spot light\nMagnum::Vector3 position = {0.f, 0.f, 1.f};\nMagnum::Vector3 direction = {-1.f, -1.f, -1.f};\nMagnum::Float intensity = 1.f;\nMagnum::Vector3 attenuation_terms = {1.f, 0.f, 0.f};\nMagnum::Float spot_exponent = M_PI;\nMagnum::Float spot_cut_off = M_PI / 8;\nauto spot_light = robot_dart::gui::magnum::gs::create_spot_light(position, custom_material, direction, spot_exponent, spot_cut_off, intensity, attenuation_terms);\ngraphics->add_light(spot_light);\n
    # Create spot light\nposition = magnum.Vector3(0., 0., 1.)\ndirection = magnum.Vector3(-1, -1, -1)\nintensity = 1.\nattenuation_terms = magnum.Vector3(1., 0., 0.)\nspot_exponent = np.pi\nspot_cut_off = np.pi / 8\nspot_light = rd.gui.create_spot_light(position, custom_material, direction, spot_exponent, spot_cut_off, intensity, attenuation_terms)\ngraphics.add_light(spot_light)\n

    Directional Light:

    C++Python
    // Create directional light\nMagnum::Vector3 direction = {-1.f, -1.f, -1.f};\nauto directional_light = robot_dart::gui::magnum::gs::create_directional_light(direction, custom_material);\ngraphics->add_light(directional_light);\n
    # Create directional light\ndirection = magnum.Vector3(-1, -1, -1)\ndirectional_light = rd.gui.create_directional_light(direction, custom_material)\ngraphics.add_light(directional_light)\n
    "},{"location":"faq/#i-want-to-visualize-a-target-configuration-of-my-robot-is-this-possible","title":"I want to visualize a target configuration of my robot, is this possible?","text":"

Yes, this is possible. RobotDART gives you the ability to create a clone of your robot that has no physics and no contacts, just visuals:

    C++Python
    // Add a ghost robot; only visuals, no dynamics, no collision\nauto ghost = robot->clone_ghost();\nsimu.add_robot(ghost);\n
    # Add a ghost robot; only visuals, no dynamics, no collision\nghost = robot.clone_ghost()\nsimu.add_robot(ghost)\n
    "},{"location":"faq/#how-can-i-control-my-robot","title":"How can I control my robot ?","text":"

    PD control

    C++Python
    // add a PD-controller to the arm\n// set desired positions\nEigen::VectorXd ctrl = robot_dart::make_vector({0., M_PI / 4., 0., -M_PI / 4., 0., M_PI / 2., 0., 0.});\n// add the controller to the robot\nauto controller = std::make_shared<robot_dart::control::PDControl>(ctrl);\nrobot->add_controller(controller);\ncontroller->set_pd(300., 50.);\n
    # add a PD-controller to the arm\n# set desired positions\nctrl = [0., np.pi / 4., 0., -np.pi / 4., 0., np.pi / 2., 0., 0.]\n# add the controller to the robot\ncontroller = rd.PDControl(ctrl)\nrobot.add_controller(controller)\ncontroller.set_pd(300., 50.)\n

    Simple control

    C++Python
    auto controller1 = std::make_shared<robot_dart::control::SimpleControl>(ctrl);\nrobot->add_controller(controller1);\n
    controller1 = rd.SimpleControl(ctrl)\nrobot.add_controller(controller1)\n

    Robot control

    C++Python
    class MyController : public robot_dart::control::RobotControl {\npublic:\nMyController() : robot_dart::control::RobotControl() {}\nMyController(const Eigen::VectorXd& ctrl, bool full_control) : robot_dart::control::RobotControl(ctrl, full_control) {}\nMyController(const Eigen::VectorXd& ctrl, const std::vector<std::string>& dof_names) : robot_dart::control::RobotControl(ctrl, dof_names) {}\nvoid configure() override\n{\n_active = true;\n}\nEigen::VectorXd calculate(double) override\n{\nauto robot = _robot.lock();\nEigen::VectorXd cmd = 100. * (_ctrl - robot->positions(_controllable_dofs));\nreturn cmd;\n}\nstd::shared_ptr<robot_dart::control::RobotControl> clone() const override\n{\nreturn std::make_shared<MyController>(*this);\n}\n};\n
    class MyController(rd.RobotControl):\ndef __init__(self, ctrl, full_control):\nrd.RobotControl.__init__(self, ctrl, full_control)\ndef __init__(self, ctrl, controllable_dofs):\nrd.RobotControl.__init__(self, ctrl, controllable_dofs)\ndef configure(self):\nself._active = True\ndef calculate(self, t):\ntarget = np.array([self._ctrl])\ncmd = 100*(target-self.robot().positions(self._controllable_dofs))\nreturn cmd[0]\n# TO-DO: This is NOT working at the moment!\ndef clone(self):\nreturn MyController(self._ctrl, self._controllable_dofs)\n
    "},{"location":"faq/#is-there-a-way-to-control-the-simulation-timestep","title":"Is there a way to control the simulation timestep?","text":"

    When creating a RobotDARTSimu object you choose the simulation timestep:

    C++Python
    // choose time step of 0.001 seconds\nrobot_dart::RobotDARTSimu simu(0.001);\n
    # choose time step of 0.001 seconds\nsimu = rd.RobotDARTSimu(0.001)\n

    which can later be modified by:

    C++Python
    // set timestep to 0.005 and update control frequency(bool)\nsimu.set_timestep(0.005, true);\n
    # set timestep to 0.005 and update control frequency(bool)\nsimu.set_timestep(0.005, True)\n
    "},{"location":"faq/#i-want-to-simulate-a-mars-environment-is-it-possible-to-change-the-gravitational-force-of-the-simulation-environment","title":"I want to simulate a mars environment, is it possible to change the gravitational force of the simulation environment?","text":"

Yes, you can modify the 3-dimensional gravity vector of the simulation:

    C++Python
    // Set gravitational force of mars\nEigen::Vector3d mars_gravity = {0., 0., -3.721};\nsimu.set_gravity(mars_gravity);\n
    # set gravitational force of mars\nmars_gravity = [0., 0., -3.721]\nsimu.set_gravity(mars_gravity)\n
    "},{"location":"faq/#which-collision-detectors-are-available-what-are-their-differences-how-can-i-choose-between-them","title":"Which collision detectors are available? What are their differences? How can I choose between them?","text":"DART FCL ODE Bullet Support only basic shapes Full-featured collision detector fully integrated by DART External collision detector of ODE External collision detector of Bullet This is building along with DART This is a required dependency of DART Needs an external library Needs an external library Very fast for small scenes Accurate detailed collisions, but not very fast Fast collision detection (the integration is not complete) Fast and accurate collision detection (works well for wheels as well)

    We can easily select one of the available collision detectors using the simulator object:

    C++Python
    simu.set_collision_detector(\"fcl\"); // collision_detector can be \"dart\", \"fcl\", \"ode\" or \"bullet\" (case does not matter)\n
    simu.set_collision_detector(\"fcl\") # collision_detector can be \"dart\", \"fcl\", \"ode\" or \"bullet\" (case does not matter)\n
    "},{"location":"faq/#my-robot-does-not-self-collide-how-can-i-change-this","title":"My robot does not self-collide. How can I change this?","text":"

One possible cause is that self-collision is disabled. You can check and change this:

    C++Python
    if (!robot->self_colliding()) {\nstd::cout << \"Self collision is not enabled\" << std::endl;\n// set self collisions to true and adjacent collisions to false\nrobot->set_self_collision(true, false);\n}\n
    if(not robot.self_colliding()):\nprint(\"Self collision is not enabled\")\n# set self collisions to true and adjacent collisions to false\nrobot.set_self_collision(True, False)\n
    "},{"location":"faq/#how-can-i-compute-kinematicdynamic-properties-of-my-robot-eg-jacobians-mass-matrix","title":"How can I compute kinematic/dynamic properties of my robot (e.g., Jacobians, Mass Matrix)?","text":"

    Kinematic Properties:

    C++Python
    // Get Joint Positions(Angles)\nauto joint_positions = robot->positions();\n// Get Joint Velocities\nauto joint_vels = robot->velocities();\n// Get Joint Accelerations\nauto joint_accs = robot->accelerations();\n// Get link_name(str) Transformation matrix with respect to the world frame.\nauto eef_tf = robot->body_pose(link_name);\n// Get translation vector from transformation matrix\nauto eef_pos = eef_tf.translation();\n// Get rotation matrix from tranformation matrix\nauto eef_rot = eef_tf.rotation();\n// Get link_name 6d pose vector [logmap(eef_tf.linear()), eef_tf.translation()]\nauto eef_pose_vec = robot->body_pose_vec(link_name);\n// Get link_name 6d velocity vector [angular, cartesian]\nauto eef_vel = robot->body_velocity(link_name);\n// Get link_name 6d acceleration vector [angular, cartesian]\nauto eef_acc = robot->body_acceleration(link_name);\n// Jacobian targeting the origin of link_name(str)\nauto jacobian = robot->jacobian(link_name);\n// Jacobian time derivative\nauto jacobian_deriv = robot->jacobian_deriv(link_name);\n// Center of Mass Jacobian\nauto com_jacobian = robot->com_jacobian();\n// Center of Mass Jacobian Time Derivative\nauto com_jacobian_deriv = robot->com_jacobian_deriv();\n
    # Get Joint Positions(Angles)\njoint_positions = robot.positions()\n# Get Joint Velocities\njoint_vels = robot.velocities()\n# Get Joint Accelerations\njoint_accs = robot.accelerations()\n# Get link_name(str) Transformation matrix with respect to the world frame.\neef_tf = robot.body_pose(link_name)\n# Get translation vector from transformation matrix\neef_pos = eef_tf.translation()\n# Get rotation matrix from tranformation matrix\neef_rot = eef_tf.rotation()\n# Get link_name 6d pose vector [logmap(eef_tf.linear()), eef_tf.translation()]\neef_pose_vec = robot.body_pose_vec(link_name)\n# Get link_name 6d velocity vector [angular, cartesian]\neef_vel = robot.body_velocity(link_name)\n# Get link_name 6d acceleration vector [angular, cartesian]\neef_acc = robot.body_acceleration(link_name)\n# Jacobian targeting the origin of link_name(str)\njacobian = robot.jacobian(link_name)\n# Jacobian time derivative\njacobian_deriv = robot.jacobian_deriv(link_name)\n# Center of Mass Jacobian\ncom_jacobian = robot.com_jacobian()\n# Center of Mass Jacobian Time Derivative\ncom_jacobian_deriv = robot.com_jacobian_deriv()\n

    Dynamic Properties:

    C++Python
    // Get Joint Forces\nauto joint_forces = robot->forces();\n// Get link's mass\nauto eef_mass = robot->body_mass(link_name);\n// Mass Matrix of robot\nauto mass_matrix = robot->mass_matrix();\n// Inverse of Mass Matrix\nauto inv_mass_matrix = robot->inv_mass_matrix();\n// Augmented Mass matrix\nauto aug_mass_matrix = robot->aug_mass_matrix();\n// Inverse of Augmented Mass matrix\nauto inv_aug_mass_matrix = robot->inv_aug_mass_matrix();\n// Coriolis Force vector\nauto coriolis = robot->coriolis_forces();\n// Gravity Force vector\nauto gravity = robot->gravity_forces();\n// Combined vector of Coriolis Force and Gravity Force\nauto coriolis_gravity = robot->coriolis_gravity_forces();\n// Constraint Force Vector\nauto constraint_forces = robot->constraint_forces();\n
    # Get Joint Forces\njoint_forces = robot.forces()\n# Get link's mass\neef_mass = robot.body_mass(link_name)\n# Mass Matrix of robot\nmass_matrix = robot.mass_matrix()\n# Inverse of Mass Matrix\ninv_mass_matrix = robot.inv_mass_matrix()\n# Augmented Mass matrix\naug_mass_matrix = robot.aug_mass_matrix()\n# Inverse of Augmented Mass matrix\ninv_aug_mass_matrix = robot.inv_aug_mass_matrix()\n# Coriolis Force vector\ncoriolis = robot.coriolis_forces()\n# Gravity Force vector\ngravity = robot.gravity_forces()\n# Combined vector of Coriolis Force and Gravity Force\ncoriolis_gravity = robot.coriolis_gravity_forces()\n# NOT IMPLEMENTED\n# # Constraint Force Vector\n# constraint_forces = robot.constraint_forces()\n
    "},{"location":"faq/#is-there-a-way-to-change-the-joint-properties-eg-actuation-friction","title":"Is there a way to change the joint properties (e.g., actuation, friction)?","text":"

There are 6 types of actuators available; you can set the same actuator type for multiple joints at once, or you can set each joint separately:

    C++Python
    // Set all DoFs to same actuator\nrobot->set_actuator_types(\"servo\"); // actuator types can be \"servo\", \"torque\", \"velocity\", \"passive\", \"locked\", \"mimic\"\n// You can also set actuator types separately\nrobot->set_actuator_type(\"torque\", \"iiwa_joint_5\");\n
    # Set all DoFs to same actuator\n# actuator types can be \"servo\", \"torque\", \"velocity\", \"passive\", \"locked\", \"mimic\"\nrobot.set_actuator_types(\"servo\")\n# You can also set actuator types separately\nrobot.set_actuator_type(\"torque\", \"iiwa_joint_5\")\n

    To enable position and velocity limits for the actuators:

    C++Python
    // \u0395nforce joint position and velocity limits\nrobot->set_position_enforced(true);\n
    # \u0395nforce joint position and velocity limits\nrobot.set_position_enforced(True)\n

    Every DOF's limits (position, velocity, acceleration, force) can be modified:

    C++Python
    // Modify Position Limits\nEigen::VectorXd pos_upper_lims(7);\npos_upper_lims << 2.096, 2.096, 2.096, 2.096, 2.096, 2.096, 2.096;\nrobot->set_position_upper_limits(pos_upper_lims, dof_names);\nrobot->set_position_lower_limits(-pos_upper_lims, dof_names);\n// Modify Velocity Limits\nEigen::VectorXd vel_upper_lims(7);\nvel_upper_lims << 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5;\nrobot->set_velocity_upper_limits(vel_upper_lims, dof_names);\nrobot->set_velocity_lower_limits(-vel_upper_lims, dof_names);\n// Modify Force Limits\nEigen::VectorXd force_upper_lims(7);\nforce_upper_lims << 150, 150, 150, 150, 150, 150, 150;\nrobot->set_force_upper_limits(force_upper_lims, dof_names);\nrobot->set_force_lower_limits(-force_upper_lims, dof_names);\n// Modify Acceleration Limits\nEigen::VectorXd acc_upper_lims(7);\nacc_upper_lims << 1500, 1500, 1500, 1500, 1500, 1500, 1500;\nrobot->set_acceleration_upper_limits(acc_upper_lims, dof_names);\nrobot->set_acceleration_lower_limits(-acc_upper_lims, dof_names);\n
    # Modify Position Limits\npos_upper_lims = np.array([2.096, 2.096, 2.096, 2.096, 2.096, 2.096, 2.096])\nrobot.set_position_upper_limits(pos_upper_lims, dof_names)\nrobot.set_position_lower_limits(-1*pos_upper_lims, dof_names)\n# Modify Velocity Limits\nvel_upper_lims = np.array([1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5])\nrobot.set_velocity_upper_limits(vel_upper_lims, dof_names)\nrobot.set_velocity_lower_limits(-1*vel_upper_lims, dof_names)\n# Modify Force Limits\nforce_upper_lims = np.array([150, 150, 150, 150, 150, 150, 150])\nrobot.set_force_upper_limits(force_upper_lims, dof_names)\nrobot.set_force_lower_limits(-1*force_upper_lims, dof_names)\n# Modify Acceleration Limits\nacc_upper_lims = np.array([1500, 1500, 1500, 1500, 1500, 1500, 1500])\nrobot.set_acceleration_upper_limits(acc_upper_lims, dof_names)\nrobot.set_acceleration_lower_limits(-1*acc_upper_lims, dof_names)\n

You can also modify the damping coefficients, Coulomb frictions and spring stiffnesses of every DOF:

    C++Python
    // Modify Damping Coefficients\nstd::vector<double> damps = {0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4};\nrobot->set_damping_coeffs(damps, dof_names);\n// Modify Coulomb Frictions\nstd::vector<double> cfrictions = {0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001};\nrobot->set_coulomb_coeffs(cfrictions, dof_names);\n// Modify  Spring Stiffness\nstd::vector<double> stiffnesses = {0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001};\nrobot->set_spring_stiffnesses(stiffnesses, dof_names);\n
    # Modify Damping Coefficients\ndamps = [0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4]\nrobot.set_damping_coeffs(damps, dof_names)\n# Modify Coulomb Frictions\ncfrictions = [0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001]\nrobot.set_coulomb_coeffs(cfrictions, dof_names)\n# Modify  Spring Stiffness\nstiffnesses = [0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001]\nrobot.set_spring_stiffnesses(stiffnesses, dof_names)\n
    "},{"location":"faq/#what-are-the-supported-sensors-how-can-i-use-an-imu","title":"What are the supported sensors? How can I use an IMU?","text":"

Sensors in RobotDART can be added only through the simulator object. All sensors can be added without being attached to any body or joint, but some of them can operate only when attached to something (e.g., ForceTorque sensors).

    "},{"location":"faq/#torque-sensor","title":"Torque sensor","text":"

    Torque sensors can be added to every joint of the robot:

    C++Python
    // Add torque sensors to the robot\nint ct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::Torque>> tq_sensors(robot->num_dofs());\nfor (const auto& joint : robot->dof_names())\ntq_sensors[ct++] = simu.add_sensor<robot_dart::sensor::Torque>(robot, joint, 1000);\n
    # Add torque sensors to the robot\ntq_sensors = np.empty(robot.num_dofs(), dtype=rd.sensor.Torque)\nct = 0\nfor joint in robot.dof_names():\nsimu.add_sensor(rd.sensor.Torque(robot, joint, 1000))\ntq_sensors[ct] = simu.sensors()[-1]\nct += 1\n

    Torque sensors measure the torque \\(\\tau \\in \\rm I\\!R^n\\) of the attached joint (where \\(n\\) is the DOFs of the joint):

    C++Python
    // vector that contains the torque measurement for every joint (scalar)\nEigen::VectorXd torques_measure(robot->num_dofs());\nfor (const auto& tq_sens : tq_sensors)\ntorques_measure.block<1, 1>(ct++, 0) = tq_sens->torques();\n
    # vector that contains the torque measurement for every joint (scalar)\ntorques_measure = np.empty(robot.num_dofs())\nct = 0\nfor tq_sens in tq_sensors:\ntorques_measure[ct] = tq_sens.torques()\nct += 1\n
    "},{"location":"faq/#force-torque-sensor","title":"Force-Torque sensor","text":"

    Force-Torque sensors can be added to every joint of the robot:

    C++Python
    // Add force-torque sensors to the robot\nct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::ForceTorque>> f_tq_sensors(robot->num_dofs());\nfor (const auto& joint : robot->dof_names())\nf_tq_sensors[ct++] = simu.add_sensor<robot_dart::sensor::ForceTorque>(robot, joint, 1000, \"parent_to_child\");\n
    # Add force-torque sensors to the robot\nf_tq_sensors = np.empty(robot.num_dofs(), dtype=rd.sensor.ForceTorque)\nct = 0\nfor joint in robot.dof_names():\nsimu.add_sensor(rd.sensor.ForceTorque(\nrobot, joint, 1000, \"parent_to_child\"))\nf_tq_sensors[ct] = simu.sensors()[-1]\nprint(f_tq_sensors)\nct += 1\n

Force-Torque sensors measure the force \\(\\boldsymbol{F} \\in \\rm I\\!R^3\\), the torque \\(\\boldsymbol{\\tau} \\in \\rm I\\!R^3\\) and the wrench \\(\\boldsymbol{\\mathcal{F}} = \\begin{bmatrix} \\boldsymbol{\\tau}, \\boldsymbol{F}\\end{bmatrix} \\in \\rm I\\!R^6\\) of the attached joint:

    C++Python
    //  matrix that contains the torque measurement for every joint (3d vector)\nEigen::MatrixXd ft_torques_measure(robot->num_dofs(), 3);\n//  matrix that contains the force measurement for every joint (3d vector)\nEigen::MatrixXd ft_forces_measure(robot->num_dofs(), 3);\n//  matrix that contains the wrench measurement for every joint (6d vector)[torque, force]\nEigen::MatrixXd ft_wrench_measure(robot->num_dofs(), 6);\nct = 0;\nfor (const auto& f_tq_sens : f_tq_sensors) {\nft_torques_measure.block<1, 3>(ct, 0) = f_tq_sens->torque();\nft_forces_measure.block<1, 3>(ct, 0) = f_tq_sens->force();\nft_wrench_measure.block<1, 6>(ct, 0) = f_tq_sens->wrench();\nct++;\n}\n
    #  matrix that contains the torque measurement for every joint (3d vector)\nft_torques_measure = np.empty([robot.num_dofs(), 3])\n#  matrix that contains the force measurement for every joint (3d vector)\nft_forces_measure = np.empty([robot.num_dofs(), 3])\n#  matrix that contains the wrench measurement for every joint (6d vector)[torque, force]\nft_wrench_measure = np.empty([robot.num_dofs(), 6])\nct = 0\nfor f_tq_sens in f_tq_sensors:\nft_torques_measure[ct, :] = f_tq_sens.torque()\nft_forces_measure[ct, :] = f_tq_sens.force()\nft_wrench_measure[ct, :] = f_tq_sens.wrench()\nct += 1\n
    "},{"location":"faq/#imu-sensor","title":"IMU sensor","text":"

    IMU sensors can be added to every link of the robot:

    C++Python
    // Add IMU sensors to the robot\nct = 0;\nstd::vector<std::shared_ptr<robot_dart::sensor::IMU>> imu_sensors(robot->num_bodies());\nfor (const auto& body_node : robot->body_names()) {\n// _imu(std::make_shared<sensor::IMU>(sensor::IMUConfig(body_node(\"head\"), frequency))),\nimu_sensors[ct++] = simu.add_sensor<robot_dart::sensor::IMU>(robot_dart::sensor::IMUConfig(robot->body_node(body_node), 1000));\n}\n
    # Add IMU sensors to the robot\nct = 0\nimu_sensors = np.empty(robot.num_bodies(), dtype=rd.sensor.IMU)\nfor body_node in robot.body_names():\nsimu.add_sensor(rd.sensor.IMU(\nrd.sensor.IMUConfig(robot.body_node(body_node), 1000)))\nimu_sensors[ct] = simu.sensors()[-1]\nct += 1\n

    IMU sensors measure the angular position vector \\(\\boldsymbol{\\theta} \\in \\rm I\\!R^3\\), the angular velocity \\(\\boldsymbol{\\omega} \\in \\rm I\\!R^3\\) and the linear acceleration \\(\\boldsymbol{\\alpha} \\in \\rm I\\!R^3\\) of the attached link:

    C++Python
    Eigen::MatrixXd imu_angular_positions_measure(robot->num_bodies(), 3);\nEigen::MatrixXd imu_angular_velocities_measure(robot->num_bodies(), 3);\nEigen::MatrixXd imu_linear_acceleration_measure(robot->num_bodies(), 3);\nct = 0;\nfor (const auto& imu_sens : imu_sensors) {\nimu_angular_positions_measure.block<1, 3>(ct, 0) = imu_sens->angular_position_vec();\nimu_angular_velocities_measure.block<1, 3>(ct, 0) = imu_sens->angular_velocity();\nimu_linear_acceleration_measure.block<1, 3>(ct, 0) = imu_sens->linear_acceleration();\nct++;\n}\n
    imu_angular_positions_measure = np.empty([robot.num_bodies(), 3])\nimu_angular_velocities_measure = np.empty([robot.num_bodies(), 3])\nimu_linear_acceleration_measure = np.empty([robot.num_bodies(), 3])\nct = 0\nfor imu_sens in imu_sensors:\nimu_angular_positions_measure[ct,:] = imu_sens.angular_position_vec()\nimu_angular_velocities_measure[ct, :] = imu_sens.angular_velocity()\nimu_linear_acceleration_measure[ct,:] = imu_sens.linear_acceleration()\nct += 1\n
    "},{"location":"faq/#rgb-sensor","title":"RGB sensor","text":"

    Any camera can be used as an RGB sensor:

    C++Python
    // a nested std::vector (w*h*3) of the last image taken can be retrieved\nauto rgb_image = camera->image();\n
    # a nested array (w*h*3) of the last image taken can be retrieved\nrgb_image = camera.image()\n

    We can easily save the image and/or transform it to grayscale:

    C++Python
// save the image to a png file\nrobot_dart::gui::save_png_image(\"camera-image.png\", rgb_image);\n// and/or convert the rgb image to grayscale\nauto gray_image = robot_dart::gui::convert_rgb_to_grayscale(rgb_image);\n
    # save the image to a png file\nrd.gui.save_png_image(\"camera-image.png\", rgb_image)\n# and/or convert the rgb image to grayscale\ngray_image = rd.gui.convert_rgb_to_grayscale(rgb_image)\n
    "},{"location":"faq/#rgb_d-sensor","title":"RGB_D sensor","text":"

Any camera can also be configured to record depth:

    C++Python
    camera->camera().record(true, true); // cameras are recording color images by default, enable depth images as well for this example\n
    # cameras are recording color images by default, enable depth images as well for this example\ncamera.camera().record(True, True)\n

    We can then read the RGB and depth images:

    C++Python
    // get the depth image from a camera\n// with a version for visualization or bigger differences in the output\nauto rgb_d_image = camera->depth_image();\n// and the raw values that can be used along with the camera parameters to transform the image to point-cloud\nauto rgb_d_image_raw = camera->raw_depth_image();\n
    # get the depth image from a camera\n# with a version for visualization or bigger differences in the output\nrgb_d_image = camera.depth_image()\n# and the raw values that can be used along with the camera parameters to transform the image to point-cloud\nrgb_d_image_raw = camera.raw_depth_image()\n

    We can save the depth images as well:

    C++Python
    robot_dart::gui::save_png_image(\"camera-depth.png\", rgb_d_image);\nrobot_dart::gui::save_png_image(\"camera-depth-raw.png\", rgb_d_image_raw);\n
    rd.gui.save_png_image(\"camera-depth.png\", rgb_d_image)\nrd.gui.save_png_image(\"camera-depth-raw.png\", rgb_d_image_raw)\n
    "},{"location":"faq/#how-can-i-spawn-multiple-robots-in-parallel","title":"How can I spawn multiple robots in parallel?","text":""},{"location":"faq/#robot-pool-only-in-c","title":"Robot Pool (only in C++)","text":"

    The best way to do so is to create a Robot pool. With a robot pool you:

    • Minimize the overhead of loading robots (it happens only once!) or cloning robots (it never happens)
    • Make sure that your robots are \"clean\" once released from each thread
    • Focus on the important stuff rather than handling robots and threads

    Let's see a more practical example:

    • First we need to include the proper header:
    C++
    #include <robot_dart/robot_pool.hpp>\n
    • Then we create a creator function and the pool object:
    C++
    namespace pool {\n// This function should load one robot: here we load Talos\ninline std::shared_ptr<robot_dart::Robot> robot_creator()\n{\nreturn std::make_shared<robot_dart::robots::Talos>();\n}\n// To create the object we need to pass the robot_creator function and the number of maximum parallel threads\nrobot_dart::RobotPool robot_pool(robot_creator, NUM_THREADS);\n} // namespace pool\n

The creator function is the function responsible for loading your robot. It should basically look like standalone code that loads or creates a robot.

    • Next, we create a few threads that utilize the robots (in your code you might be using OpenMP or TBB):
    C++
    // for the example, we run NUM_THREADS threads of eval_robot()\nstd::vector<std::thread> threads(NUM_THREADS * 2); // *2 to see some reuse\nfor (size_t i = 0; i < threads.size(); ++i)\nthreads[i] = std::thread(eval_robot, i); // eval_robot is the function that uses the robot\n
    • An example evaluation function:
    C++
    inline void eval_robot(int i)\n{\n// We get one available robot\nauto robot = pool::robot_pool.get_robot();\nstd::cout << \"Robot \" << i << \" got [\" << robot->skeleton() << \"]\" << std::endl;\n/// --- some robot_dart code ---\nsimulate_robot(robot);\n// --- do something with the result\nstd::cout << \"End of simulation \" << i << std::endl;\n// CRITICAL : free your robot !\npool::robot_pool.free_robot(robot);\nstd::cout << \"Robot \" << i << \" freed!\" << std::endl;\n}\n
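Since forgetting free_robot() would starve the pool, one can wrap the get/free pair in a small RAII guard. A minimal sketch (PoolGuard and eval_robot_safe are hypothetical helpers that only rely on the get_robot()/free_robot() API shown above):

C++
// returns the robot to the pool even if the scope is left via an exception or early return\nstruct PoolGuard {\nrobot_dart::RobotPool& pool;\nstd::shared_ptr<robot_dart::Robot> robot;\nexplicit PoolGuard(robot_dart::RobotPool& p) : pool(p), robot(p.get_robot()) {}\n~PoolGuard() { pool.free_robot(robot); }\nPoolGuard(const PoolGuard&) = delete; // copying would free the robot twice\nPoolGuard& operator=(const PoolGuard&) = delete;\n};\n\ninline void eval_robot_safe(int i)\n{\nPoolGuard guard(pool::robot_pool);\nsimulate_robot(guard.robot); // the robot is freed automatically when guard goes out of scope\n}\n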
    "},{"location":"faq/#i-need-to-simulate-many-worlds-with-camera-sensors-in-parallel-how-can-i-do-this","title":"I need to simulate many worlds with camera sensors in parallel. How can I do this?","text":"

In magnum_contexts.cpp you can find an example showcasing the use of many worlds with camera sensors in parallel. The main takeaway is that we need to pre-allocate OpenGL contexts so that each thread can take one and use it to render its own world.

    C++
    // Load robot from URDF\nauto global_robot = std::make_shared<robot_dart::robots::Iiwa>();\nstd::vector<std::thread> workers;\n// Set maximum number of parallel GL contexts (this is GPU-dependent)\nrobot_dart::gui::magnum::GlobalData::instance()->set_max_contexts(4);\n// We want 15 parallel simulations with different GL context each\nsize_t N_workers = 15;\nstd::mutex mutex;\nsize_t id = 0;\n// Launch the workers\nfor (size_t i = 0; i < N_workers; i++) {\nworkers.push_back(std::thread([&] {\nmutex.lock();\nsize_t index = id++;\nmutex.unlock();\n// Get the GL context -- this is a blocking call\n// will wait until one GL context is available\n// get_gl_context(gl_context); // this call will not sleep between failed queries\nget_gl_context_with_sleep(gl_context, 20); // this call will sleep 20ms between each failed query\n// Do the simulation\nauto robot = global_robot->clone();\nrobot_dart::RobotDARTSimu simu(0.001);\nEigen::VectorXd ctrl = robot_dart::make_vector({0., M_PI / 3., 0., -M_PI / 4., 0., 0., 0.});\nauto controller = std::make_shared<robot_dart::control::PDControl>(ctrl);\nrobot->add_controller(controller);\ncontroller->set_pd(300., 50.);\n// Magnum graphics\nrobot_dart::gui::magnum::GraphicsConfiguration configuration = robot_dart::gui::magnum::WindowlessGraphics::default_configuration();\nconfiguration.width = 1024;\nconfiguration.height = 768;\nauto graphics = std::make_shared<robot_dart::gui::magnum::WindowlessGraphics>(configuration);\nsimu.set_graphics(graphics);\n// Position the camera differently for each thread to visualize the difference\ngraphics->look_at({0.4 * index, 3.5 - index * 0.1, 2.}, {0., 0., 0.25});\n// record images from main camera/graphics\n// graphics->set_recording(true); // WindowlessGLApplication records images by default\nsimu.add_robot(robot);\nsimu.run(6);\n// Save the image for verification\nrobot_dart::gui::save_png_image(\"camera_\" + std::to_string(index) + \".png\",\ngraphics->image());\n// Release the GL context for another thread to use\nrelease_gl_context(gl_context);\n}));\n}\n// Wait for all the workers\nfor (size_t i = 0; i < workers.size(); i++) {\nworkers[i].join();\n}\n
    "},{"location":"faq/#i-do-not-know-how-to-use-waf-how-can-i-detect-robotdart-from-cmake","title":"I do not know how to use waf. How can I detect RobotDART from CMake?","text":"

You need to use waf to build RobotDART, but a CMake module is installed along with the library. Thus, it is possible to use RobotDART in your code via CMake. You can find a complete example at cmake/example. In short, the CMake file would look like this:

    cmake_minimum_required(VERSION 3.10 FATAL_ERROR)\nproject(robot_dart_example)\n# we ask for Magnum because we want to build the graphics\nfind_package(RobotDART REQUIRED OPTIONAL_COMPONENTS Magnum)\nadd_executable(robot_dart_example example.cpp)\ntarget_link_libraries(robot_dart_example\nRobotDART::Simu\n)\nif(RobotDART_Magnum_FOUND)\nadd_executable(robot_dart_example_graphics example.cpp)\ntarget_link_libraries(robot_dart_example_graphics\nRobotDART::Simu\nRobotDART::Magnum\n)\nendif()\n
    "},{"location":"faq/#where-can-i-find-complete-working-examples-for-either-c-or-python","title":"Where can I find complete working examples for either c++ or python?","text":"

    C++ examples

    Python examples

    "},{"location":"install/","title":"Manual Installation","text":""},{"location":"install/#manual-installation-of-robotdart","title":"Manual Installation of RobotDART","text":""},{"location":"install/#dependencies","title":"Dependencies","text":""},{"location":"install/#required","title":"Required","text":"
    • Ubuntu (it should work on versions >= 14.04) or OSX
    • Eigen3 (needed by DART)
    • Boost (needed by DART)
    • DART, http://dartsim.github.io/
    "},{"location":"install/#optional","title":"Optional","text":"
    • Magnum (for graphics), https://github.com/mosra/magnum
    "},{"location":"install/#installation-of-the-dependencies","title":"Installation of the dependencies","text":""},{"location":"install/#installing-boost-and-eigen3","title":"Installing Boost and Eigen3","text":"

For Ubuntu-based distributions, use the following commands to install Eigen3 and Boost:

    sudo apt-get update\nsudo apt-get install libeigen3-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev\n

    For OSX with brew:

    brew install eigen3\nbrew install boost\n
    "},{"location":"install/#installing-dart","title":"Installing DART","text":"

    In order to use RobotDART, you need to install DART (from source).

For Ubuntu systems, please follow the detailed installation instructions on the DART documentation website. Make sure you do not forget to add the PPAs as detailed here. What is more, you can enable the -DART_ENABLE_SIMD flag in the CMake configuration. In addition, you need the following optional dependency: DART Parsers. Lastly, it is recommended to use either the master branch or the v6.12.1 tag (and not the one provided in DART's documentation). In short, you should do the following:

    Ubuntu <= 14.04 only

    sudo apt-add-repository ppa:libccd-debs/ppa\nsudo apt-add-repository ppa:fcl-debs/ppa\n

    For all Ubuntu distributions

sudo apt-add-repository ppa:dartsim/ppa\nsudo apt-get update\n\nsudo apt-get install build-essential cmake pkg-config git\nsudo apt-get install libeigen3-dev libassimp-dev libccd-dev libfcl-dev libboost-regex-dev libboost-system-dev\n\nsudo apt-get install libtinyxml-dev libtinyxml2-dev\nsudo apt-get install liburdfdom-dev liburdfdom-headers-dev\n\ncd /path/to/tmp/folder\ngit clone https://github.com/dartsim/dart.git\ncd dart\ngit checkout v6.12.1\n\nmkdir build\ncd build\ncmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\n

If you want to install DART somewhere other than /usr/local, you should use the -DCMAKE_INSTALL_PREFIX=/path/to/install/folder cmake argument.

For OSX systems with homebrew, please follow the detailed installation instructions on the DART documentation website. You need to follow the same procedure as for Ubuntu systems. In short, you should do the following:

brew install eigen\nbrew install assimp\nbrew install libccd\nbrew install dartsim/dart/fcl04\nbrew install boost\n\nbrew install tinyxml\nbrew install tinyxml2\nbrew install urdfdom\n\ncd /path/to/tmp/folder\ngit clone https://github.com/dartsim/dart.git\ncd dart\ngit checkout v6.12.1\n\nmkdir build\ncd build\ncmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\n
    "},{"location":"install/#installing-magnum","title":"Installing Magnum","text":"

Magnum depends on Corrade, and we are going to use a few plugins and extras from the library. We are also going to use Glfw and Glx for the back-end. Follow the instructions below:

    #installation of Glfw and OpenAL\n# Ubuntu\nsudo apt-get install libglfw3-dev libglfw3 libopenal-dev libassimp-dev\n# Mac OSX\nbrew install glfw3 openal-soft assimp\n\n# installation of Corrade\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/corrade.git\ncd corrade\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release ..\nmake -j\nsudo make install\n\n# installation of Magnum\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum.git\ncd magnum\nmkdir build && cd build\n# Ubuntu\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_AUDIO=ON -DWITH_DEBUGTOOLS=ON -DWITH_GL=ON -DWITH_MESHTOOLS=ON -DWITH_PRIMITIVES=ON -DWITH_SCENEGRAPH=ON -DWITH_SHADERS=ON -DWITH_TEXT=ON -DWITH_TEXTURETOOLS=ON -DWITH_TRADE=ON -DWITH_GLFWAPPLICATION=ON -DWITH_WINDOWLESSEGLAPPLICATION=ON -DWITH_OPENGLTESTER=ON -DWITH_ANYAUDIOIMPORTER=ON -DWITH_ANYIMAGECONVERTER=ON -DWITH_ANYIMAGEIMPORTER=ON -DWITH_ANYSCENEIMPORTER=ON -DWITH_MAGNUMFONT=ON -DWITH_OBJIMPORTER=ON -DWITH_TGAIMPORTER=ON -DWITH_WAVAUDIOIMPORTER=ON -DTARGET_HEADLESS=ON .. # this will enable almost all features of Magnum that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\n# Mac OSX\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_AUDIO=ON -DWITH_DEBUGTOOLS=ON -DWITH_GL=ON -DWITH_MESHTOOLS=ON -DWITH_PRIMITIVES=ON -DWITH_SCENEGRAPH=ON -DWITH_SHADERS=ON -DWITH_TEXT=ON -DWITH_TEXTURETOOLS=ON -DWITH_TRADE=ON -DWITH_GLFWAPPLICATION=ON -DWITH_WINDOWLESSCGLAPPLICATION=ON -DWITH_OPENGLTESTER=ON -DWITH_ANYAUDIOIMPORTER=ON -DWITH_ANYIMAGECONVERTER=ON -DWITH_ANYIMAGEIMPORTER=ON -DWITH_ANYSCENEIMPORTER=ON -DWITH_MAGNUMFONT=ON -DWITH_OBJIMPORTER=ON -DWITH_TGAIMPORTER=ON -DWITH_WAVAUDIOIMPORTER=ON .. # this will enable almost all features of Magnum that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\nmake -j\nsudo make install\n\n# installation of Magnum Plugins\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum-plugins.git\ncd magnum-plugins\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_ASSIMPIMPORTER=ON -DWITH_DDSIMPORTER=ON -DWITH_JPEGIMPORTER=ON -DWITH_OPENGEXIMPORTER=ON -DWITH_PNGIMPORTER=ON -DWITH_TINYGLTFIMPORTER=ON -DWITH_STBTRUETYPEFONT=ON .. # this will enable quite a few Magnum Plugins that are not necessarily needed for robot_dart (please refer to the documentation of Magnum for more details on selecting only the ones that you need)\nmake -j\nsudo make install\n\n# installation of Magnum DART Integration (DART needs to be installed) and Eigen Integration\ncd /path/to/tmp/folder\ngit clone https://github.com/mosra/magnum-integration.git\ncd magnum-integration\nmkdir build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release -DWITH_DART=ON -DWITH_EIGEN=ON ..\nmake -j\nsudo make install\n
    "},{"location":"install/#compilation-and-running-the-examples","title":"Compilation and running the examples","text":"

The compilation of the library is straightforward:

    • retrieve the code, for instance with git clone https://github.com/resibots/robot_dart.git
    • cd /path/to/repo/root
    • ./waf configure
    • ./waf

    To build the examples, execute this: ./waf examples

    Now you can run the examples. For example, to run the arm example you need to type the following: ./build/arm (or ./build/arm_plain to run it without graphics).

    "},{"location":"install/#installing-the-library","title":"Installing the library","text":"

To install the library (assuming that you have already compiled it), you only need to run:

    • ./waf install

By default the library will be installed in /usr/local/lib (for this, sudo ./waf install might be needed) and a static library will be generated. You can change the defaults as follows:

    • ./waf configure --prefix=/path/to/install/dir --shared
    • ./waf install

In short, --prefix changes the directory where the library will be installed, and if --shared is present, a shared library will be created instead of a static one.

    "},{"location":"install/#compiling-the-python-bindings","title":"Compiling the python bindings","text":"

For the python bindings of robot_dart, we need numpy, pybind11, and the python bindings of DART (dartpy) to be installed.

numpy can be installed with pip or standard packages. For pybind11, please follow the instructions on the DART website (focus on the pybind11 part; for the other parts, follow our instructions above).

    For the python bindings of DART, do:

    cd dart\n\nmkdir build\ncd build\ncmake -DDART_BUILD_DARTPY=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr ..\nmake -j4\nsudo make install\nsudo make install-dartpy # for DART >= v6.12.0, we do not need this\n

Then the compilation of robot_dart is almost identical to before:

    • retrieve the code, for instance with git clone https://github.com/resibots/robot_dart.git
    • cd /path/to/repo/root
    • ./waf configure --python (--python enables the python bindings)
    • ./waf
    • Install the library (including the python bindings) as before (no change is needed)
    • Depending on your installation directory you might need to update your PYTHONPATH, e.g. export PYTHONPATH=$PYTHONPATH:/usr/local/lib/python3.8/site-packages/

To run the python examples (you need to have enabled the graphics, that is, installed the Magnum library), run:

    • cd /path/to/repo/root
    • python src/python/example.py or python src/python/example_parallel.py
    "},{"location":"install/#common-issues-with-python-bindings","title":"Common Issues with Python bindings","text":"

One of the most common issues with the python bindings is that the DART bindings might be compiled and installed for python 3 while the robot_dart ones are for python 2. To force the usage of python 3 for robot_dart, use python3 ./waf instead of just ./waf in all the commands above.

    "},{"location":"quick_install/","title":"Installation","text":""},{"location":"quick_install/#scripts-for-quick-installation-of-robotdart","title":"Scripts for Quick Installation of RobotDART","text":"

On this page we provide standalone scripts for installing RobotDART on Ubuntu (20.04) and OSX. The scripts will install all the required dependencies and RobotDART. Notably, all dependencies that need to be compiled from source, as well as RobotDART, will be installed inside the /opt folder. This way, one can rest assured that their system stays clean.

    "},{"location":"quick_install/#ubuntu-2004","title":"Ubuntu 20.04","text":"

    To quickly install RobotDART on Ubuntu 20.04, we just need to run ./scripts/install_ubuntu.sh from the root of the repo. In more detail:

    • git clone https://github.com/resibots/robot_dart.git
    • cd robot_dart
    • ./scripts/install_ubuntu.sh

    This will install everything needed! Once the script is successfully executed, one should add the following to their ~/.bashrc or ~/.zshrc file:

    export PATH=/opt/magnum/bin:$PATH\nexport LD_LIBRARY_PATH=/opt/dart/lib:/opt/magnum/lib:/opt/robot_dart/lib:$LD_LIBRARY_PATH\nexport PYTHONPATH=/opt/dart/lib/python3/dist-packages:/opt/robot_dart/lib/python3.8/site-packages:$PYTHONPATH\n
    "},{"location":"quick_install/#osx","title":"OSX","text":"

    Coming soon

    "},{"location":"robots/","title":"Supported robots","text":""},{"location":"robots/#supported-robots","title":"Supported robots","text":"

Every robot is defined as a URDF, which will be installed in $PREFIX/share/utheque. All robots have pre-defined \"robot classes\" that define sensors and other properties; for your custom/new robots, you will have to add the sensors/properties via the generic robot class (or create a new robot class), as in the sketch below.
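For example, here is a minimal sketch of adding a force/torque sensor to a custom robot through the generic robot class (the URDF file and the joint name \"wrist_joint\" are placeholders for your own model, and the templated add_sensor call is assumed from the simulator API):

C++
#include <robot_dart/robot_dart_simu.hpp>\n#include <robot_dart/sensor/force_torque.hpp>\n\n// load a custom robot through the generic robot class\nauto robot = std::make_shared<robot_dart::Robot>(\"my_robot.urdf\");\nrobot_dart::RobotDARTSimu simu(0.001);\nsimu.add_robot(robot);\n// attach a force/torque sensor (measured at 1000 Hz) on one of the robot's joints\nauto ft = simu.add_sensor<robot_dart::sensor::ForceTorque>(robot, \"wrist_joint\", 1000);\n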

    The URDF files are loaded using the following rules (see utheque::path()):

    • First check in the current directory
    • If not found, check in current_directory/utheque
    • If not found, check in $ROBOT_DART_PATH/utheque
    • If not found, check in the robot dart installation path/robots (e.g., /usr/share/utheque or $HOME/share/utheque)
    • Otherwise, report failure

utheque is a separate header-only library that is installed together with RobotDART (or even standalone), and it can be used in libraries that do not want to depend on RobotDART but still use the curated URDF files.
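For instance, a minimal sketch of resolving a URDF to an absolute path with utheque alone (assuming utheque's header-only path() helper):

C++
#include <utheque/utheque.hpp>\n\n// returns the absolute path of the URDF, following the lookup rules above\nstd::string urdf_path = utheque::path(\"talos/talos.urdf\");\n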

    "},{"location":"robots/#talos-pal-robotics","title":"Talos (PAL Robotics)","text":"

    Talos is a humanoid robot made by PAL Robotics.

    • Datasheet: [pdf]
    • 32 degrees of freedom (6 for each leg, 7 for each arm, 2 for the waist, 2 for the neck, 1 for each gripper)
    • 175 cm / 95 kg
    • IMU in the torso
    • Torque sensors in all joints except head, wrist and gripper (22 torque sensors total)
    • 1 force/torque sensor in each ankle
    • 1 force/torque sensor in each wrist

    We have two URDF files:

    • robots/talos/talos.urdf :
      • accurate (simplified but made of polygons) collision meshes
      • mimic joints for the gripper
• Not compatible with the DART collision detector (you need to use the FCL collision detector, shipped with DART; see the snippet after the loading example below)
      • URDF: [talos.urdf]
      • Example: [talos.cpp]

    Load Talos

    C++
    auto robot = std::make_shared<robot_dart::robots::Talos>();\n
    Python
    robot = rd.Talos()\n
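Since talos.urdf is not compatible with the DART collision detector, remember to select FCL on the simulation (a minimal sketch reusing the set_collision_detector call shown elsewhere on this page):

C++
robot_dart::RobotDARTSimu simu(0.001);\n// the full talos.urdf needs the FCL collision detector\nsimu.set_collision_detector(\"fcl\");\n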
• robots/talos/talos_fast.urdf:
      • no collision except for the feet, which are approximated by boxes
      • grippers are fixed (no movement is allowed)
      • compatible with the DART collision detector
      • URDF: [talos_fast.urdf]
      • Example: [talos_fast.cpp]

talos_fast.urdf is faster because it makes it possible to use the DART collision detector (and has much simpler collision shapes). You should prefer it unless you want to use the grippers (e.g., for manipulation) or are working on self-collisions.

    Load Talos Fast

    C++
    // load talos fast\nauto robot = std::make_shared<robot_dart::robots::TalosFastCollision>();\n// Set actuator types to VELOCITY (for speed)\nrobot->set_actuator_types(\"velocity\");\ndouble dt = 0.001;\nrobot_dart::RobotDARTSimu simu(dt);\n// we can use the DART (fast) collision detector\nsimu.set_collision_detector(\"dart\");\n
    Python
    robot = rd.TalosFastCollision()\n

Please note that the mesh files (.glb) require assimp 5.x (and not assimp 4.x, which usually ships with ROS). If you cannot load the URDF, please check your assimp version.

    "},{"location":"robots/#panda-franka-emika","title":"Panda (Franka Emika)","text":"

The Panda is a modern manipulator made by Franka Emika. It is commonly found in many robotics labs.

    • Datasheet: [pdf]
    • 7 degrees of freedom
    • Can be controlled in torque
    • 18 kg
    • workspace: 855 mm (horizontal), 1190 mm (vertical)
    • URDF: [franka.urdf]
• Example: [franka.cpp]

The URDF includes the gripper.

    Load Franka

    C++
    auto robot = std::make_shared<robot_dart::robots::Franka>();\n
    Python
    robot = rd.Franka()\n
    "},{"location":"robots/#lbr-iiwa-kuka","title":"LBR iiwa (KUKA)","text":"

    The LBR iiwa is manufactured by KUKA. It is similar to the Panda and is also very common in robotics labs.

    • Datasheet: [pdf]
    • We implement the 14 kg version
    • 29.5 kg
    • 7 degrees of freedom
    • URDF: [iiwa.urdf]
    • Example: [iiwa.cpp]

    Load Iiwa

    C++
    auto robot = std::make_shared<robot_dart::robots::Iiwa>();\n
    Python
    robot = rd.Iiwa()\n
    "},{"location":"robots/#icub-iit","title":"iCub (IIT)","text":"

The iCub is an open-source humanoid robot made by the Istituto Italiano di Tecnologia. There are currently 42 iCubs in the world, and many versions.

    • Datasheet (rev 2.3) [pdf]
    • 6 force/torque sensors (upper arms, upper legs, ankles)
    • IMU in the head
• We do not simulate the skin
    • We do not simulate the hands
• Our model is close to Inria's iCub, but it has not been checked in detail.
    • URDF: [icub.urdf]
    • Example [icub.cpp]

Please note that the mesh files (.glb) require assimp 5.x (and not assimp 4.x, which usually ships with ROS). If you cannot load the URDF, please check your assimp version.

    Load iCub

    C++
    auto robot = std::make_shared<robot_dart::robots::ICub>();\n// Set actuator types to VELOCITY motors so that they stay in position without any controller\nrobot->set_actuator_types(\"velocity\");\nrobot_dart::RobotDARTSimu simu(0.001);\nsimu.set_collision_detector(\"fcl\");\n
    Python
    robot = rd.ICub()\n# Set actuator types to VELOCITY motors so that they stay in position without any controller\nrobot.set_actuator_types(\"velocity\")\nsimu = rd.RobotDARTSimu(0.001)\nsimu.set_collision_detector(\"fcl\")\n

    Print IMU sensor measurements

    C++
    std::cout << \"Angular    Position: \" << robot->imu().angular_position_vec().transpose().format(fmt) << std::endl;\nstd::cout << \"Angular    Velocity: \" << robot->imu().angular_velocity().transpose().format(fmt) << std::endl;\nstd::cout << \"Linear Acceleration: \" << robot->imu().linear_acceleration().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print(\"Angular    Position: \",  robot.imu().angular_position_vec().transpose())\nprint(\"Angular    Velocity: \",  robot.imu().angular_velocity().transpose())\nprint(\"Linear Acceleration: \",  robot.imu().linear_acceleration().transpose())\nprint(\"=================================\" )\n

    Print Force-Torque sensor measurements

    C++
    std::cout << \"FT ( force): \" << robot->ft_foot_left().force().transpose().format(fmt) << std::endl;\nstd::cout << \"FT (torque): \" << robot->ft_foot_left().torque().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print(\"FT ( force): \",  robot.ft_foot_left().force().transpose())\nprint(\"FT (torque): \",  robot.ft_foot_left().torque().transpose())\nprint(\"=================================\")\n
    "},{"location":"robots/#unitree-a1","title":"Unitree A1","text":"

A1 is a quadruped robot made by Unitree Robotics.

    • IMU in the torso
    • We do not simulate the foot pressure sensors (yet)
    • One can easily add a depth camera on the head
    • URDF: [a1.urdf]
    • Example [a1.cpp]

    Load A1

    C++
    auto robot = std::make_shared<robot_dart::robots::A1>();\n
    Python
    robot = rd.A1()\n

    Print IMU sensor measurements

    C++
    std::cout << \"Angular    Position: \" << robot->imu().angular_position_vec().transpose().format(fmt) << std::endl;\nstd::cout << \"Angular    Velocity: \" << robot->imu().angular_velocity().transpose().format(fmt) << std::endl;\nstd::cout << \"Linear Acceleration: \" << robot->imu().linear_acceleration().transpose().format(fmt) << std::endl;\nstd::cout << \"=================================\" << std::endl;\n
    Python
    print( \"Angular    Position: \", robot.imu().angular_position_vec().transpose())\nprint( \"Angular    Velocity: \", robot.imu().angular_velocity().transpose())\nprint( \"Linear Acceleration: \", robot.imu().linear_acceleration().transpose())\nprint( \"=================================\")\n

    Add a depth camera on the head

    How can I attach a camera to a moving link?
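For instance, a minimal sketch (assumptions: a graphics object has already been set on the simulation, and \"trunk\" is the A1 body node you want to mount the camera on; the offset transform is illustrative):

C++
// create a camera tied to the main graphics app and record depth as well\nauto camera = std::make_shared<robot_dart::gui::magnum::sensor::Camera>(graphics->magnum_app(), 320, 240);\ncamera->camera().record(true, true);\n// place the camera slightly in front of the body node; adjust for your URDF\nEigen::Isometry3d tf = Eigen::Isometry3d::Identity();\ntf.translation() = Eigen::Vector3d(0.25, 0., 0.05);\ncamera->attach_to_body(robot->body_node(\"trunk\"), tf); // cameras look towards -z by default\nsimu.add_sensor(camera);\n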

Please note that the mesh files (.glb) require assimp 5.x (and not assimp 4.x, which usually ships with ROS). If you cannot load the URDF, please check your assimp version.

    "},{"location":"robots/#dynamixel-based-hexapod-robot-inria-and-others","title":"Dynamixel-based hexapod robot (Inria and others)","text":"

This hexapod is a simple 6-legged robot based on Dynamixel actuators. It is similar to the robot used in the paper \"Robots that can adapt like animals\" (Cully et al., 2015).

    • 6 legs, 3 degrees of freedom for each leg (18 degrees of freedom)
    • simple URDF (no meshes)
    • URDF: [pexod.urdf]
    • Example: [hexapod.cpp]

    Load Hexapod

    C++
    auto robot = std::make_shared<robot_dart::robots::Hexapod>();\n
    Python
    robot = rd.Hexapod()\n

    Load Pexod

    C++
    auto robot = std::make_shared<robot_dart::Robot>(\"pexod.urdf\");\n
    Python
    robot = rd.Robot(\"pexod.urdf\");\n
    "},{"location":"robots/#simple-arm","title":"Simple arm","text":"
    • A simple arm for educational or debugging purposes
    • 5 degrees of freedom
    • simple URDF (no meshes)
    • URDF: [arm.urdf]
    • Example: [arm.cpp]

    Load Simple Arm

    C++
    auto robot = std::make_shared<robot_dart::robots::Arm>();\n
    Python
    robot = rd.Arm()\n
    "},{"location":"robots/#loading-custom-robots","title":"Loading Custom Robots","text":"

RobotDART gives you the ability to load custom robots that are defined in URDF, SDF, SKEL or MJCF files. For example, you can load a URDF model using:

    Load custom Robot

    C++
        auto your_robot = std::make_shared<robot_dart::Robot>(\"path/to/model.urdf\");\n
    Python
        your_robot = robot_dart.Robot(\"path/to/model.urdf\")\n

Load custom Robot with packages (e.g., STL, DAE meshes)

    C++
    std::vector<std::pair<std::string, std::string>> your_model_packages = {{\"model\", \"path/to/model/dir\"}};\nauto your_robot = std::make_shared<robot_dart::Robot>(\"path/to/model.urdf\", your_model_packages, \"packages\");\n
    Python
        your_model_packages = [(\"model\", \"path/to/model/dir\")]\nyour_robot = robot_dart.Robot(\"path/to/model.urdf\", your_model_packages)\n
    "}]} \ No newline at end of file diff --git a/docs/sitemap.xml b/docs/sitemap.xml index bd41cff7..ed9db1fb 100644 --- a/docs/sitemap.xml +++ b/docs/sitemap.xml @@ -2,27 +2,27 @@ None - 2024-01-31 + 2024-02-01 daily None - 2024-01-31 + 2024-02-01 daily None - 2024-01-31 + 2024-02-01 daily None - 2024-01-31 + 2024-02-01 daily None - 2024-01-31 + 2024-02-01 daily \ No newline at end of file diff --git a/docs/sitemap.xml.gz b/docs/sitemap.xml.gz index dc14b12f73c3bf133af79f228aeea49e5439ea64..158d71d21c47120b06a0d914c6749b1e33e41311 100644 GIT binary patch literal 198 zcmV;%06G63iwFoGhr4A0|8r?{Wo=<_E_iKh0PT`Z4#FT1h4(!LVPBw)3pJGPT)NT& z5Q;4ch037S+e?39dIz1&ym|97-)4CI>MeTe0LHrt7ldJ?o$`%wZH2FwQ@+Qw$oL~z z&=IAug9>*s4o6DIaU@`(?&Sc)hn9ls(g1WdDI*6gGL&jMu&BAX^-7$*)0|FfyO0oj z--wcxyPTCdD^g_jHZj#n*Xmo)chN{=-<)oog-E~1Km51gb4%|kK8!u`8r}i`0LOD* AkpKVy literal 199 zcmV;&06702iwFoxM!IDJ|8r?{Wo=<_E_iKh0PT`Z4#FT1h4-F@u2X+dM@UZxxfRu#LKTb?3Fv=FMQ`%tZJi|M1^}Pc6MG{{V|Qr3c;u0072# BUMm0q diff --git a/src/examples/python/ci.py b/src/examples/python/ci.py new file mode 100644 index 00000000..8fa255fc --- /dev/null +++ b/src/examples/python/ci.py @@ -0,0 +1,45 @@ +import numpy as np +import RobotDART as rd +import dartpy # OSX breaks if this is imported before RobotDART + +# Create custom controller +class MyController(rd.RobotControl): + def __init__(self, ctrl, full_control): + rd.RobotControl.__init__(self, ctrl, full_control) + def __init__(self, ctrl, controllable_dofs): + rd.RobotControl.__init__(self, ctrl, controllable_dofs) + + def configure(self): + self._active = True + + def calculate(self, t): + target = np.array([self._ctrl]) + cmd = 100*(target-self.robot().positions(self._controllable_dofs)) + return cmd[0] + + # TO-DO: This is NOT working at the moment! + def clone(self): + return MyController(self._ctrl, self._controllable_dofs) + +# Load robot from URDF +robot = rd.Robot("arm.urdf", "arm", False) +robot.fix_to_world() + +# Initiate custom controller +control = MyController([0.0, 1.57, -0.5, 0.7], False) +# Add it to the robot +robot.add_controller(control, 1.) + +# Print initial positions of the robot +print(robot.positions()) + +# Create simulator object +simu = rd.RobotDARTSimu(0.001) + +# Add robot and floor to the simulation +simu.add_robot(robot) +simu.add_checkerboard_floor() + +simu.run(5.) + +print(robot.positions())