diff --git a/requirements.txt b/requirements.txt index cbf1e36..6f81159 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,5 @@ sphinx sphinx-rtd-theme +sphinx-copybutton +sphinx-design +sphinx-tabs \ No newline at end of file diff --git a/source/conf.py b/source/conf.py index ec79e13..20cbc68 100644 --- a/source/conf.py +++ b/source/conf.py @@ -20,7 +20,7 @@ # -- Project information ----------------------------------------------------- project = u'Robotont' -copyright = u'2023, University of Tartu, Licensed under CC BY-NC' +copyright = u'2025, University of Tartu, Licensed under CC BY-NC' author = u'Veiko Vunder' # The short X.Y version @@ -28,7 +28,6 @@ # The full version, including alpha/beta/rc tags release = u'0.0.1' - # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. @@ -40,6 +39,9 @@ # ones. extensions = [ 'sphinx.ext.autodoc', + 'sphinx_copybutton', + 'sphinx_design', + 'sphinx_tabs.tabs' ] # Add any paths that contain templates here, relative to this directory. @@ -59,7 +61,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -69,7 +71,6 @@ # The name of the Pygments (syntax highlighting) style to use. pygments_style = None - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -86,7 +87,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['nstatic'] +html_static_path = [] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -104,7 +105,6 @@ # Output file base name for HTML help builder. htmlhelp_basename = 'Robotontdoc' - # -- Options for LaTeX output ------------------------------------------------ latex_elements = { @@ -133,7 +133,6 @@ u'M', 'manual'), ] - # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples @@ -143,7 +142,6 @@ [author], 1) ] - # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -155,7 +153,6 @@ 'Miscellaneous'), ] - # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. @@ -173,5 +170,4 @@ # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] - # -- Extension configuration ------------------------------------------------- diff --git a/source/demo_gazebo.rst b/source/demo_gazebo.rst index 18ac32a..2a521d5 100644 --- a/source/demo_gazebo.rst +++ b/source/demo_gazebo.rst @@ -4,396 +4,270 @@ Demos on Gazebo ############### -Before running the demos it is necessary to get acquinted with the setup section of the documentation. +Before running the demos it is necessary to get acquainted with the setup section of the documentation. Make sure you check: :ref:`setup_pc_only` Launching the Simulation ------------------------ - -#. To launch the simulator: +#. Clone the ``robotont_gazebo`` package into your workspace: .. 
code-block:: bash - - roslaunch robotont_gazebo gazebo.launch - - -The launch file has three arguments: - -* model - chooses between a model with NUC and realsense and a model without them - - * default: robotont_gazebo_nuc - - * options: robotont_gazebo_nuc, robotont_gazebo_basic - -* world - chooses which world to use - * default: empty.world + git clone https://github.com/robotont/robotont_gazebo.git - * options: empty.world, minimaze.world, bangbang.world, between.world, colors.world +#. Build and source the newly added package: -* x_pos - chooses x coordinate of the world, controls where the robot will spawn, default: 0 - - -For example, the following command will spawn the robot to a world called bangbang.world in position x=2 and -the model that will be used is robotont_gazebo_nuc. - - .. code-block:: bash - roslaunch robotont_gazebo gazebo.launch world:=$(rospack find robotont_gazebo)/worlds/bangbang.world model:=robotont_gazebo_nuc x_pos:=2 - - -Worlds -------- - -#. minimaze.world + colcon build --packages-select robotont_gazebo + source install/setup.bash - .. image:: /files/pictures/maze.png - :width: 400 - - To run +#. Launch the simulator using the launch file: .. code-block:: bash - roslaunch robotont_gazebo world_minimaze.launch - -#. bangbang.world + ros2 launch robotont_gazebo gazebo.launch.py - .. image:: /files/pictures/bangbang.png - :width: 400 - To run +Launch file arguments +--------------------- - .. code-block:: bash - - roslaunch robotont_gazebo world_bangbang.launch +.. list-table:: + :header-rows: 1 -#. 
between.world + * - Name + - Description + - Options + * - ``generation`` + - Specify the generation of robotont model that is to be loaded + - 2.1, 3 (default) + * - ``model`` + - Specify the model that is to be loaded into the world + - robotont_gazebo_basic, robotont_gazebo_lidar, robotont_gazebo_nuc (default) + * - ``world`` + - Specify world the robot is spawned in + - bangbang.sdf, between.sdf, colors.sdf, mapping.sdf, maze.sdf, minimaze.sdf, minimaze_ar.sdf, empty_world.sdf (default) + * - ``x``, ``y``, ``z`` + - Specify the robot's spawn pose + - Number, 0 (default) - .. image:: /files/pictures/between.png - :width: 400 +.. tip:: - To run + For example, loading the generation 3 model in colors.sdf world at pose (-2, 1, 0): .. code-block:: bash - - roslaunch robotont_gazebo world_between.launch - -#. colors.world - .. image:: /files/pictures/colors.png - :width: 400 + ros2 launch robotont_gazebo gazebo.launch.py world:=colors.sdf x:=-2 y:=1 - To run - - .. code-block:: bash - - roslaunch robotont_gazebo world_colors.launch +Worlds +------ + +.. list-table:: + :header-rows: 1 + + * - World + - Example + - Launch Command + * - minimaze.sdf + - .. image:: /pictures/minimaze_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=minimaze.sdf`` + * - bangbang.sdf + - .. image:: /pictures/bangbang_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=bangbang.sdf`` + * - between.sdf + - .. image:: /pictures/between_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=between.sdf`` + * - colors.sdf + - .. image:: /pictures/colors_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=colors.sdf`` + * - mapping.sdf + - .. image:: /pictures/mapping_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=mapping.sdf`` + * - maze.sdf + - .. 
image:: /pictures/maze_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=maze.sdf`` + * - minimaze_ar.sdf + - .. image:: /pictures/minimaze_ar_world_example.png + :width: 200px + - ``ros2 launch robotont_gazebo gazebo.launch.py world:=minimaze_ar.sdf`` 2D Mapping and Localization ---------------------------- -Installation -~~~~~~~~~~~~ -The following packages are needed to run the 2d mapping demo: - - .. code-block:: bash - - sudo apt update - sudo apt install ros-noetic-depthimage-to-laserscan - sudo apt install ros-noetic-move-base - -To run the 2D mapping demo, you need to clone the base package: +Setup +~~~~~~~~~~~~~ - .. code-block:: bash - - git clone https://github.com/robotont-demos/demo_slam.git +.. hint:: - -and choose a mapping method from the following: - - 1. Cartographer - 2. Gmapping - 3. Hector SLAM - -Gmapping and AMCL -~~~~~~~~~~~~~~~~~~ - -Installation -************ - -You can clone the package for the Gmapping method from `this repository. `__ - -To clone the packages: + Before installing any packages from apt, make sure existing packages are up-to-date: .. code-block:: bash - - git clone https://github.com/robotont-demos/demo_slam_gmapping.git - git clone https://github.com/robotont-demos/demo_teleop_keyboard.git + sudo apt update && sudo apt upgrade -y -Running the demo -**************** +.. hint:: -#. Launch the simulator + ROS packages installed from apt are only available **in terminals where the ROS environment has been sourced**. + To use these packages, you must first source the general ROS 2 environment: .. code-block:: bash - - roslaunch robotont_gazebo world_minimaze.launch - -#. Launch teleop keyboard - .. code-block:: bash - - roslaunch robotont_demos teleop_keyboard.launch + source /opt/ros/jazzy/setup.bash -#. Launch 2d_slam.launch +#. Install Nav2 from apt: .. code-block:: bash - - roslaunch demo_slam_gmapping 2d_slam.launch - -#. Display the map on RViz - .. 
code-block:: bash
-
-    roslaunch demo_slam 2d_slam_display.launch
+      sudo apt install ros-jazzy-navigation2

-Cartographer
-~~~~~~~~~~~~
+#. Navigate to your colcon workspace

-Installation
-************
+   .. code-block:: bash

-You can clone the package for the Cartographer method from `this repository. `__
+      cd ~/<colcon_ws>/src

-To clone the packages:
+#. Clone the ``depthimage_to_laserscan`` package

 .. code-block:: bash
-
-    git clone https://github.com/robotont-demos/demo_slam_cartographer.git
-    git clone https://github.com/robotont-demos/demo_teleop_keyboard.git

-Running the demo
-****************
+      git clone https://github.com/ros-perception/depthimage_to_laserscan.git --branch ros2

-#. Launch the simulator
+#. Build the package:

   .. code-block:: bash
-
-    roslaunch robotont_gazebo world_minimaze.launch

-#. Launch teleop keyboard
+      colcon build --packages-select depthimage_to_laserscan

-   .. code-block:: bash
-
-      roslaunch demo_teleop teleop_keyboard.launch
+The demo for 2D slam based navigation is available from `this repository <https://github.com/robotont-demos/2d_slam>`__.

-#. Launch 2d_slam.launch
+#. Navigate to your colcon workspace

   .. code-block:: bash
-
-    roslaunch demo_slam_cartographer 2d_slam.launch

-#. Display the map on RViz
+      cd ~/<colcon_ws>/src

-   .. code-block:: bash
-
-      roslaunch demo_slam 2d_slam_display.launch
-
-Hector SLAM
-~~~~~~~~~~~~
+#. Clone the ``2d_slam`` package

-Installation
-************
+   .. code-block:: bash

-You can clone the package for the Hector SLAM method from `this repository. `__
+      git clone https://github.com/robotont-demos/2d_slam.git

-To clone the packages:
+#. Build the package:

 .. code-block:: bash
-
-    git clone https://github.com/robotont-demos/demo_slam_hector.git
-    git clone https://github.com/robotont-demos/demo_teleop_keyboard.git

-Running the demo
-****************
+      colcon build --packages-select 2d_slam

-#. Launch the simulator
+Running the demo
+~~~~~~~~~~~~~~~~~

-   .. 
code-block:: bash
-
-    roslaunch robotont_gazebo world_minimaze.launch
+The demo can be run on a Robotont featuring either a LIDAR or the standard Realsense D435i camera.

-#. Launch teleop keyboard
+.. tabs::

-   .. code-block:: bash
-
-      roslaunch demo_teleop teleop_keyboard.launch
+   .. tab:: Robotont with LIDAR

-#. Launch 2d_slam.launch
+      #. Spawn LIDAR Robotont in a gazebo world

-   .. code-block:: bash
-
-      roslaunch demo_slam_hector 2d_slam.launch
+         .. code-block:: bash

-#. Display the map on RViz
+            ros2 launch robotont_gazebo gazebo.launch.py model:=robotont_gazebo_lidar world:=<world_name>.sdf

-   .. code-block:: bash
-
-      roslaunch demo_slam 2d_slam_display.launch
-
+      #. Launch the navigation stack and slam

-Setting 2D navigation goals
-****************************
+         .. code-block:: bash

-#. Using ROS Navigation to make the robot move autonomously is pretty straightforward. There are two GUI buttons in RViz to tell the robot where it is located (if it fails to accurately localize at startup) and where it needs to go.
+            ros2 launch 2d_slam nav2_lidar_slam.launch.py

-#. For setting initial pose, click on 2D Pose Estimate and drag the arrow where and how the robot actually is.
-
-   .. image:: /files/pictures/poseestimatearrow.png
-      :width: 400
+      #. (Optional) Visualize costmaps and the robot's model in Rviz2

+         .. code-block:: bash

-#. To tell the robot where to go, click on 2D Nav Goal
-   and drag the arrow to where you want the robot to go
-   and which way does it have to face.
+            ros2 launch 2d_slam rviz2_visualize_costmaps.launch.py

-   .. image:: /files/pictures/2dnavgoalarrow.png
-      :width: 400
+   .. tab:: Robotont with Realsense D435i

-3D mapping
-----------
+      #. Spawn Robotont in a gazebo world

-Creates a 3D map of the robot's surroundings.
+         .. code-block:: bash

-Installation
-~~~~~~~~~~~~
+            ros2 launch robotont_gazebo gazebo.launch.py world:=<world_name>.sdf

-#. For 3D mapping:
+      #. Launch the navigation stack and slam

-   .. code-block:: bash
-
-      sudo apt install ros-noetic-rtabmap-ros
+         .. 
code-block:: bash -and clone the following packages: - - .. code-block:: bash - - git clone https://github.com/robotont-demos/demo_mapping_3d.git - git clone https://github.com/robotont-demos/demo_teleop_keyboard.git + ros2 launch 2d_slam nav2_realsense_slam.launch.py -Running the demo -~~~~~~~~~~~~~~~~ + #. (Optional) Visualize costmaps and the robot's model in Rviz2 -#. Launch the simulator + .. code-block:: bash - .. code-block:: bash - - roslaunch robotont_gazebo world_colors.launch + ros2 launch 2d_slam rviz2_visualize_costmaps.launch.py -#. Launch mapping_3d.launch +Setting 2D navigation goals +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - .. code-block:: bash - - roslaunch demo_mapping_3d mapping_3d.launch +Using ROS Navigation to make the robot move autonomously is straightforward. In RViz, you have two main GUI buttons: one to set the robot’s current location (if it doesn’t localize itself accurately at startup), and one to set its navigation goal. -#. Launch mapping_3d_display.launch to visualize the result +#. **To set the initial pose**: - .. code-block:: bash - - roslaunch demo_mapping_3d mapping_3d_display.launch + Click on **“2D Pose Estimate”** in the RViz toolbar, then click and drag the arrow to indicate where the robot is located and which way it is facing. + + .. image:: /pictures/pose_estimate.gif + :width: 100% -#. To move the robot open another terminal window and run teleop twist keyboard - .. code-block:: bash - - rosrun demo_teleop teleop_keyboard.launch +#. **To set a navigation goal**: - .. hint:: Notice that the teleop node only receives keypresses when the terminal window is active. + Click on **“2D Goal Pose”** in the RViz toolbar, then click and drag the arrow to the desired destination and orientation for the robot. - .. image:: /files/pictures/3d_mapping_gazebo.png - :width: 400 + .. image:: /pictures/nav_goal.gif + :width: 100% -The robot identifies and tracks the pose of the provided AR tag and acts accordingly. +3D mapping +---------- +.. 
dropdown:: -Follow the leader -~~~~~~~~~~~~~~~~~ + Creates a 3D map of the robot's surroundings. -The follow the leader demo showing the capabilities of the Robotont platform to detect and follow the AR Tag. + .. image:: /pictures/wip.gif + :width: 200 -Installation -************ -#. For AR tracking: +Follow the leader +----------------- - .. code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_follow_the_leader.git +.. dropdown:: -Running the demo -**************** + The follow the leader demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. -On the works + .. image:: /pictures/wip.gif + :width: 200 AR steering ----------- -The AR steering demo showing the capabilities of the Robotont platform to detect and follow the AR Tag. - -Installation -************ - -#. For AR tracking: - - .. code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_steering.git - -Running the demo -**************** +.. dropdown:: -#. Launch ar_steering.launch (change tag_nr with your AR tag number) + The AR steering demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. - .. code-block:: bash - - roslaunch demo_ar_steering ar_steering.launch marker_id:=tag_nr - -#. Launch the simulator - - .. code-block:: bash - - roslaunch robotont_gazebo world_colors.launch + .. image:: /pictures/wip.gif + :width: 200 AR maze ------- -The gazebo AR maze demo showing the capabilities of the Robotont platform to detect and follow the AR Tag. - -Installation -************ - -#. For AR tracking: +.. dropdown:: - .. 
code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_maze.git - -Running the demo -**************** + The AR maze demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. -#. Launch gazebo_ar_maze.launch - - .. code-block:: bash - - roslaunch demo_ar_maze gazebo_ar_maze.launch - -#. Launch the simulator - - .. code-block:: bash - - roslaunch robotont_gazebo world_minimaze_ar.launch + .. image:: /pictures/wip.gif + :width: 200 diff --git a/source/demo_robot.rst b/source/demo_robot.rst index 99c4211..eb3b026 100644 --- a/source/demo_robot.rst +++ b/source/demo_robot.rst @@ -1,311 +1,173 @@ -.. _demos_on_robot: +.. _demos_on_robotont: -################# +################## Demos on Robotont -################# +################## -Before running the demos it is necessary to get acquinted with the setup section of the documentation. - -Before running the demos on the robot read the following instructions: +Before running the demos it is necessary to get acquainted with the setup section of the documentation. +Make sure you check: * :ref:`setting_up_pc` * :ref:`connecting_remotely` -Note that some of the commands will run on Robotont on-board computer and some on user PC. - 2D Mapping and Localization ---------------------------- -The following are needed to run the 2D mapping demo: - - .. code-block:: bash - - sudo apt update - sudo apt install ros-noetic-depthimage-to-laserscan - sudo apt install ros-noetic-move-base - -To run the 2D mapping demo, you need to clone the base package: - - .. code-block:: bash - - git clone https://github.com/robotont-demos/demo_slam.git - -and choose a mapping method from the following: +Setup +~~~~~~~~~~~~~ - 1. Cartographer - 2. Gmapping - 3. Hector SLAM +.. hint:: -Gmapping and AMCL -~~~~~~~~~~~~~~~~~~ - -Installation -************ - -You can clone the package for the Gmapping method from `this repository. 
`__
-
-To clone the packages:
+   Before installing any packages from apt, make sure existing packages are up-to-date:

   .. code-block:: bash
-
-    git clone https://github.com/robotont-demos/demo_slam_gmapping.git
-    git clone https://github.com/robotont-demos/demo_teleop.git
+      sudo apt update && sudo apt upgrade -y

-Running the demo
-****************
+.. hint::

-#. **On Robotont on-board computer or on PC** launch 2d_slam.launch
+   ROS packages installed from apt are only available **in terminals where the ROS environment has been sourced**.
+   To use these packages, you must first source the general ROS 2 environment:

   .. code-block:: bash
-
-    roslaunch demo_slam_gmapping 2d_slam.launch

-#. **On PC** launch 2d_slam_display.launch to visualize the result
+      source /opt/ros/jazzy/setup.bash

-   .. code-block:: bash
-
-      roslaunch demo_slam 2d_slam_display.launch
-
-#. To move the robot open another terminal window **on robotont on-board computer or on the PC** and run teleop twist keyboard (TBA)
+#. Install Nav2 from apt:

   .. code-block:: bash
-
-    roslaunch demo_teleop teleop_keyboard.launch
-
-   .. hint:: Notice that the teleop node only receives keypresses when the terminal window is active.
-
-Cartographer
-~~~~~~~~~~~~
-Installation
-************
+      sudo apt install ros-jazzy-navigation2

-You can clone the package for the Cartographer method from `this repository. `__
-
-To clone the packages:
+#. Navigate to your colcon workspace

   .. code-block:: bash
-
-    git clone https://github.com/robotont-demos/demo_slam_cartographer.git
-    git clone https://github.com/robotont-demos/demo_teleop.git

-Running the demo
-****************
+      cd ~/<colcon_ws>/src

-#. **On Robotont on-board computer or on PC** launch 2d_slam.launch
+#. Clone the ``depthimage_to_laserscan`` package

   .. code-block:: bash
-
-    roslaunch demo_slam_cartographer 2d_slam.launch

-#. **On PC** launch 2d_slam_display.launch to visualize the result
-
-   .. 
code-block:: bash
-
-      roslaunch demo_slam 2d_slam_display.launch
+      git clone https://github.com/ros-perception/depthimage_to_laserscan.git --branch ros2

-#. To move the robot open another terminal window **on robotont on-board computer or on the PC** and run teleop twist keyboard (TBA)
+#. Build the package:

   .. code-block:: bash
-
-    roslaunch demo_teleop teleop_keyboard.launch
-
-   .. hint:: Notice that the teleop node only receives keypresses when the terminal window is active.
-
-Hector SLAM
-~~~~~~~~~~~~
-Installation
-************
+      colcon build --packages-select depthimage_to_laserscan

-You can clone the package for the Hector SLAM method from `this repository. `__
+The demo for 2D slam based navigation is available from `this repository <https://github.com/robotont-demos/2d_slam>`__.

-To clone the packages:
+#. Navigate to your colcon workspace

   .. code-block:: bash
-
-    git clone https://github.com/robotont-demos/demo_slam_hector.git
-    git clone https://github.com/robotont-demos/demo_teleop.git

-Running the demo
-****************
+      cd ~/<colcon_ws>/src

-#. **On Robotont on-board computer or on PC** launch 2d_slam.launch
+#. Clone the ``2d_slam`` package

   .. code-block:: bash
-
-    roslaunch demo_slam_hector 2d_slam.launch
-
-#. **On PC** launch 2d_slam_display.launch to visualize the result
-
   .. code-block:: bash
-
-      roslaunch demo_slam 2d_slam_display.launch
+      git clone https://github.com/robotont-demos/2d_slam.git

-#. To move the robot open another terminal window **on robotont on-board computer or on the PC** and run teleop twist keyboard.
+#. Build the package:

   .. code-block:: bash
-
-    roslaunch demo_teleop teleop_keyboard.launch
-
-   .. hint:: Notice that the teleop node only receives keypresses when the terminal window is active.
-
-Setting 2D navigation goals
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-#. Using ROS Navigation to make the robot move autonomously is pretty straightforward. 
There are two GUI buttons in RViz to tell the robot where it is located (if it fails to accurately localize at startup) and where it needs to go. - -#. For setting initial pose, click on 2D Pose Estimate and drag the arrow where and how the robot actually is. - - .. image:: /files/pictures/poseestimatearrow.png - :width: 400 - - -#. To tell the robot where to go, click on 2D Nav Goal - and drag the arrow to where you want the robot to go - and which way does it have to face. - - .. image:: /files/pictures/2dnavgoalarrow.png - :width: 400 - -3D mapping ----------- - -Creates a 3D map of the robot's surroundings. - -Installation -~~~~~~~~~~~~ + colcon build --packages-select 2d_slam -#. For 3D mapping: +Running the demo +~~~~~~~~~~~~~~~~~ - .. code-block:: bash - - sudo apt install ros-noetic-rtabmap-ros +The demo can be run on a Robotont featuring either a LIDAR or the standard Realsense D435i camera -and clone the following packages: - - .. code-block:: bash - - git clone https://github.com/robotont-demos/demo_mapping_3d.git - git clone https://github.com/robotont-demos/demo_teleop.git +.. tabs:: -Running the demo -~~~~~~~~~~~~~~~~ + .. tab:: Robotont with LIDAR -#. **On Robotont on-board computer or on PC** launch mapping_3d.launch + #. Launch the navigation stack and slam - .. code-block:: bash - - roslaunch demo_mapping_3d mapping_3d.launch + .. code-block:: bash -#. **On PC** launch mapping_3d_display.launch to visualize the result + ros2 launch 2d_slam nav2_lidar_slam.launch.py - .. code-block:: bash - - roslaunch demo_mapping_3d mapping_3d_display.launch + #. (Optional) Visualize costmaps and the robot's model in Rviz2 -#. To move the robot open another terminal window **on robotont on-board computer or on user PC** and run teleop twist keyboard - - .. code-block:: bash - - rosrun demo_teleop teleop_keyboard.launch + .. code-block:: bash - .. hint:: Notice that the teleop node only receives keypresses when the terminal window is active. 
+ ros2 launch 2d_slam rviz2_visualize_costmaps.launch.py - .. image:: /files/pictures/3dmap.png - :width: 400 + .. tab:: Robotont with Realsense D435i -AR tracking ------------ + #. Launch the navigation stack and slam -The robot identifies and tracks the pose of the provided AR tag and acts accordingly. + .. code-block:: bash -Follow the leader -~~~~~~~~~~~~~~~~~ + ros2 launch 2d_slam nav2_realsense_slam.launch.py -The follow the leader demo showing the capabilities of the Robotont platform to detect and follow the AR Tag. + #. (Optional) Visualize costmaps and the robot's model in Rviz2 -Installation -************ + .. code-block:: bash -#. For AR tracking: + ros2 launch 2d_slam rviz2_visualize_costmaps.launch.py - .. code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_follow_the_leader.git +Setting 2D navigation goals +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Running the demo -**************** +Using ROS Navigation to make the robot move autonomously is straightforward. In RViz, you have two main GUI buttons: one to set the robot’s current location (if it doesn’t localize itself accurately at startup), and one to set its navigation goal. -#. **On Robotont on-board computer or on PC** launch ar_follow_the_leader.launch (change tag_nr with your AR tag number) +#. **To set the initial pose**: - .. code-block:: bash - - roslaunch demo_ar_follow_the_leader ar_follow_the_leader.launch marker_id:=tag_nr + Click on **“2D Pose Estimate”** in the RViz toolbar, then click and drag the arrow to indicate where the robot is located and which way it is facing. -#. **On PC** launch ar_marker_display.launch to visualize the result + .. image:: /pictures/pose_estimate.gif + :width: 100% - .. code-block:: bash - - roslaunch demo_ar_follow_the_leader ar_marker_display.launch -AR steering -~~~~~~~~~~~ +#. 
**To set a navigation goal**: -The AR steering demo showing the capabilities of the Robotont platform to detect and follow the AR Tag. + Click on **“2D Goal Pose”** in the RViz toolbar, then click and drag the arrow to the desired destination and orientation for the robot. -Installation -************ + .. image:: /pictures/nav_goal.gif + :width: 100% -#. For AR tracking: +3D mapping +---------- +.. dropdown:: - .. code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_steering.git + Creates a 3D map of the robot's surroundings. -Running the demo -**************** + .. image:: /pictures/wip.gif + :width: 200 -#. **On Robotont on-board computer or on PC** launch ar_steering.launch (change tag_nr with your AR tag number) - .. code-block:: bash - - roslaunch demo_ar_steering ar_steering.launch marker_id:=tag_nr +Follow the leader +----------------- -#. **On PC** launch ar_marker_display.launch to visualize the result - - .. code-block:: bash - - roslaunch demo_ar_steering ar_marker_display.launch +.. dropdown:: + The follow the leader demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. -AR Maze -~~~~~~~ + .. image:: /pictures/wip.gif + :width: 200 -The AR maze demo showing the capabilities of the Robotont platform to detect and follow the AR Tag and navigate through the maze. +AR steering +----------- -Installation -************ +.. dropdown:: -#. For AR tracking: + The AR steering demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. - .. code-block:: bash - - git clone https://github.com/machinekoder/ar_track_alvar.git -b noetic-devel - git clone https://github.com/robotont-demos/demo_ar_maze.git + .. image:: /pictures/wip.gif + :width: 200 -Running the demo -**************** +AR maze +------- -#. **On Robotont on-board computer or on PC** launch ar_maze.launch +.. dropdown:: - .. 
code-block:: bash - - roslaunch demo_ar_maze ar_maze.launch + The AR maze demo shows the capabilities of the Robotont platform to detect and follow the AR Tag. - .. hint:: Make sure to modify the list with ar tags for maze navigation in 8th line of ar_maze.launch: - roslaunch demo_ar_maze ar_maze.launch marker_ids:="4,10,5" + .. image:: /pictures/wip.gif + :width: 200 diff --git a/source/index.rst b/source/index.rst index 9c0a838..dd26ecb 100644 --- a/source/index.rst +++ b/source/index.rst @@ -1,12 +1,8 @@ ###################### -ROS2 CONSTRUCTION ZONE +Robotont documentation ###################### -.. warning:: - - This documentation is a work in progress. It is not yet complete. - ################# Table of Contents ################# diff --git a/source/overview.rst b/source/overview.rst index c52f1ea..aaeeadf 100644 --- a/source/overview.rst +++ b/source/overview.rst @@ -18,59 +18,92 @@ ROBOTONT currently includes the following out-of-the-box demos: * AR marker tracking, and * gesture-based human-robot interaction. - .. image:: /files/pictures/robotont_gen_2_3_multi_view.png - :width: 800 + .. image:: /pictures/robotont_generations.jpg + :width: 100% Specification ------------- +.. tabs:: -`On-board computer - Intel NUC7i5BNK `__ + .. tab:: Generation 2.1 -.. csv-table:: - :widths: 20, 20 + .. image:: /pictures/robotont_gen2.jpg + :align: center + :width: 80% - "Processor", "Intel Core i5 (7th Gen) 7260U (2 cores, up to 3.4 GHz)" - "RAM", "DDR4 2133 MHz 4 GB" - "GPU", "Intel Iris Plus Graphics 640" - "Peripherals", "1x HDMI, 4 x USB 3.0 Type A, 1 x Thunderbolt 3/DisplayPort/USB-C 3.1 Gen2" - "Storage", "Transcend MTS420 M2.0 SSD 120 GB" - "Network", "Intel I219-V Gigabit Ethernet – RJ45
Intel Dual Band Wireless-AC 8265, IEEE 802.11a/b/g/n/ac" + **On-board computer** – `Intel NUC7i5BNK `__ -`Development board for external devices and motors- ARM NUCLEO-L476RG `__ + - **Processor**: Intel Core i5 (7th Gen) 7260U (2 cores, up to 3.4 GHz) + - **RAM**: DDR4 2133 MHz 4 GB + - **GPU**: Intel Iris Plus Graphics 640 + - **Peripherals**: 1x HDMI, 4 x USB 3.0 Type A, 1 x Thunderbolt 3/DisplayPort/USB-C 3.1 Gen2 + - **Storage**: Transcend MTS420 M2.0 SSD 120 GB + - **Network**: Intel I219-V Gigabit Ethernet – RJ45 + Intel Dual Band Wireless-AC 8265, IEEE 802.11a/b/g/n/ac -.. csv-table:: - :widths: 20, 20 + **Development board** – `ARM NUCLEO-L476RG `__ - "CPU", "ARM®32-bit Cortex®-M4, 80 MHz" - "Debugging and programming interface","ST-LINK/V2-1" - "Connectivity", "mini-USB" - "GPIO", "51" + - **CPU**: ARM® 32-bit Cortex®-M4, 80 MHz + - **Debugging**: ST-LINK/V2-1 + - **Connectivity**: mini-USB + - **GPIO**: 51 -`Motors – DC motors with encoders `__ + **Motors** – `Pololu 1442 `__ -.. csv-table:: - :widths: 20, 20 + - **Voltage**: 12 V + - **Stall current**: 5000 mA + - **Max rpm**: 500 + - **Max torque**: 0.59 Nm + - **Gear Ratio**: 19:1 + - **Encoder (motor)**: 64 counts/rev + - **Encoder (gearbox)**: 1200 counts/rev - "Voltage", "12 V" - "Stall current", "5000 mA" - "Max rpm", "500" - "Max torque", "0.59 Nm" - "Gear Ratio", "19:1" - "Encoder resolution (motor)", "64 counts per revolution" - "Encoder resolution (gearbox)", "1200 counts per revolution" + **3D Camera** – `Intel Realsense D435i `__ -`3D depth camera – Intel Realsense D435 `__ + - **Depth Resolution**: 1280 x 720 + - **FOV (Horizontal)**: 87° (depth), 69.4° (RGB) + - **FOV (Vertical)**: 58° (depth), 42.5° (RGB) + - **RGB Sensor**: 1920 x 1080 @ 30 fps + - **Min Depth**: ~28cm (720p), ~10cm (480p) + - **Operating range**: ~0.3 - 3 meters + - **Connection**: USB 3.1 Type-C + + .. tab:: Generation 3.0 + + .. 
image:: /pictures/robotont_gen3.jpg + :align: center + :width: 80% + + **On-board computer** – `Intel NUC13ANKI5 `__ + + - **Processor**: Intel® Core™ i5-1340P Processor 12M Cache, up to 4.60 GHz + - **RAM**: DDR4 3200MHz 16GB + - **GPU**: Intel® Iris® Xe Graphics + - **Peripherals**: 2x Thunderbolt 4, 3x USB 3.2 Type-A, 1x USB 2.0, 1x 3.5mm Audio Jack + - **Storage**: SSD 250GB Kingston NV2 M.2 NVMe + - **Network**: Intel Wi-Fi 6E AX211 + Bluetooth 5.3, 1x 2.5Gb LAN + + **Motors** – `Pololu 1442 `__ + + - **Voltage**: 12 V + - **Stall current**: 5500 mA + - **Max rpm**: 530 + - **Max torque**: 0.83 Nm + - **Gear Ratio**: 18.75:1 + - **Encoder (motor)**: 64 counts/rev + - **Encoder (gearbox)**: 1200 counts/rev + + **3D Camera** – `Intel Realsense D435i `__ + + - **Depth Resolution**: 1280 x 720 + - **FOV (Horizontal)**: 87° (depth), 69.4° (RGB) + - **FOV (Vertical)**: 58° (depth), 42.5° (RGB) + - **RGB Sensor**: 1920 x 1080 @ 30 fps + - **Min Depth**: ~28cm (720p), ~10cm (480p) + - **Operating range**: ~0.3 - 3 meters + - **Connection**: USB 3.1 Type-C -.. 
csv-table:: - :widths: 20, 20 - "Depth Stream Output Resolution", "1280 x 720" - "Depth Field of View horizontal", "85.2 deg" - "Depth Field of Vies vertical", "58 deg" - "RGB Sensor Resolution and Frame Rate", "1920 x 1080 at 30 fps" - "Minimum depth", "110 mm" - "Max range", "Approx.10 meters; Varies depending on calibration, scene, and lighting condition" - "Connection type", "USB 3 Type-C" diff --git a/source/pictures/bangbang_world_example.png b/source/pictures/bangbang_world_example.png new file mode 100644 index 0000000..8fe66d9 Binary files /dev/null and b/source/pictures/bangbang_world_example.png differ diff --git a/source/pictures/between_world_example.png b/source/pictures/between_world_example.png new file mode 100644 index 0000000..c4b1d40 Binary files /dev/null and b/source/pictures/between_world_example.png differ diff --git a/source/pictures/colors_gazebo.png b/source/pictures/colors_gazebo.png new file mode 100644 index 0000000..1d69604 Binary files /dev/null and b/source/pictures/colors_gazebo.png differ diff --git a/source/pictures/colors_world_example.png b/source/pictures/colors_world_example.png new file mode 100644 index 0000000..4ec66ab Binary files /dev/null and b/source/pictures/colors_world_example.png differ diff --git a/source/pictures/dualsense_conf.png b/source/pictures/dualsense_conf.png new file mode 100644 index 0000000..11208cb Binary files /dev/null and b/source/pictures/dualsense_conf.png differ diff --git a/source/pictures/laserscan_gazebo.png b/source/pictures/laserscan_gazebo.png new file mode 100644 index 0000000..a8ca4d4 Binary files /dev/null and b/source/pictures/laserscan_gazebo.png differ diff --git a/source/pictures/laserscan_rviz.png b/source/pictures/laserscan_rviz.png new file mode 100644 index 0000000..f2627b0 Binary files /dev/null and b/source/pictures/laserscan_rviz.png differ diff --git a/source/pictures/laserscan_terminal.png b/source/pictures/laserscan_terminal.png new file mode 100644 index 0000000..2acb46d 
Binary files /dev/null and b/source/pictures/laserscan_terminal.png differ diff --git a/source/pictures/mapping_world_example.png b/source/pictures/mapping_world_example.png new file mode 100644 index 0000000..3a94a20 Binary files /dev/null and b/source/pictures/mapping_world_example.png differ diff --git a/source/pictures/maze_world_example.png b/source/pictures/maze_world_example.png new file mode 100644 index 0000000..a9aeeec Binary files /dev/null and b/source/pictures/maze_world_example.png differ diff --git a/source/pictures/minimaze_ar_world_example.png b/source/pictures/minimaze_ar_world_example.png new file mode 100644 index 0000000..7d8e763 Binary files /dev/null and b/source/pictures/minimaze_ar_world_example.png differ diff --git a/source/pictures/minimaze_world_example.png b/source/pictures/minimaze_world_example.png new file mode 100644 index 0000000..7b39d82 Binary files /dev/null and b/source/pictures/minimaze_world_example.png differ diff --git a/source/pictures/nav_goal.gif b/source/pictures/nav_goal.gif new file mode 100644 index 0000000..e46ae7f Binary files /dev/null and b/source/pictures/nav_goal.gif differ diff --git a/source/pictures/pose_estimate.gif b/source/pictures/pose_estimate.gif new file mode 100644 index 0000000..d52581f Binary files /dev/null and b/source/pictures/pose_estimate.gif differ diff --git a/source/pictures/robotont_gen2.jpg b/source/pictures/robotont_gen2.jpg new file mode 100644 index 0000000..3fb3b8c Binary files /dev/null and b/source/pictures/robotont_gen2.jpg differ diff --git a/source/pictures/robotont_gen3.jpg b/source/pictures/robotont_gen3.jpg new file mode 100644 index 0000000..d6a0614 Binary files /dev/null and b/source/pictures/robotont_gen3.jpg differ diff --git a/source/pictures/robotont_generations.jpg b/source/pictures/robotont_generations.jpg new file mode 100644 index 0000000..b37ff9e Binary files /dev/null and b/source/pictures/robotont_generations.jpg differ diff --git 
a/source/pictures/teleop_twist_terminal.png b/source/pictures/teleop_twist_terminal.png new file mode 100644 index 0000000..025c276 Binary files /dev/null and b/source/pictures/teleop_twist_terminal.png differ diff --git a/source/pictures/terminal.png b/source/pictures/terminal.png deleted file mode 100644 index f56dee9..0000000 Binary files a/source/pictures/terminal.png and /dev/null differ diff --git a/source/pictures/twist_keys.png b/source/pictures/twist_keys.png deleted file mode 100644 index 86c9235..0000000 Binary files a/source/pictures/twist_keys.png and /dev/null differ diff --git a/source/pictures/wip.gif b/source/pictures/wip.gif new file mode 100644 index 0000000..9841bb2 Binary files /dev/null and b/source/pictures/wip.gif differ diff --git a/source/sensors.rst b/source/sensors.rst index 74e0c8f..bffc5ca 100644 --- a/source/sensors.rst +++ b/source/sensors.rst @@ -2,44 +2,87 @@ Sensors ####### -Robotont uses a Realsense D435i 3D camera, which provides a regular camera feed and a depth sensor. +The Robotont platform includes an Intel RealSense D435i 3D camera, capable of streaming both regular color images and depth data. These camera feeds are available automatically as soon as the robot is turned on. -The camera feed is launched automatically when the robot is turned on. +Setup +----- -Displaying the camera feed --------------------------- +#. Clone the `depthimage_to_laserscan `__ package into your workspace and build: + + .. code-block:: bash -#. Establish an ssh connection between the robot and the PC as shown here: :ref:`setting_up_pc` + cd ~//src + git clone https://github.com/ros-perception/depthimage_to_laserscan.git --branch ros2 + colcon build +#. Setup distributed ROS 2 as shown here: :ref:`same_env` +#. Establish an SSH connection between the robot and the PC as shown here: :ref:`ssh` -#. **On the PC** display the feed on RViz + + +Displaying the camera feed +-------------------------- + +#. **In Terminal**, on the PC, start Rviz2: .. 
code-block:: bash - roslaunch rviz rviz + rviz2 -Click on Add and select Camera. In the Camera topic field, select /camera/color/image_raw. +#. Click on **Add** and select **Camera**. In the Camera **Image Topic** field, select */camera/color/image_raw*. - .. image:: /files/pictures/camera_view.png - :width: 400 + .. image:: /pictures/camera_view.png + :width: 100% Getting distances from objects ------------------------------ -Laserscan_to_distance node provides distances from the closest object from the left, the right and the middle. +The `depthimage_to_laserscan` node converts the RealSense camera's depth image into a 2D LaserScan message, which you can use to estimate distances to objects directly in front of the robot. -#. To run laserscan_to_distance node **on Robotont on-board computer** +#. **Launch the depthimage_to_laserscan node** on the Robotont or your PC: .. code-block:: bash - - roslaunch robotont_laserscan_to_distance distance_from_depth_image.launch -#. To display the distances either **on PC** or **on Robotont on-board computer** + ros2 run depthimage_to_laserscan depthimage_to_laserscan_node - .. code-block:: bash - - rostopic echo /scan_to_distance + .. hint:: + Make sure the parameters for the depth image topic and camera info match your camera's output, e.g.: + + .. code-block:: bash + + ros2 run depthimage_to_laserscan depthimage_to_laserscan_node \ + --ros-args \ + --remap depth:=/camera/depth/image_raw \ + --remap depth_camera_info:=/camera/color/camera_info + +#. **Visualize and analyze the LaserScan data**: + + .. admonition:: Option 1: Rviz2 + + * Click on **Add** and select **LaserScan**. In the LaserScan **Topic** field, select */scan* + + .. list-table:: + :widths: 50 50 + :header-rows: 0 + + * - Gazebo simulation + + .. image:: /pictures/laserscan_gazebo.png + :width: 100% + - Rviz2 LaserScan visualization + + .. image:: /pictures/laserscan_rviz.png + :width: 100% + + .. 
admonition:: Option 2: View raw data + + * **In Terminal**: + + .. code-block:: bash + + ros2 topic echo /scan - .. image:: /files/pictures/terminal.png - :width: 400 \ No newline at end of file + * The messages are of type :code:`sensor_msgs/LaserScan` — see its structure on the `ROS 2 sensor_msgs/LaserScan documentation `__ + .. image:: /pictures/laserscan_terminal.png + :width: 100% \ No newline at end of file diff --git a/source/setup_pc_only.rst b/source/setup_pc_only.rst index 0a3042b..6d66609 100644 --- a/source/setup_pc_only.rst +++ b/source/setup_pc_only.rst @@ -10,19 +10,19 @@ This setup tutorial will guide you through setting up your PC to run the simulat Installing Ubuntu ----------------- -Download and install Ubuntu Linux on your PC from the following link: `Ubuntu 20.04.6 LTS (Focal Fossa) `__. +#. Download Ubuntu image on your PC from the following link: `Ubuntu 24.04.2 (Noble Numbat) `__. -The guide to install Ubuntu on your PC can be found `here `__. +#. For installing Ubuntu on your PC, follow the guide `Install Ubuntu Desktop `__. Installing ROS -------------- -Install ROS Noetic by following the guide: `ROS Noetic `__. +For installing ROS 2 Jazzy, follow the guide for `Ubuntu (deb packages) `__. -Creating a catkin workspace +Creating a colcon workspace ---------------------------- -Create a workspace for catkin as shown `here `__. +Create a workspace for colcon as shown `here `__. Cloning Robotont's packages ----------------------------- @@ -37,7 +37,7 @@ Packages necessary to run the Gazebo simulation with Robotont's demos are follow #. `robotont_gazebo `__ -#. `robotont_navigation `__ +#. `robotont_navigation `__ #. `robotont_msgs `__ @@ -56,28 +56,40 @@ To clone the packages, for example, robotont_description: git clone https://github.com/robotont/robotont_description.git -Building the catkin workspace +Building the colcon workspace ------------------------------ .. 
code-block:: bash - cd catkin_ws - catkin build + cd colcon_ws + colcon build Sourcing the workspace ----------------------- -Make the workspace visible to ROS (must be done for every new terminal) +Make the workspace visible to ROS 2 (must be done for every new terminal) .. code-block:: bash - source ~/catkin_ws/devel/setup.bash + source ~/colcon_ws/install/setup.bash For automatic sourcing: .. code-block:: bash - echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc + echo "source ~/colcon_ws/install/setup.bash" >> ~/.bashrc +Running the Simulation +---------------------- +After building and sourcing your workspace, you can spawn the robot in a Gazebo world, for example: + +.. code-block:: bash + + ros2 launch robotont_gazebo gazebo.launch.py world:=colors.sdf + +.. image:: /pictures/colors_world_example.png + :width: 100% + +Refer to individual demo package READMEs for more details on launching specific demos. diff --git a/source/setup_robot_pc.rst b/source/setup_robot_pc.rst index 414777c..f140eb6 100644 --- a/source/setup_robot_pc.rst +++ b/source/setup_robot_pc.rst @@ -13,33 +13,32 @@ Setting up the PC Installing Ubuntu ----------------- -Download and install Ubuntu Linux on your PC from the following link: `Ubuntu 20.04.6 LTS (Focal Fossa) `__. +#. Download Ubuntu image on your PC from the following link: `Ubuntu 24.04.2 (Noble Numbat) `__. -The guide to install Ubuntu on your PC can be found `here `__. +#. For installing Ubuntu on your PC, follow the guide `Install Ubuntu Desktop `__. Installing ROS -------------- -Install ROS Noetic by following the guide: `ROS Noetic `__. +For installing ROS 2 Jazzy, follow the guide for `Ubuntu (deb packages) `__. - -Creating a catkin workspace +Creating a colcon workspace ---------------------------- -Create a workspace for catkin as shown `here `__. +Create a workspace for colcon as shown `here `__. 
Cloning Robotont's packages ----------------------------- All Robotont's packages can be accessed from `Robotont's GitHub `__. -Packages necessary to run the demos from PC's terminal are following: +Packages necessary to run the demos from PC's Terminal are the following: #. `robotont_description `__ #. `robotont_nuc_description `__ -#. `robotont_navigation `__ +#. `robotont_navigation `__ You can find the demos from the following repositories: @@ -56,28 +55,28 @@ To clone the packages: git clone https://github.com/robotont/package_name.git -Building the catkin workspace +Building the colcon workspace ------------------------------ .. code-block:: bash - cd catkin_ws - catkin build + cd colcon_ws + colcon build Sourcing the workspace ----------------------- -Make the workspace visible to ROS (must be done for every new terminal) +Make the workspace visible to ROS 2 (must be done for every new Terminal session) .. code-block:: bash - source ~/catkin_ws/devel/setup.bash + source ~/colcon_ws/install/setup.bash For automatic sourcing: .. code-block:: bash - echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc + echo "source ~/colcon_ws/install/setup.bash" >> ~/.bashrc .. _connecting_remotely: @@ -96,162 +95,219 @@ AP connection Access Point (AP) connection involves connecting directly to the robotont's own network. -This method allows for direct communication with the robot without needing an external network infrastructure and is the easiest way to connect to the robot. +This method allows for direct communication with the robot without needing an external network infrastructure and is the easiest way to connect. The topology of the network can be seen in the following image: - .. image:: /files/pictures/apconfig.png - :width: 400 + .. image:: /pictures/apconfig.png + :width: 100% You can achieve this by connecting the user PC to Robotont's network. - .. image:: /files/pictures/wifi_screen.png - :width: 400 + .. 
image:: /pictures/wifi_screen.png + :align: center + :width: 60% Client connection ----------------- This method involves connecting the robot and the user PC to the same network. The user PC can then connect to the robot using the robot's IP address or hostname. - .. image:: /files/pictures/ssh_graph.png - :width: 400 + .. image:: /pictures/ssh_graph.png + :width: 100% This approach can be used to have multiple Robotonts and PCs within the same network. That is particularly helpful when setting up a classroom with multiple Robotonts. - .. image:: /files/pictures/naming_router.png - :width: 400 + .. image:: /pictures/naming_router.png + :width: 100% +.. _same_env: -IP addresses and Hostnames ---------------------------- +Distributed ROS 2 +----------------- -Both AP and Client connection methods can be used with either an IP address or a hostname based setup. +ROS 2 is designed for distributed systems out of the box. Unlike ROS, it does not use a central ROS Master. Instead, nodes discover each other using `DDS `__. -If you opt for an IP-address based setup, you can skip the hosts file setup. +There are two options for setting up a distributed system, either using static IPs or defining hostnames on each of the devices. -In the following examples, we assume the Robotont and the PC having the following configuration: +.. note:: + For consistent networking, assign static IP addresses or use DHCP reservation for both the robot and your PC -.. csv-table:: - :header: "Machine", "Hostname", "IP-address","Netmask" - :widths: 40, 40, 40,40 +.. tabs:: - "Robotont", "robotont-1", "192.168.200.1", "255.255.255.0" - "PC", "laptop-1", "192.168.200.101","255.255.255.0" + .. tab:: Use static IPs -Hostname based setup -********************* + **On Robotont (on-board computer):** -In the hostname based configuration, the robot and PC query each other via hostnames. It means that both hosts need to have each other's names associated with IP addresses. 
These hostname <--> IP pairs are defined in the `/etc/hosts` file. Use your favorite text editor and make sure the following entries exist. + .. code-block:: bash -**/etc/hosts on Robotont on-board computer:** + export ROS_DOMAIN_ID=10 + export ROS_IP=192.168.200.1 -.. code-block:: bash + **On PC:** - 127.0.1.1 robotont-1 - 192.168.200.101 laptop-1 + .. code-block:: bash + export ROS_DOMAIN_ID=10 + export ROS_IP=192.168.200.101 -**/etc/hosts on PC:** + .. important:: + Replace the IP addresses with the actual addresses of the devices -.. code-block:: bash + To make these settings persistent, append them to the `.bashrc` file: - 127.0.1.1 laptop-1 - 192.168.200.1 robotont-1 + .. code-block:: bash + echo 'export ROS_DOMAIN_ID=10' >> ~/.bashrc + echo 'export ROS_IP=192.168.200.101' >> ~/.bashrc + + .. note:: + The ``ROS_IP`` variable is helpful if you have multiple network interfaces or encounter issues with node discovery. In many typical setups, ROS 2 nodes will communicate without setting it -.. image:: /files/pictures/hostfile.png - :width: 400 + .. tab:: Define hostnames -SSH ---- -SSH is a secure way to connect to the robot and run commands on it. It is a good way to check the status of the robot and to run commands on it. + On each device: + + 1. **Edit the `/etc/hosts` file**: + + .. code-block:: bash + + sudo nano /etc/hosts + + 2. **Add entries like this**: + + .. code-block:: text + + 192.168.200.1 robotont-1 + 192.168.200.101 laptop-1 + + .. important:: + Replace the IP addresses with the actual addresses of the devices + + 3. **Save and exit**. You can now use hostnames in your ROS 2 setup. Test with: + + .. code-block:: bash + + ping robotont-1 + + If the ping succeeds, hostname resolution is working. ROS 2 nodes can communicate with no extra configuration beyond being on the same subnet. + + .. admonition:: Optional + + On both devices, set common DDS domain: + + .. code-block:: bash -It can be done using the IP address of the robot or the hostname. 
+ export ROS_DOMAIN_ID=10 -You can achieve this by following the steps below: + To make this persistent: -1. Open a new terminal window + .. code-block:: bash -2. Connect the user PC to Robotont's network. + echo 'export ROS_DOMAIN_ID=10' >> ~/.bashrc -3. Establish an ssh connection with either IP address or hostname. +.. _verifying_communication: -If you set up the hosts file (change the X with the ID written on the robot): +Verifying Communication +------------------------ + +1. On the Robotont, start a ROS 2 publisher: .. code-block:: bash - - ssh peko@robotont-X - .. image:: /files/pictures/ssh_nt.png - :width: 400 - -Otherwise, use the IP address: + ros2 run demo_nodes_cpp talker - .. code-block:: bash - - ssh peko@ip_of_the_robot +2. On the PC, start a ROS 2 subscriber: -4. If a yes/no question is asked, enter yes + .. code-block:: bash -5. Enter the password + ros2 run demo_nodes_cpp listener +If setup correctly, the PC should receive messages from the Robotont. -6. When logged in successfully, you can see that the terminal prompt has changed to peko@robotont-X. This will be an important reference when trying to figure out which terminal is connected to where. +.. _ssh: - .. image:: /files/pictures/ssh_nt2.png - :width: 400 +SSH +--- +`SSH `__ provides a safe and reliable way to remotely connect to the robot, allowing you to check its status and execute commands from your PC. -7. After logging into the robot, the ROS environment should be automatically sourced for you. You can quickly display the last lines of the file with tail ~/.bashrc command to examine which workspaces are sourced. +You can connect to the robot using either its IP address or hostname (if defined in the source machine's `/etc/hosts` file). -.. _same_env: +Follow these steps: -Distributed ROS ----------------- +1. Open a new Terminal window on your PC -The ROS environment can be distributed across multiple machines. 
This means that the ROS Master can be running on one machine, while the nodes are running on another. This is useful when the robot has limited computational resources and the user wants to run the nodes on a more powerful machine. +2. Connect your PC to Robotont’s network -Hostname based approach -*************** +3. Start the SSH connection using either the robot’s hostname or IP address: -We need to tell the PC to look for a ROS Master on Robotont. We do that by modifying a special environment variable named `ROS_MASTER_URI`, which by default points to localhost. +.. tabs:: -**on PC**, open a terminal and enter: + .. tab:: Using the hostname -.. code-block:: bash + .. code-block:: bash - export ROS_MASTER_URI=http://robotont-1:11311 + ssh @ -Now all ROS nodes you run in this terminal will connect to the Master on the Robotont. Test it with e.g. `rosnode list`. -Note that the environment variable has to be set for each terminal window! To make it automatic, you can add the line to the end of the `.bashrc` file in the home directory of the PC: + .. hint:: + Replace ** with an user registered on the target machine and ** with the robot's hostname, e.g -.. code-block:: bash + .. code-block:: bash - echo 'export ROS_MASTER_URI=http://robotont-1:11311' >> ~/.bashrc + ssh peko@robotont-3 + .. image:: /pictures/ssh_nt.png + :width: 100% -IP address based approach -************************* + .. tab:: Using the IP address -To set up the ROS environment with an IP based setup, the `ROS_IP` environmental variable has to be set on both sides. + .. code-block:: bash -**on Robotont on-board computer:** + ssh @ -Add the user PC's IP address to the `ROS_IP` environment variable. + .. hint:: + Replace ** with an user registered on the target machine and ** with the robot's IP address, e.g -.. code-block:: bash + .. code-block:: bash - export ROS_IP=192.168.200.101 + ssh peko@192.168.1.200 +4. 
If prompted with a “yes/no” question about authenticity, type ``yes`` and press **Enter**. -**on PC:** +5. **Enter the password** when prompted -Add the robot's IP address to the `ROS_IP` environment variable. +6. Verify the login: -.. code-block:: bash + When logged in, the terminal prompt will change to ``peko@robotont-X`` (or similar), indicating you are connected to the robot. This helps you identify which terminal is connected remotely. + + .. image:: /pictures/ssh_nt2.png + :width: 100% + +7. ROS environment setup: + + The robot should automatically source its ROS environment on login. To check which workspaces are being sourced, you can run: + + .. code-block:: bash + + tail ~/.bashrc + +.. tip:: + + If you have connection issues, double-check the robot’s network settings and ensure you are using the correct hostname or IP address + +Troubleshooting Tips +-------------------- + +- Make sure both devices are on the same network/subnet. +- Check that firewalls allow multicast UDP traffic. +- Use the same ``ROS_DOMAIN_ID`` on all machines. +- If communication issues persist: + + - Try a different DDS implementation (e.g., Cyclone DDS or Fast DDS). + - Explicitly set the middleware with: - export ROS_MASTER_URI=http://192.168.200.1:11311 - export ROS_IP=192.168.200.101 + .. code-block:: bash + export RMW_IMPLEMENTATION=rmw_cyclonedds_cpp -Similarly to the hostname based setup, append the commands to `.bashrc` to set the variables automatically. diff --git a/source/teleop_robot.rst b/source/teleop_robot.rst index 62e52e4..2d40ee4 100644 --- a/source/teleop_robot.rst +++ b/source/teleop_robot.rst @@ -1,77 +1,225 @@ -##################### -Controlling the robot -##################### +########################### +Controlling the real robot +########################### - .. image:: /files/pictures/coord.png - :width: 400 +You can control Robotont using either your keyboard, a gamepad or a web-based interface. 
This section explains how to send movement commands to the robot and interact using each of these methods. -#. The robot driver subscribes to a specific type of messages called *velocity commands*. The standard name for this topic is :code:`/cmd_vel`. + .. image:: /pictures/coord.png + :width: 100% -#. The message is of type :code:`geometry_msgs/Twist` and it's structure can be found from `ROS wiki `__. +* The robot driver subscribes to a specific type of message called *velocity commands*. The standard name for this topic is :code:`/cmd_vel`. -#. To set and control the robot speed, the velocity commands need to be published continuously. +* The message is of type :code:`geometry_msgs/Twist` — see its structure on the `ROS 2 geometry_msgs/Twist documentation `__. +* To set and control the robot speed, the velocity commands need to be published continuously. Controlling the robot using teleop twist keyboard ------------------------------------------------- -#. If teleop twist keyboard is not installed + +Setup +~~~~~~ + +.. hint:: + + Before installing any packages from apt, make sure existing packages are up-to-date: .. code-block:: bash - - sudo apt update - sudo apt install ros-noetic-teleop-twist-keyboard -#. Open a new terminal window + sudo apt update && sudo apt upgrade -y -#. Get the robot and PC into the same ROS environment as shown here: :ref:`same_env`. +.. hint:: -#. **On ROBOTONT on-board computer** or on **on PC** run the following command: + ROS packages installed from apt are only available **in terminals where the ROS environment has been sourced**. + To use these packages, you must first source the general ROS 2 environment: .. code-block:: bash - - rosrun teleop_twist_keyboard teleop_twist_keyboard.py - or + source /opt/ros/jazzy/setup.bash + +#. Install teleop twist keyboard from apt: .. code-block:: bash - - roslaunch demo_teleop teleop_keyboard.launch + + sudo apt install ros-jazzy-teleop-twist-keyboard + +#. 
(Optional) Connect the robot and PC with the same subnet (see :ref:`same_env`). + +Controlling the robot +~~~~~~~~~~~~~~~~~~~~~~ + +#. **In Terminal** (on the robot's on-board computer or another PC, if distributed ROS is set up): + + .. code-block:: bash + + ros2 run teleop_twist_keyboard teleop_twist_keyboard #. Use the following keys to move the robot: - .. image:: /files/pictures/twist_keys.png - :width: 400 + .. image:: /pictures/teleop_twist_terminal.png + :width: 100% + .. warning:: - .. warning:: From this point beyond, you are able to drive the robot with a keyboard. Should you loose control over the robot, do one of the following + From this point beyond, you are able to drive the robot with a keyboard. Should you lose control over the robot, do one of the following: - * PRESS "k" TO STOP THE ROBOT! - * PRESS THE EMERGENCY SWITCH ON THE ROBOT. + * Press "k" to stop the robot + * Press the emergency stop button on the robot - .. hint:: Notice that the teleop node receives keypresses only when the terminal window is active. + .. hint:: Note that teleop only receives keypresses when the terminal window is active (in focus). .. tip:: Use :code:`CTRL + C` to stop the node. +Controlling the robot using a gamepad +-------------------------------------- + +Setup +~~~~~~ + +Connecting a controller +*********************** + +.. hint:: + + Before installing any packages from apt, make sure existing packages are up-to-date: + + .. code-block:: bash + + sudo apt update && sudo apt upgrade -y + +#. Install ``bluetooth``, ``bluez`` and ``bluez-tools`` from apt + + .. code-block:: bash + + sudo apt install bluetooth bluez bluez-tools + +#. Put your controller into pairing mode + + .. note:: + + ``demo_teleop`` package includes the configuration file for `DualSense® `__ controller. 
+ + To put the DualSense controller into pairing mode: + + * Hold the **PS** button and the **Create** button down for a few seconds + * The light bar will start **rapidly flashing blue**, which indicates that the controller is in pairing mode + +#. **In Terminal**, start the Bluetooth CLI tool: + + .. code-block:: bash + + bluetoothctl + +#. Turn on the Bluetooth agent and scanning: + + .. code-block:: bash + + power on + agent on + scan on + +#. Wait for your controller to appear + + .. note:: + + It should look something like: ``Device XX:XX:XX:XX:XX:XX Wireless Controller`` + +#. Pair and connect the controller: + + Replace ``XX:XX:XX:XX:XX:XX`` with your controller's MAC address: + + .. code-block:: bash + + pair XX:XX:XX:XX:XX:XX + connect XX:XX:XX:XX:XX:XX + trust XX:XX:XX:XX:XX:XX + +#. Stop scanning and exit the tool: + + .. code-block:: bash + + scan off + exit + +Dependencies +************ + +.. hint:: + + ROS packages installed from apt are only available **in terminals where the ROS environment has been sourced**. + To use these packages, you must first source the general ROS 2 environment: + + .. code-block:: bash + + source /opt/ros/jazzy/setup.bash + +#. Install ``joy`` from apt: + + .. code-block:: bash + + sudo apt install ros-jazzy-joy + +#. Navigate to your colcon workspace: + + .. code-block:: bash + + cd ~/<your_workspace>/src + +#. Clone the ``demo_teleop`` package: + + .. code-block:: bash + + git clone https://github.com/robotont-demos/demo_teleop.git + +#. Build the package: + + .. code-block:: bash + + colcon build --packages-select demo_teleop + +#. (Optional) Connect the robot and PC to the same subnet (see :ref:`same_env`). + +Controlling the robot +~~~~~~~~~~~~~~~~~~~~~~ + +#. **In Terminal** (on the robot's on-board computer or another PC, if distributed ROS is set up): + + .. code-block:: bash + + ros2 launch demo_teleop teleop_joy.launch.py + +#. The robot can be controlled using the joysticks: + + ..
image:: /pictures/dualsense_conf.png + :width: 100% + +.. warning:: + + From this point beyond, you are able to drive the robot with a controller. Should you lose control over the robot, do one of the following: + + * Use :code:`CTRL + C` to stop the node. + * Press the emergency stop button on the robot Controlling the robot using a web interface ------------------------------------------- -#. Make sure that the user device and Robot device are connected to the same wifi router +.. important:: + + Make sure that the user's device and the robot are connected to the same subnet and are visible to one another (see :ref:`verifying_communication`). -#. Open the following URL in the user device browser, replacing the IP address with the robot's IP address: +#. Open the following URL in your web browser (replace `` with the actual IP address of your robot): .. code-block:: bash - http://Robot-IP:3000/ + http://:3000/ -You should see the following page: + You should see the following page: - .. image:: /files/pictures/webapp_ok_step.png - :width: 400 + .. image:: /pictures/webapp_ok_step.png + :width: 100% #. Click OK to close the connection status dialog -#. Now you can teleoperate the robot using the touch joystick button as well as see the camera feed and depthcloud. +#. You can now control the robot using the on-screen joystick and view both the camera feed and depth cloud in your browser. - .. image:: /files/pictures/webapp3.png - :width: 400 + .. image:: /pictures/webapp3.png + :width: 100% diff --git a/source/teleop_simu.rst b/source/teleop_simu.rst index 83822fc..af4ef06 100644 --- a/source/teleop_simu.rst +++ b/source/teleop_simu.rst @@ -1,44 +1,71 @@ ######################################## -Controlling the simulated robot on RViz +Controlling the simulated robot in RViz2 ######################################## Setup ------ -#. Install teleop twist keyboard +.. hint:: + + Before installing any packages from apt, make sure existing packages are up-to-date: .. 
code-block:: bash - - sudo apt update - sudo apt install ros-noetic-teleop-twist-keyboard -#. Start the driver + sudo apt update && sudo apt upgrade -y + +.. hint:: + + ROS packages installed from apt are only available **in terminals where the ROS environment has been sourced**. + To use these packages, you must first source the general ROS 2 environment: + + .. code-block:: bash + + source /opt/ros/jazzy/setup.bash + +#. Install teleop twist keyboard from apt: + + .. code-block:: bash + + sudo apt install ros-jazzy-teleop-twist-keyboard + +#. Navigate to your colcon workspace: .. code-block:: bash - - roslaunch robotont_driver fake_driver.launch -#. Set the fixed frame to :code:`odom` in RViz + cd ~//src - .. image:: /files/pictures/frame_odom_img.png - :width: 400 +#. Clone the ``robotont_driver`` package: + + .. code-block:: bash + + git clone https://github.com/robotont/robotont_driver.git + +#. Build the package: + + .. code-block:: bash + + colcon build --packages-select robotont_driver + +#. Start the driver: + + .. code-block:: bash + + ros2 launch robotont_driver fake_driver_launch.py Controlling the robot using teleop twist keyboard ------------------------------------------------- -#. Open a new terminal window - -#. Run the following command: +#. Start the ``teleop_twist_keyboard`` node: .. code-block:: bash - rosrun teleop_twist_keyboard teleop_twist_keyboard.py + ros2 run teleop_twist_keyboard teleop_twist_keyboard #. Use the following keys to move the robot: - .. image:: /files/pictures/twist_keys.png - :width: 400 + .. image:: /pictures/teleop_twist_terminal.png + :width: 100% - .. hint:: Notice that the teleop node receives keypresses only when the terminal window is active. + .. hint:: Note that teleop only receives keypresses when the terminal window is active (in focus). .. tip:: Use :code:`CTRL + C` to stop the node. \ No newline at end of file