diff --git a/.komment/00000.json b/.komment/00000.json new file mode 100644 index 0000000..c4d41d2 --- /dev/null +++ b/.komment/00000.json @@ -0,0 +1,3565 @@ +[ + { + "name": "specificworker.py", + "path": "components/camera_kinova/src/specificworker.py", + "content": { + "structured": { + "description": "A simple camera module for RoboComp, a software framework for robotics and computer vision tasks. The module provides two queues, color_queue and depth_queue, where color images and depth maps are stored respectively. It defines three methods, startup_check, video_color_thread, and video_depth_thread, which handle the startup checks and the color and depth capture threads. It also implements the getAll, getDepth, and getImage methods of the CameraRGBDSimple interface, which give access to the images and depth maps stored in the queues. The implementation uses Python's threading module to create separate threads for color and depth capture, and the QThreadPool class to manage the execution of these threads.", + "items": [ + { + "id": "0a7c820c-0c8f-a08a-8845-37a61b4c82f0", + "ancestors": [], + "description": "Manages two video streams (color and depth) from a robotic camera, queues the frames for processing, and provides methods to retrieve the frames as `TImage` or `TDepth` objects.", + "attributes": [ + { + "name": "Period", + "type_name": "int", + "description": "Interval in milliseconds (100) between consecutive calls to the worker's compute slot." + }, + { + "name": "hide", + "type_name": "instance", + "description": "Used to hide the worker's GUI element when it is not needed." + }, + { + "name": "depth_queue", + "type_name": "queue.Queue", + "description": "Used to store depth images read from the depth capture device." + }, + { + "name": "color_queue", + "type_name": "queue.Queue", + "description": "Used to store color frames read from the color capture device." + }, + { + "name": "color_stream", + "type_name": "cv2.VideoCapture", + "description": "Used to capture the color video stream." + }, + { + "name": "depth_stream", + "type_name": "cv2.VideoCapture", + "description": "Used to capture the depth stream from an RTSP source." + }, + { + "name": "color_thread", + "type_name": "threading.Thread", + "description": "Thread that runs in parallel with the main thread of the program, handling the camera's color stream." + }, + { + "name": "video_color_thread", + "type_name": "threading.Thread", + "description": "Responsible for processing the camera's color stream in a separate thread." + }, + { + "name": "depth_thread", + "type_name": "threading.Thread", + "description": "Used to start a separate thread for processing depth images." + }, + { + "name": "video_depth_thread", + "type_name": "threading.Thread", + "description": "Used to run a separate thread for reading depth frames from the camera." + }, + { + "name": "startup_check", + "type_name": "method", + "description": "Called during initialization when requested. It tests that the RoboCompCameraRGBDSimple interface types are available." + }, + { + "name": "timer", + "type_name": "QTimer", + "description": "Used to schedule a call to compute every `Period` milliseconds to update the worker's state." + }, + { + "name": "compute", + "type_name": "method", + "description": "Slot function called by the timer every `Period` milliseconds."
+ } + ], + "name": "SpecificWorker", + "location": { + "start": 39, + "insert": 40, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 136, + "docLength": null + }, + { + "id": "a9028f1a-0e59-549d-c446-43861bb6d181", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Initializes member variables and starts two threads that handle the color and depth video streams, respectively.", + "params": [ + { + "name": "proxy_map", + "type_name": "dict", + "description": "Mapping of the RoboComp interface proxies the worker uses to communicate with other components." + }, + { + "name": "startup_check", + "type_name": "bool", + "description": "When True, runs the interface startup checks instead of the normal processing loop." + } + ], + "returns": null, + "name": "__init__", + "location": { + "start": 40, + "insert": 41, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 32, + "docLength": null + }, + { + "id": "f4f54540-8f58-0384-9e47-9e50e663026b", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Processes the latest color and depth images, `pix_color` and `pix_depth`, scaling them to the size of the widgets `ui.color` and `ui.depth`. If the widget is hidden, it simply returns `True`.", + "params": [], + "returns": null, + "name": "compute", + "location": { + "start": 86, + "insert": 105, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 10, + "docLength": null + }, + { + "id": "e61b1b13-c954-3aa2-a34d-23b9ea374543", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Reads frames from an opened camera and adds them to a queue for processing, while handling exceptions and keyboard interrupts.", + "params": [ + { + "name": "cap", + "type_name": "cv2.VideoCapture", + "description": "The video capture object from which color frames are read." + }, + { + "name": "inqueue", + "type_name": "queue.Queue", + "description": "Used to store the frames read from the video capture device while the thread runs." + } + ], + "returns": null, + "name": "video_color_thread", + "location": { + "start": 108, + "insert": 109, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "efac0efa-fda3-7487-6e40-afe9f25d2855", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Reads frames from a given video capture object `cap` and enqueues them in a queue `inqueue`. It repeatedly reads frames until `cap` is closed, then releases the resource.", + "params": [ + { + "name": "cap", + "type_name": "cv2.VideoCapture", + "description": "Used to capture video frames from a video file or device." + }, + { + "name": "inqueue", + "type_name": "queue.Queue", + "description": "Used to store frames read from the video capture device." + } + ], + "returns": null, + "name": "video_depth_thread", + "location": { + "start": 126, + "insert": 128, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "bf555c6c-48eb-b1bb-b844-94a5f66a3632", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Tests that the `ifaces.RoboCompCameraRGBDSimple` interface types can be instantiated. 
These include the `TImage`, `TDepth`, and `TRGBD` classes; it then schedules `QApplication.instance().quit()`.", + "params": [], + "returns": null, + "name": "startup_check", + "location": { + "start": 145, + "insert": 146, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 8, + "docLength": null + }, + { + "id": "af7fdef5-f56b-93a6-4642-3eccc9b6d21c", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Retrieves the latest RGB-D data stored in the queues. It returns the entire RGB-D data or `None` if the queue is empty.", + "params": [ + { + "name": "camera", + "type_name": "str", + "description": "Name of the RoboComp camera whose RGB-D data is requested." + } + ], + "returns": { + "type_name": "ifaces.RoboCompCameraRGBDSimple.TRGBD", + "description": "An RGB-D structure holding the depth and color information of the camera." + }, + "name": "CameraRGBDSimple_getAll", + "location": { + "start": 162, + "insert": 163, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 12, + "docLength": null + }, + { + "id": "f6cae19f-c8e7-aeae-4247-463fa32264ff", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Retrieves depth data from the queue and returns it in the form of a `TDepth` object with its dimensions and depth values.", + "params": [ + { + "name": "camera", + "type_name": "str", + "description": "Name of the camera from which the depth image is requested." + } + ], + "returns": { + "type_name": "ifaces.RoboCompCameraRGBDSimple.TDepth", + "description": "A struct containing the height, width and depth values of an image." + }, + "name": "CameraRGBDSimple_getDepth", + "location": { + "start": 179, + "insert": 180, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 10, + "docLength": null + }, + { + "id": "e08fd8a1-3807-47af-684b-32c2b38826d5", + "ancestors": [ + "0a7c820c-0c8f-a08a-8845-37a61b4c82f0" + ], + "description": "Retrieves an image from the color queue and returns a `TImage` object with the appropriate dimensions and contents.", + "params": [ + { + "name": "camera", + "type_name": "str", + "description": "Name of the camera from which the color image is requested." + } + ], + "returns": { + "type_name": "ifaces.RoboCompCameraRGBDSimple.TImage", + "description": "A struct containing the height, width, depth and image values of an RGB-D camera frame." + }, + "name": "CameraRGBDSimple_getImage", + "location": { + "start": 193, + "insert": 194, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 10, + "docLength": null + } + ] + } + } + }, + { + "name": "specificworker.py", + "path": "pybullet_controller/src/specificworker.py", + "content": { + "structured": { + "description": "A set of classes and interfaces for interacting with various robots and simulation environments. It includes a `Robot` class that provides methods for moving a robot's end effector based on user input from a joystick, as well as methods for simulating the robot's movements using a camera and gripper. 
Additionally, it defines an interface for interacting with a Kinova arm robot, which includes methods for controlling the arm's joints and gripper, as well as accessing the arm's current position and orientation.", + "items": [ + { + "id": "b4976dda-f74b-338c-d14d-ce9290799753", + "ancestors": [], + "description": "Stores angles for both Kinova and Bullet physics engines, allowing them to be used interchangeably for robotic simulations.", + "attributes": [ + { + "name": "kinovaAngles", + "type_name": "List[float]", + "description": "Used to store a list of angles for Kinova joints." + }, + { + "name": "pybulletAngles", + "type_name": "List[float]", + "description": "Used to store the angles associated with PyBullet joints." + } + ], + "name": "JointData", + "location": { + "start": 67, + "insert": 68, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 4, + "docLength": null + }, + { + "id": "57ec70e8-ad53-3284-3543-53f9dc60f273", + "ancestors": [ + "b4976dda-f74b-338c-d14d-ce9290799753" + ], + "description": "Initializes two lists: `kinovaAngles` for storing kinematic angles and `pybulletAngles` for storing bullet angles.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Import JointData class\nfrom joint_data import JointData\n\n# Create a new instance of the JointData class\njoint_data = JointData()\n\n# Initialize kinovaAngles and pybulletAngles with empty lists\njoint_data.kinovaAngles = []\njoint_data.pybulletAngles = []\n\n# Add some values to kinovaAngles and pybulletAngles\njoint_data.kinovaAngles.append(10)\njoint_data.kinovaAngles.append(20)\njoint_data.pybulletAngles.append(30)\njoint_data.pybulletAngles.append(40)\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 68, + "insert": 69, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 3, + "docLength": null + }, + { + "id": "0458dba0-d868-2bb3-dd49-5e1ffc87b14d", + "ancestors": [], + "description": "Manages communication with a robot arm and a camera, processing user inputs and sending commands to the robot and the camera to perform specific tasks, such as moving the robot arm or capturing images.", + "attributes": [ + { + "name": "Period", + "type_name": "float|int", + "description": "0 by default, which means that the worker will execute the specified block of code every time it receives a message from the Robot Operating System (ROS) for the current task." + }, + { + "name": "rgb", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the RGB values of a point in 3D space." + }, + { + "name": "timestamp", + "type_name": "float|int", + "description": "Used to store the timestamp of the worker's last execution." + }, + { + "name": "startup_check", + "type_name": "bool|str", + "description": "Used to check if the worker has started successfully or not. It can be accessed and set through the `startup_check()` method, which returns a str representing the status of the startup check (either \"success\" or \"failure\")." + }, + { + "name": "physicsClient", + "type_name": "RoboCompPhysicsClient|None", + "description": "Used to handle physics-related tasks, such as getting robot positions and orientations, and controlling the movement of the robot." + }, + { + "name": "plane", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the orientation of the robot's end effector relative to its base." 
+ }, + { + "name": "table_id", + "type_name": "int|str", + "description": "Used to store a unique identifier for each worker, which can be used to identify the worker in different contexts." + }, + { + "name": "robot_urdf", + "type_name": "Tuple[str,str,str]", + "description": "Used to store the robot's URDF (Universal Robot Description Format) file path. It can be used to load the robot's kinematic information and create a robot arm model." + }, + { + "name": "robot_launch_pos", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the initial position of the robot's end effector." + }, + { + "name": "robot_launch_orien", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the current orientation of the robot's end effector in 3D space. It is updated based on the `data` attribute received from the RoboComp server." + }, + { + "name": "end_effector_link_index", + "type_name": "int", + "description": "Used to specify the index of the end effector link in the robot's skeleton, which determines the position and orientation of the end effector in space." + }, + { + "name": "robot_id", + "type_name": "int|str", + "description": "Used to identify a specific robot worker within a worker pool." + }, + { + "name": "home_angles", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the default positions of the end effector's joints when the worker is in home position." + }, + { + "name": "observation_angles", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the current angles of the robot's end effector with respect to its base." + }, + { + "name": "table_center", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the center position of the worker's end effector in the robot's coordinate system." + }, + { + "name": "cylinderId", + "type_name": "int|str", + "description": "A unique identifier for each worker, used to differentiate between workers in robotics simulations." + }, + { + "name": "threadKinovaAngles", + "type_name": "TData", + "description": "A reference to the current joystick angles of the Kinova arm, which are updated in real-time through a separate thread." + }, + { + "name": "readDataFromProxy", + "type_name": "Dict[str,Any]", + "description": "Used to read data from a Proxy object, which represents a robotic arm, and updates the attributes of the worker based on the received data." + }, + { + "name": "colorKinova", + "type_name": "TImage|TRGBD", + "description": "Used to store the color image captured by the Kinova gripper." + }, + { + "name": "depthKinova", + "type_name": "TImage|TData", + "description": "Used to store the depth image of the Kinova arm, which can be obtained through the `kinovaarm_proxy` attribute." + }, + { + "name": "target_angles", + "type_name": "Tuple[float]", + "description": "Used to store the target angles for each joint of the robot arm in a specific worker mode." + }, + { + "name": "target_position", + "type_name": "List[float]", + "description": "Used to store the desired position of the robot's end effector for a specific worker." + }, + { + "name": "target_orientation", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the target orientation of the robot after the user has made an input through the GUI. It is updated based on the user's input and the current mode of the robot." 
+ }, + { + "name": "target_velocities", + "type_name": "Tuple[float,float,float]", + "description": "Used to store the target velocities for the robot's joints when moving to a specific pose. It contains the target velocities for each joint in the robot's arm." + }, + { + "name": "joy_selected_joint", + "type_name": "TData", + "description": "A parameter that allows to select which joint of the robot will move according to the joy input, in case of a Kinova arm." + }, + { + "name": "move_mode", + "type_name": "int|str", + "description": "Used to store the current move mode of the robot, which can be one of five options: \"home\", \"moving\", \"pausing\", \"resuming\", or \"stopping\"." + }, + { + "name": "ext_joints", + "type_name": "Tuple[RoboCompCameraRGBDSimpleTImage,RoboCompKinovaArmTPose,]", + "description": "Used to store the joint angles of the robot arm and the camera poses." + }, + { + "name": "kinovaarm_proxy", + "type_name": "RoboCompKinovaArmTPose|RoboCompKinovaArmTJointAngles", + "description": "Used to interact with the Kinova arm. It provides access to the arm's joint angles and movement." + }, + { + "name": "ext_gripper", + "type_name": "Union[float,str]", + "description": "Used to store the gripper extension value." + }, + { + "name": "timer", + "type_name": "float|int", + "description": "Used to manage the execution time of the worker, allowing for flexible and efficient handling of tasks." + }, + { + "name": "compute", + "type_name": "callable|func", + "description": "Used to define a function that performs specific computations based on the values of the worker's attributes." + }, + { + "name": "joint_speeds", + "type_name": "List[float]", + "description": "A list of joint speed values for each joint of the robot in the range of -1 to 1, representing the desired joint angles for the robot to move to." + }, + { + "name": "speeds", + "type_name": "Tuple[float,float,float]", + "description": "Represented as a list of 3 float values representing the speed values for the x, y, and z axes of the robot." + }, + { + "name": "angles", + "type_name": "Tuple[float,float,float]", + "description": "3D angles representation for the robot's joints." + }, + { + "name": "gains", + "type_name": "float|int", + "description": "Used to control the speed of movements of the robotic arm. It sets the gain values for each axis, which determine how quickly the arm moves in response to input from the user." + }, + { + "name": "posesTimes", + "type_name": "Tuple[float,float,float]", + "description": "Stored as a list of tuples where each tuple contains the position, orientation, and time of a specific pose." + }, + { + "name": "poses", + "type_name": "Tuple[RoboCompCameraRGBDSimpleTPose]", + "description": "Used to store the current pose of the camera in the world coordinate system." + }, + { + "name": "calibrator", + "type_name": "Tuple[str,str,str]", + "description": "Used to store the calibration parameters for a specific robotic arm, including the gripper position, home position, and joint angles." + }, + { + "name": "cameraKinovaTimer", + "type_name": "float|int", + "description": "Used to track the time taken by the worker's camera and kinova arm to perform their tasks." + }, + { + "name": "readKinovaCamera", + "type_name": "Callable[[Tuple[str,str,str],Tuple[float,float,float]],Tuple[str,str,str]]", + "description": "Used to read data from a Kinova camera. It takes in the camera's API endpoint and returns a tuple containing the camera's image, depth map, and RGBD data." 
+ }, + { + "name": "showKinovaStateTimer", + "type_name": "float", + "description": "0 by default, which means that the worker will update the kinova state every 1 second." + }, + { + "name": "showKinovaAngles", + "type_name": "bool", + "description": "Used to control whether the angles of the kinova arm should be shown or not during the execution of the worker." + }, + { + "name": "gainsTimer", + "type_name": "float|int", + "description": "Used to store the time it takes for the worker to perform its tasks, which allows for more precise control over the timing of the worker's actions." + }, + { + "name": "updateGains", + "type_name": "Dict[str,float]", + "description": "Used to update the gains of the robot's actuators based on user inputs from a joystick." + }, + { + "name": "aamed", + "type_name": "Tuple[str,float,float,float]", + "description": "Used to store the current state of the robot's axes (X, Y, Z, and gripper) in the form of a 3D point." + }, + { + "name": "drawGraphTimer", + "type_name": "float|int", + "description": "Used to control the interval at which the worker updates its graphical representation during simulation." + }, + { + "name": "drawGraph", + "type_name": "Callable[Tuple[str,],None]", + "description": "Defined as a function that takes two arguments: the name of the worker and the graph to be drawn. The function returns None." + }, + { + "name": "pybullet_offset", + "type_name": "Tuple[float,float,float]", + "description": "Used to specify the offset for the robot's end effector (gripper) when it is in \"home\" position." + }, + { + "name": "fig", + "type_name": "Union[float,int]", + "description": "Used to store the current position of the robot's gripper." + }, + { + "name": "ax", + "type_name": "Union[float,int]", + "description": "Used to store the value of a specific axis of a robot's end effector, such as the X, Y, or Z axis." + }, + { + "name": "jointsErrorMap", + "type_name": "Dict[str,float]", + "description": "Used to store the error maps for each joint. It maps joint names to their corresponding error values, which are used to calculate the actual position of the robot's end effector based on the desired position and the current state of the robot." + }, + { + "name": "left_force_series", + "type_name": "List[float]", + "description": "A series of left forces applied to the robot's end effector during a specific task or operation." + }, + { + "name": "right_force_series", + "type_name": "List[float]", + "description": "A series of right-hand forces applied to the robot's end effector, each force being applied over a specific time interval." + }, + { + "name": "graphTimes", + "type_name": "List[float]", + "description": "Used to store the time points at which the worker's state is graphed, allowing for visualization of the worker's behavior over time." + }, + { + "name": "hide", + "type_name": "bool", + "description": "Used to hide or show the worker's actions and variables for debugging purposes." + } + ], + "name": "SpecificWorker", + "location": { + "start": 72, + "insert": 73, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 1054, + "docLength": null + }, + { + "id": "f4001f82-80f9-52a7-f048-0e3b9a68aa00", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Initializes variables and timers for controlling the simulation, camera calibration, and graph drawing. 
It also sets up connections to the PyBullet environment and reads joint angles from the Kinova arm.", + "params": [ + { + "name": "proxy_map", + "type_name": "Dict[str, int]", + "description": "Used to map joint indices from the PyBullet API to the corresponding joint indices of the Kinect sensor. It helps convert data between different frameworks and devices." + }, + { + "name": "startup_check", + "type_name": "bool", + "description": "Used to check if the robot's kinematic tree has been loaded successfully, before continuing with the initialization process." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Example usage of the SpecificWorker class\nfrom pybullet_envs import proxy_map\nimport time\n\nproxy_map = {\"physicsClient\": p.connect(p.GUI)} # Connect to the GUI for visualization\nstartup_check = False # Set to True for startup check\n\nworker = SpecificWorker(proxy_map, startup_check)\n\n# Do some work with the worker object\nfor i in range(10):\n worker.work()\n", + "description": "\nIn this example, the user creates an instance of the `SpecificWorker` class and passes it a dictionary of proxy objects to be used for communication with PyBullet. The `startup_check` parameter is set to `False`, indicating that the startup check should not be performed. Once created, the user can call various methods on the worker object to perform work using PyBullet, such as loading URDFs and simulating dynamics." + }, + "name": "__init__", + "location": { + "start": 73, + "insert": 74, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 169, + "docLength": null + }, + { + "id": "5731826b-6e1a-3da3-624f-685f2867a779", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Sets parameters passed as an argument to the method.", + "params": [ + { + "name": "params", + "type_name": "Union[str, dict]", + "description": "Used to set parameters for the function." + } + ], + "returns": { + "type_name": "bool", + "description": "1 when the parameters are set successfully, and 0 otherwise." + }, + "usage": { + "language": "python", + "code": "import SpecificWorker\n\n# Instantiate the SpecificWorker class and pass in the proxy_map argument\nworker = SpecificWorker(proxy_map)\n\n# Set parameters for the worker using the setParams method\nworker.setParams({'Period': 100, 'rgb': [255, 0, 0]})\n", + "description": "" + }, + "name": "setParams", + "location": { + "start": 283, + "insert": 289, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "98ec814d-ce22-26a5-3f46-12ec5d88fd44", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Computes the joint angles and velocities based on the observation angles and moves the robot using PyBullet and Kinova's arm. 
It also updates the error map and graph times.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize the SpecificWorker class and its parameters\nworker = SpecificWorker(proxy_map)\nworker.Period = 50\n\n# Perform a simulation of the robot moving to a target position\nworker.moveKinovaWithAngles([1, 2, 3])\n\n# Access data from the compute method using getters\nprint(\"Current joint angles: \", worker.jointsState)\n", + "description": "" + }, + "name": "compute", + "location": { + "start": 292, + "insert": 295, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 351, + "docLength": null + }, + { + "id": "3b53545e-3235-fda2-b240-d9ededc60ea1", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Tests various objects and functions from different interfaces, including ifaces.RoboCompCameraRGBDSimple, ifaces.RoboCompKinovaArm, and ifaces.RoboCompJoystickAdapter. It covers testing points, images, depth, RGBD data, poses, grippers, joints, joint speeds, and button and axis parameters.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "if __name__ == \"__main__\":\n from ifaces import RoboCompCameraRGBDSimple, RoboCompKinovaArm\n\n # Create a proxy map for the robot and camera interfaces\n proxy_map = {RoboCompCameraRGBDSimple: {}, RoboCompKinovaArm: {}}\n\n # Instantiate a specific worker that subclasses GenericWorker and has the startup_check method\n specific_worker = SpecificWorker(proxy_map, startup_check=True)\n", + "description": "" + }, + "name": "startup_check", + "location": { + "start": 685, + "insert": 686, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 30, + "docLength": null + }, + { + "id": "405310da-51d0-e88e-4142-2deb0fb5cf83", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Modifies the target angles of a robot's gripper to move it to a desired position, using PyBullet library's `setJointMotorControl2` function.", + "params": [ + { + "name": "distance", + "type_name": "float", + "description": "Used to set the desired angle for the gripper's joints." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "import time\nfrom specific_worker import SpecificWorker\n\n# create a new instance of the SpecificWorker class\nspecific_worker = SpecificWorker()\n\n# specify the distance to open or close the gripper\ndistance = 0.15\n\n# call the changePybulletGripper method with the specified distance as an argument\nspecific_worker.changePybulletGripper(distance)\n", + "description": "" + }, + "name": "changePybulletGripper", + "location": { + "start": 716, + "insert": 717, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 12, + "docLength": null + }, + { + "id": "0a8ce233-df6f-7a81-d84d-f3bc68c79c72", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Detects contact points between two links and calculates the force acting on each link based on the contact forces.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "import pybullet as p\nfrom SpecificWorker import SpecificWorker\n\n# Create a new instance of the SpecificWorker class\nworker = SpecificWorker(proxy_map={})\n\n# Initialize the worker with the specific parameters needed for its operation\nworker.init(startup_check=False)\n\n# Use the detectContactPoints method to detect contact points in the simulation\ncontact_points = worker.detectContactPoints()\n", + "description": "" + }, + "name": "detectContactPoints", + "location": { + "start": 729, + "insert": 730, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 23, + "docLength": null + }, + { + "id": "3887c588-d301-30b3-394d-7c0acce87004", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Generates a graph in the current axis using Matplotlib library. It plots joint error values against time and labels them individually.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "from SpecificWorker import GenericWorker\n\n# Create an instance of the SpecificWorker class\nworker = SpecificWorker(proxy_map)\n\n# Set up the graph with the appropriate parameters and data\nworker.setUpGraph()\n\n# Plot the graph with the drawn data\nworker.drawGraph()\n", + "description": "" + }, + "name": "drawGraph", + "location": { + "start": 765, + "insert": 767, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 25, + "docLength": null + }, + { + "id": "1eb1aaaa-2ada-87b8-d440-892ca06c3a59", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Calculates the difference between the kinova target and the detected position of the robot's end effector using AAMED, and adjusts the position of the base to match the target position.", + "params": [], + "returns": { + "type_name": "float", + "description": "The error between the predicted position and the ground truth position, and a boolean value indicating whether a target was detected in the kinova camera stream." 
+ }, + "usage": { + "language": "python", + "code": "from SpecificWorker import SpecificWorker\n# create an instance of the class\nworker = SpecificWorker(proxy_map)\n# call the function with the required parameters and store its return value\nerror, kinova_target_detected = worker.correctTargetPosition()\n", + "description": "" + }, + "name": "correctTargetPosition", + "location": { + "start": 797, + "insert": 798, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 39, + "docLength": null + }, + { + "id": "da347eb2-d7d1-3caf-1f45-a52c09523698", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Sets up the toolbox environment, including adding kinematic objects, defining goal and end-effector axes, and initializing the worker's state variables.", + "params": [ + { + "name": "target_position", + "type_name": "Tuple[float, float, float]", + "description": "Used to specify the target position of the end effector (gripper) in the world coordinate system." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from my_package import SpecificWorker\n\n# Initialize the worker with a proxy map\nproxy_map = {\n 'kinova': {'model': 'KinovaGen3', 'pose': [0.5, 0.5, 0.2]},\n 'target': {'position': [-0.1, 0.4, 0.2]}\n}\nspecific_worker = SpecificWorker(proxy_map)\n", + "description": "\nIn this example, a proxy map is created to define the objects that are used in the toolbox initialization. The specific worker is then initialized using the proxy map." + }, + "name": "initialize_toolbox", + "location": { + "start": 837, + "insert": 839, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 33, + "docLength": null + }, + { + "id": "000bb5b6-5df2-54a7-5743-379e6411c981", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Computes the joint angles and velocities for a robot arm based on its current state and the desired position of the end effector. It uses the kinematic chain of the robot to compute the Jacobian matrix, and then solves a quadratic program (QP) to find the optimal joint angles that will reach the target position while satisfying constraints on the joint velocities and accelerations.", + "params": [ + { + "name": "target_position", + "type_name": "Tuple[float, float, float]", + "description": "A desired position for the end effector (EE) to reach." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the SpecificWorker class and initialize it\nworker = SpecificWorker(proxy_map)\n\n# Call the toolbox_compute method with a target position as input\ntarget_position = [0, 0, 0.5]\nworker.toolbox_compute(target_position)\n", + "description": "\nThis code would compute the end-effector pose and velocity for the specific worker instance using the target position specified by the user." + }, + "name": "toolbox_compute", + "location": { + "start": 884, + "insert": 886, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 56, + "docLength": null + }, + { + "id": "e25ca2cc-3232-f280-284b-fb48ec39a1ec", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Reads camera data from a PyBullet simulation and processes it to generate an RGB image that is displayed in a window.", + "params": [], + "returns": { + "type_name": "float", + "description": "The camera image as a numpy array." 
+ }, + "usage": { + "language": "python", + "code": "from specific_worker import SpecificWorker\nimport time\n\n# Create a new instance of SpecificWorker\nspecific_worker = SpecificWorker(proxy_map={})\n\n# Initialize the worker's physicsClient\nspecific_worker.physicsClient = p.connect(p.GUI)\np.setAdditionalSearchPath(pybullet_data.getDataPath())\np.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)\np.setGravity(0, 0, -9.81)\n\np.setRealTimeSimulation(1)\n\nflags = p.URDF_USE_INERTIA_FROM_FILE\np.resetDebugVisualizerCamera(cameraDistance=1.5, cameraYaw=50, cameraPitch=-35,\n cameraTargetPosition=[0, 0, 0.5])\n\n# Load the plane and table URDFs\nspecific_worker.plane = p.loadURDF(\"./URDFs/plane/plane.urdf\")\nspecific_worker.table_id = p.loadURDF(\"./URDFs/table/table.urdf\", basePosition=[0, 0, 0],\n baseOrientation=p.getQuaternionFromEuler([0, 0, 0]))\n\n# Create a new thread for the SpecificWorker\nspecific_worker_thread = Thread(target=specific_worker.run)\nspecific_worker_thread.start()\n\nwhile True:\n # Get the latest image from the camera\n rgb, timestamp = specific_worker.read_camera_fixed()\n\n # Do something with the image (e.g., display it using OpenCV)\n cv2.imshow(\"Pybullet\", rgb)\n cv2.waitKey(3)\n\n # Sleep for a short period to avoid overloading the CPU\n time.sleep(0.05)\n", + "description": "\nThis code creates an instance of SpecificWorker, initializes its physicsClient, loads URDFs for the plane and table, and creates a new thread for the worker. It then enters an infinite loop that calls read_camera_fixed to get the latest image from the fixed camera in the robot. Each iteration of the loop displays the image using OpenCV and sleeps for 0.05 seconds to avoid overloading the CPU." + }, + "name": "read_camera_fixed", + "location": { + "start": 1019, + "insert": 1020, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 44, + "docLength": null + }, + { + "id": "491a3751-960e-07af-9f4d-70042d1589bd", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Reads camera data from Kinova, including depth and color images, and appends them to instance variables `depthKinova` and `colorKinova`, respectively.", + "params": [], + "returns": { + "type_name": "bool", + "description": "True if the operation was successful, and False otherwise." 
+ }, + "usage": { + "language": "python", + "code": "import numpy as np\nimport cv2\nfrom SpecificWorker import SpecificWorker\n\n# Initialize the worker\nworker = SpecificWorker(proxy_map)\n\nwhile True:\n # Read data from the camera\n success = worker.readKinovaCamera()\n if not success:\n break\n\n # Process the data\n depthImage, timestamp = worker.depthKinova[-1]\n kinovaImage, timestamp = worker.colorKinova[-1]\n\n # Display the images\n cv2.imshow(\"Depth Image\", depthImage)\n cv2.imshow(\"Color Image\", kinovaImage)\n\n if cv2.waitKey(1) == ord('q'):\n break\n", + "description": "" + }, + "name": "readKinovaCamera", + "location": { + "start": 1090, + "insert": 1091, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 15, + "docLength": null + }, + { + "id": "41692b56-725a-1fae-014a-0bbc35ef1b0c", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Prints information about the kinematics of the robot, including joint angles, torques, and distance of the gripper.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# In this case, we are using the SpecificWorker class to connect to a robot and get its joint angles.\nworker = SpecificWorker(proxy_map)\nworker.showKinovaAngles()\n", + "description": "" + }, + "name": "showKinovaAngles", + "location": { + "start": 1106, + "insert": 1107, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "8f011b02-6a36-028f-794a-017fd69d6a57", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Sets the target velocities of joints external to the robot and then converts them from degrees to radians before setting them as the motor control targets for the robot's joints using PyBullet's `setJointMotorControl2` function.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "import pybullet as pb\nfrom specific_worker import SpecificWorker\n\n# Initialize SpecificWorker class\nspecific_worker = SpecificWorker(proxy_map)\n\n# Load URDFs for robot and plane\nspecific_worker.loadRobot()\nspecific_worker.loadPlane()\n\n# Set external joint velocities\nspecific_worker.ext_joints = [1, 2, 3]\nspecific_worker.target_velocities = [0.5, 0.7, -1.2]\n\n# Move pybullet with external velocities\nspecific_worker.movePybulletWithExternalVel()\n", + "description": "" + }, + "name": "movePybulletWithExternalVel", + "location": { + "start": 1120, + "insert": 1121, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 9, + "docLength": null + }, + { + "id": "91b52a60-8f4a-41b6-fa45-57aa47934a17", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Controls the movement of a robot using PyBullet, setting joint motor control values for each joint based on target velocities provided in a list.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of SpecificWorker class and initialize it with proxy_map\nspecific_worker = SpecificWorker(proxy_map)\n\n# Call the movePybulletWithToolbox method of SpecificWorker class\nspecific_worker.movePybulletWithToolbox()\n", + "description": "" + }, + "name": "movePybulletWithToolbox", + "location": { + "start": 1130, + "insert": 1132, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 5, + "docLength": null + }, + { + "id": 
"6ba9ff5c-1c67-b893-3f4d-f54a605d8ca8", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Continuously retrieves joint and gripper state data from a Kinova arm proxy, scales down the gripper distance by 80%, and sleeps for 0.05 seconds before repeating the process.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Import the SpecificWorker class from the file \"worker_specific.py\"\nfrom worker_specific import SpecificWorker\n\n# Create a new instance of the SpecificWorker class\nspecific_worker = SpecificWorker(proxy_map)\n\n# Call the readDataFromProxy function to retrieve the joints state and gripper state data\njoints, gripper = specific_worker.readDataFromProxy()\n\n# Use the retrieved data as needed\n# ...\n", + "description": "" + }, + "name": "readDataFromProxy", + "location": { + "start": 1137, + "insert": 1138, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 6, + "docLength": null + }, + { + "id": "f50e4a9b-d369-6f80-9142-62383e93a2f4", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Updates the joint angles of a Kinova arm based on provided angles, and then calls the `moveJointsWithAngle` method of the kinova arm proxy to execute the movements.", + "params": [ + { + "name": "angles", + "type_name": "np.array", + "description": "360-degree radian values representing the angles for each joint to move." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize the SpecificWorker class and its subclasses\nworker = SpecificWorker(proxy_map)\n\n# Set the joint angles for the Kinova arm\nangles = [0, 180, -90, 180, 0, 180]\n\n# Move the Kinova arm to the specified joint angles using the moveKinovaWithAngles method\nworker.moveKinovaWithAngles(angles)\n", + "description": "\nIn this example, an instance of the SpecificWorker class is created and its subclasses are initialized with a proxy_map object. The function moveKinovaWithAngles is then called on the worker object to set the joint angles for the Kinova arm. The user passes in a list of angles to be used as arguments for the method.\nNote that this example uses a simplified version of the SpecificWorker class and only includes the necessary lines of code to illustrate how the function would be used." 
+ }, + "name": "moveKinovaWithAngles", + "location": { + "start": 1146, + "insert": 1147, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "416ad103-ecf9-60bc-a146-ad08f8a60215", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Calculates and applies joint speeds for a Kinova arm based on joint angles and gains, and then moves the joints with those speeds using the `kinovaarm_proxy` object.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the SpecificWorker class\nworker = SpecificWorker(proxy_map)\n\n# Call the moveKinovaWithSpeeds method with desired joint speeds\njoint_speeds = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]\nworker.moveKinovaWithSpeeds(joint_speeds)\n", + "description": "" + }, + "name": "moveKinovaWithSpeeds", + "location": { + "start": 1151, + "insert": 1153, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 16, + "docLength": null + }, + { + "id": "2104bd70-2fbe-529c-e64c-792777d3de9e", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Updates the gains of its joints based on the difference between the current pose and the best recorded pose, with a limit on the speed of the joint's movement.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of SpecificWorker class and its attributes\nworker = SpecificWorker(proxy_map=proxy_map, startup_check=False)\nworker.Period = 50\nworker.rgb = []\nworker.timestamp = int(time.time()*1000)\n# Initialize physics client\nworker.physicsClient = p.connect(p.GUI)\n# Set up debug visualizer\np.setAdditionalSearchPath(pybullet_data.getDataPath())\np.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)\np.setGravity(0, 0, -9.81)\n# Enable real-time simulation\np.setRealTimeSimulation(1)\n# Load URDF file for plane and table\nworker.plane = p.loadURDF(\"./URDFs/plane/plane.urdf\")\nworker.table_id = p.loadURDF(\"./URDFs/table/table.urdf\", basePosition=[0, 0, 0], baseOrientation=[])\n# Update gains for the specific worker instance using its updateGains method\nworker.updateGains()\n", + "description": "" + }, + "name": "updateGains", + "location": { + "start": 1171, + "insert": 1172, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 17, + "docLength": null + }, + { + "id": "9e064b68-66e7-81a3-6546-ebd215baf6ce", + "ancestors": [ + "0458dba0-d868-2bb3-dd49-5e1ffc87b14d" + ], + "description": "Receives data from a joystick and sends it to the robot's actuators, adjusting the robot's position and orientation based on the data received.", + "params": [ + { + "name": "data", + "type_name": "dict", + "description": "Passed data from the joystick interface, which contains information about the current state of the \njoystick axes and buttons." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the SpecificWorker class and pass it a proxy map.\nworker = SpecificWorker(proxy_map)\n\n# Set up the joystick and start reading inputs.\nwhile True:\n # Read inputs from the joystick and send them to the robot using the JoystickAdapter_sendData function.\n worker.JoystickAdapter_sendData(inputs)\n\n # Perform other tasks on the robot, such as moving it or checking its state.\n robot_position = p.getBasePositionAndOrientation(robot)[0]\n", + "description": "" + }, + "name": "JoystickAdapter_sendData", + "location": { + "start": 1199, + "insert": 1200, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 112, + "docLength": null + } + ] + } + } + }, + { + "name": "specificworker.py", + "path": "components/kinova_controller/src/specificworker.py", + "content": { + "structured": { + "description": "A specific worker class that inherits from a generic worker class in the RoboComp framework. The specific worker class is designed for a Kinova Gen3 robot arm and uses various libraries such as NumPy, PySide2, and Kortex API to interact with the robot. It provides methods to get the joints' state, close or open the gripper, move the joints with speeds or angles, and set or get the center of tool.", + "items": [ + { + "id": "15f7ef8f-b227-caa4-ee4b-15855c361c8f", + "ancestors": [], + "description": "Is a worker thread that interacts with a Kinova Gen3 robotic arm and its gripper, providing methods to control the arm's movement, get its joints and gripper states, open and close the gripper, and retrieve tactile data from contact sensors.", + "attributes": [ + { + "name": "Period", + "type_name": "int", + "description": "1 by default. It likely represents the period at which a certain action or computation is performed, possibly tied to a timer event with a duration of 1 second." + }, + { + "name": "startup_check", + "type_name": "bool", + "description": "Set to True when called. It calls a series of tests on various interfaces, including RoboCompContactile.FingerTip and RoboCompKinovaArm.TPose." + }, + { + "name": "kinova", + "type_name": "KinovaGen3", + "description": "Used to interact with a kinova arm, such as getting its joints' state, moving its gripper, or setting its joint angles." + }, + { + "name": "flag", + "type_name": "bool", + "description": "Initialized to True in the `__init__` method. It is used as a control variable in the `compute` method, toggling its value each time the timer event occurs." + }, + { + "name": "timer", + "type_name": "QTimer", + "description": "Initialized with a periodic timer that calls the `compute` method at regular intervals specified by the `Period` attribute." + }, + { + "name": "compute", + "type_name": "QtCoreSlot|None", + "description": "Called when a timer timeout occurs. It gets joints data from KinovaGen3, calculates joint speeds, updates timestamps, and prints contactile data." + }, + { + "name": "joints", + "type_name": "ifacesRoboCompKinovaArmTJoints|None", + "description": "Initialized as None. It stores a list of joints information, including position, velocity, and force, retrieved from the KinovaGen3 robot arm." + }, + { + "name": "gripper", + "type_name": "ifacesRoboCompKinovaArmTGripper", + "description": "Updated in the `compute` method with the current state of the gripper, retrieved by calling the `get_gripper_state` method of the KinovaGen3 object." 
+ }, + { + "name": "speeds", + "type_name": "ifacesRoboCompKinovaArmTJointSpeeds", + "description": "7-element list, representing the speed of each joint of the Kinova arm, initialized with all elements set to zero." + }, + { + "name": "moveWithSpeed", + "type_name": "bool", + "description": "Set to False by default. When it is set to True, the worker moves the joints with speeds specified in the `speeds.jointSpeeds` list." + }, + { + "name": "timestamp", + "type_name": "int", + "description": "Initialized to the current time (in milliseconds) when the worker object is created. It is used to measure the elapsed time between timer events." + } + ], + "name": "SpecificWorker", + "location": { + "start": 42, + "insert": 43, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 139, + "docLength": null + }, + { + "id": "6b220d17-67a8-458f-c04e-604d3a18a759", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Initializes an instance with a proxy map and optional startup check. If the startup check is enabled, it performs some checks. Otherwise, it sets up a timer to run the compute function at regular intervals, initializes joints and gripper variables, and sets default speeds for the robot arm.", + "params": [ + { + "name": "proxy_map", + "type_name": "object", + "description": "Passed to the parent class `SpecificWorker` during initialization. It sets up the proxy mapping for the worker's interfaces, allowing it to interact with other components in the system." + }, + { + "name": "startup_check", + "type_name": "bool", + "description": "Optional, with default value False. Its presence determines whether to perform startup checks or initialize the Kinova robot directly. If set to True, startup checks are performed; otherwise, initialization proceeds as usual." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map, startup_check=True)", + "description": "" + }, + "name": "__init__", + "location": { + "start": 43, + "insert": 44, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 16, + "docLength": null + }, + { + "id": "7cf50f16-b1d4-a3ba-6243-4d4767639c1b", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Sets parameters and returns `True`. If any error occurs during the process, it does not handle or report it explicitly.", + "params": [ + { + "name": "params", + "type_name": "Any", + "description": "Intended to be used for setting or updating parameters in the class instance. The actual data type of `params` can vary depending on how it is used within the function." + } + ], + "returns": { + "type_name": "bool", + "description": "`True`. This indicates that the function execution has been successful and all parameters have been set correctly." 
+ }, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map, startup_check=True)\nparams = {'key': 'value'}\nresult = worker.setParams(params)", + "description": "" + }, + "name": "setParams", + "location": { + "start": 63, + "insert": 69, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "5e15a974-27b2-6a89-154b-0c78ecfeb432", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Retrieves joint positions, velocities, and torques from a kinova arm, sends gripper state to a RoboCompKinovaArm object, moves joints with specified speeds, updates timestamp, and returns True.", + "params": [], + "returns": { + "type_name": "bool", + "description": "`True`." + }, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map)\nworker.compute()", + "description": "" + }, + "name": "compute", + "location": { + "start": 72, + "insert": 75, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 34, + "docLength": null + }, + { + "id": "729465e1-80cb-97b1-4f4c-08792dd48901", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Performs startup checks for several RoboComp interfaces, including RoboCompContactile and RoboCompKinovaArm. It instantiates test objects from these interfaces and prints corresponding messages to the console before quitting the application after a 200ms delay.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map, startup_check=True)\n", + "description": "\nNote: The parameter 'startup_check' in this call specifies whether the worker should perform a startup check before starting its operations." + }, + "name": "startup_check", + "location": { + "start": 112, + "insert": 113, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 11, + "docLength": null + }, + { + "id": "6872bc6d-6167-47ad-764f-3f6d6ab8a48a", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Controls the gripper of a Kinova robot arm to close slowly and safely by monitoring tactile sensors until a target force is reached, then stops and prints a success message.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map)\nworker.KinovaArm_closeGripper()", + "description": "" + }, + "name": "KinovaArm_closeGripper", + "location": { + "start": 131, + "insert": 132, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 12, + "docLength": null + }, + { + "id": "6cb02dbc-dde7-7e8c-4a48-505c3ea45c50", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Returns an object of type TPose, representing the center of the tool (end-effector) of a Kinova arm robot, referenced to a specific coordinate system or frame.", + "params": [ + { + "name": "referencedTo", + "type_name": "object", + "description": "Used to specify the coordinate system from which the center of tool coordinates are calculated. It serves as a reference frame for calculating the center of tool position." + } + ], + "returns": { + "type_name": "RoboCompKinovaArmTPose", + "description": "A data structure representing a pose (position and orientation) in space, specifically for the Kinova Arm robot." 
+ }, + "usage": { + "language": "python", + "code": "specific_worker = SpecificWorker(proxy_map)\ncenter_of_tool = specific_worker.KinovaArm_getCenterOfTool(referencedTo)", + "description": "" + }, + "name": "KinovaArm_getCenterOfTool", + "location": { + "start": 147, + "insert": 148, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "9634aeee-9fd6-0d96-d54f-8791dd85159c", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Controls the opening of the gripper on a Kinova arm. It continuously reduces the distance between the gripper's fingers until it reaches a threshold value of 0.01, then stops and sets the gripper speed to 0.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map, startup_check=False)\nworker.kinova = KinovaGen3()\nworker.KinovaArm_openGripper()", + "description": "" + }, + "name": "KinovaArm_openGripper", + "location": { + "start": 161, + "insert": 162, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 5, + "docLength": null + }, + { + "id": "9b5abc35-96f0-81b6-5646-92fb06e2d775", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Sets the center of tool for the Kinova arm according to the provided pose and referencedTo parameters, presumably used for robotic arm control or simulation.", + "params": [ + { + "name": "pose", + "type_name": "Pose", + "description": "6-dimensional array representing the position and orientation of the robot's tool center point." + }, + { + "name": "referencedTo", + "type_name": "object", + "description": "Used to specify the frame of reference for the pose provided." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "specific_worker = SpecificWorker(proxy_map)\npose = ifaces.RoboCompKinovaArm.TPose()\nreferencedTo = ifaces.RoboCompKinovaArm.ReferenceFrame\nspecific_worker.KinovaArm_setCenterOfTool(pose, referencedTo)", + "description": "" + }, + "name": "KinovaArm_setCenterOfTool", + "location": { + "start": 176, + "insert": 181, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "9057a0fd-31ac-f282-a345-b03ccc2712b3", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Sets the speeds for moving joints of Kinova arm, marks that movement with speed is enabled, and records the time of last move order.", + "params": [ + { + "name": "speeds", + "type_name": "List[int]", + "description": "Utilized to store the speeds at which each joint of Kinova Arm should move when executing the movement command." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map)\nspeeds = ifaces.RoboCompKinovaArm.TJointSpeeds()\nspeeds.jointSpeeds = [1, 0.5, 2, 3, 4, 5, 6]\nworker.KinovaArm_moveJointsWithSpeed(speeds)", + "description": "" + }, + "name": "KinovaArm_moveJointsWithSpeed", + "location": { + "start": 186, + "insert": 187, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 5, + "docLength": null + }, + { + "id": "6fbd182e-7cbc-deb0-1b4a-c479f6c6d848", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Moves a Kinova arm to a specified joint angle configuration. 
The angles are retrieved from an input object, printed for debugging purposes, and then sent to the kinova.move_joints_to function to control the arm's joints accordingly.", + "params": [ + { + "name": "angles", + "type_name": "object", + "description": "Expected to contain information about joint angles for the Kinova arm movement." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "worker = SpecificWorker(proxy_map, startup_check=False)\nangles = ifaces.RoboCompKinovaArm.TAngles()\nangles.jointAngles = [30.0, 45.0, -10.0, 20.0, 40.0, 30.0, 50.0]\nworker.KinovaArm_moveJointsWithAngle(angles)", + "description": "" + }, + "name": "KinovaArm_moveJointsWithAngle", + "location": { + "start": 195, + "insert": 196, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "f2b246a7-12cb-4897-ee40-3f05cd17e20f", + "ancestors": [ + "15f7ef8f-b227-caa4-ee4b-15855c361c8f" + ], + "description": "Retrieves the current pose (position and orientation) of the kinova robot using the `get_pose()` method inherited from the GenericWorker class. It assigns this information to a variable named `state`.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "specific_worker = SpecificWorker(proxy_map)\nspecific_worker.buclePrueba()\n", + "description": "" + }, + "name": "buclePrueba", + "location": { + "start": 203, + "insert": 204, + "offset": " ", + "indent": 12, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + } + ] + } + } + }, + { + "name": "kinovaarmI.py", + "path": "components/kinova_controller/src/kinovaarmI.py", + "content": { + "structured": { + "description": "A class `KinovaArmI` that inherits from the `KinovaArm` class and provides additional methods for controlling a robotic arm. The class uses the `Ice` package to load a slice of an Ice module and access the `KinovaArm` class. The `KinovaArmI` class defines several new methods for closing the gripper, getting the center of the tool, getting the state of the gripper and joints, moving the joints with angle or speed, opening the gripper, setting the center of the tool, and other related tasks.", + "items": [ + { + "id": "39cd7f53-23ba-9f9d-1748-49c34bf762f5", + "ancestors": [], + "description": "Provides methods for controlling a Kinova arm, including closing and opening the gripper, getting the center of tool, gripper state, joints state, and moving the joints with angle or speed.", + "attributes": [ + { + "name": "worker", + "type_name": "KinovaArmWorker|KinovaArm__closeGripper|KinovaArm_getCenterOfTool|KinovaArm_getGripperState|KinovaArm_getJointsState|KinovaArm_moveJ", + "description": "Used to provide access to the underlying KinovaArm worker API." + } + ], + "name": "KinovaArmI", + "location": { + "start": 36, + "insert": 37, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 28, + "docLength": null + } + ] + } + } + }, + { + "name": "test.py", + "path": "components/kinova_controller/src/test.py", + "content": { + "structured": { + "description": "Two functions: `close_gripper_speed()` and `open_gripper_speed()`. These functions use the `Base` class from a high-level package called `Base_pb2` to control a gripper using speed and position commands. The `close_gripper_speed()` function sets the gripper's speed to close it, while the `open_gripper_speed()` function sets the gripper's speed to open it. 
Both functions wait for the reported speed or position to reach the desired value before ending the loop.", + "items": [ + { + "id": "33df4610-bc14-2eb7-2741-3335a2cfe5eb", + "ancestors": [], + "description": "Manages the opening and closing of a gripper using speed commands, as well as measuring the movement of the gripper during these operations.", + "attributes": [ + { + "name": "connection", + "type_name": "Base_pb2GripperCommand|Base_pb2GripperRequest", + "description": "Used to send gripper commands to the Kinova Gen III robot. It allows you to specify the command mode, finger position, and other parameters for the gripper movement." + }, + { + "name": "router", + "type_name": "Base_pb2GripperCommand", + "description": "Used to send a gripper command to the Kinova Gen III robotic arm. It allows for sending different types of commands, such as speed or position, to control the movement of the gripper." + }, + { + "name": "base", + "type_name": "Base_pb2Base", + "description": "A reference to the underlying robot's base module, which provides methods for sending commands to the robot's end effectors." + }, + { + "name": "base_cyclic", + "type_name": "Base|GripperRequest", + "description": "Used to specify the cyclic movement pattern for the gripper, allowing for continuous opening or closing of the gripper." + } + ], + "name": "KinovaGen3", + "location": { + "start": 51, + "insert": 52, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 353, + "docLength": null + }, + { + "id": "ea54bfaf-84b0-9da7-e74b-667f3d67a01f", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Establishes connections to a device using TCP and creates two clients: `BaseClient` for direct interaction with the device, and `BaseCyclicClient` for cyclic communication.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create a new instance of the KinovaGen3 class\nkinova_gen3 = KinovaGen3()\n\n# Get the current state of the robot's base\nstate = kinova_gen3.get_state()\nprint(f\"Base: {state}\")\n\n# Get the current joint angles of the robot\njoints = kinova_gen3.get_joints()\nprint(f\"Joints: {joints}\")\n\n# Wait for the user to press a key before continuing\ninput(\"Press Enter to continue...\")\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 52, + "insert": 54, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 11, + "docLength": null + }, + { + "id": "deea3918-dcbc-cb8f-d045-a68f86f2f242", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb", + "9a24a2f4-2727-5bbc-5f46-53ed8a417e38" + ], + "description": "Prints the event name associated with a `notification` object and sets an instance variable `e` to a default value if the event is either `ACTION_END` or `ACTION_ABORT`.", + "params": [ + { + "name": "notification", + "type_name": "Base_pb2.Notification", + "description": "Passed as an argument to the function, providing information about an event that triggered the function call." + }, + { + "name": "e", + "type_name": "Base_pb2.Event", + "description": "Set to an instance of Event upon calling the function." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the Base_pb2.Notification class\nnotification = Base_pb2.Notification()\n\n# Set the action event for the notification to ACTION_END or ACTION_ABORT\nnotification.action_event = Base_pb2.ACTION_END\n\n# Call the check function with the notification object and an exception variable e\ncheck(notification, e)\n", + "description": "" + }, + "name": "check", + "location": { + "start": 77, + "insert": 78, + "offset": " ", + "indent": 12, + "comment": null + }, + "item_type": "function", + "length": 6, + "docLength": null + }, + { + "id": "92234e42-4560-5590-7a42-df863f5bcf9f", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves refresh feedback from the base cyclic component and returns it.", + "params": [], + "returns": { + "type_name": "Feedbackbase", + "description": "A cyclic refresh feedback object created by calling the `RefreshFeedback` method." + }, + "usage": { + "language": "python", + "code": "# Initialize the KinovaGen3 class\nkinova = KinovaGen3()\n\n# Wait for a specific event or action to occur before continuing\ne = threading.Event()\nkinova.check_for_end_or_abort(e)\n\n# Get the current state of the robot\nstate = kinova.get_state()\n\n# Print the state of the robot\nprint(state)\n", + "description": "" + }, + "name": "get_state", + "location": { + "start": 86, + "insert": 87, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "e5ebf32f-4438-88a1-8c4a-abd36a5e7a61", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Computes and returns the positions, velocities, and torques of the joints of a robotic arm.", + "params": [], + "returns": { + "type_name": "Dict[str,float]", + "description": "A dictionary containing the positions, velocities, and torques of the joints in the system." + }, + "usage": { + "language": "python", + "code": "# Create an instance of the KinovaGen3 class\nkinova = KinovaGen3()\n\n# Connect to the robot using the IP address, username, and password\nkinova.connect(\"192.168.1.10\", \"admin\", \"admin\")\n\n# Get the joints' position, velocity, and torque\njoints = kinova.get_joints()\nprint(joints[\"position\"])\nprint(joints[\"velocity\"])\nprint(joints[\"torque\"])\n", + "description": "" + }, + "name": "get_joints", + "location": { + "start": 93, + "insert": 94, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 7, + "docLength": null + }, + { + "id": "654b3e6f-b416-acad-7046-5e64021c5120", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves the current state of the gripper by sending a `GripperRequest` message to the base module and returning the measured movement of the gripper in the form of a `finger` object.", + "params": [], + "returns": { + "type_name": "float", + "description": "The finger position of the gripper in meters." 
+ }, + "usage": { + "language": "python", + "code": "# Create an instance of the KinovaGen3 class\nkinova = KinovaGen3()\n\n# Get the current state of the gripper\ngripper_state = kinova.get_gripper_state()\nprint(\"Gripper state:\", gripper_state)\n", + "description": "" + }, + "name": "get_gripper_state", + "location": { + "start": 101, + "insert": 102, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "13adca69-4965-9daf-8e42-b96192349229", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Computes and returns the tool's pose (position and orientation) in a list of six elements.", + "params": [], + "returns": { + "type_name": "Tuple[float,float,float,float,float,float]", + "description": "6 floats representing the tool's pose in 3D space and orientation." + }, + "usage": { + "language": "python", + "code": "# Initialize the KinovaGen3 object with the IP address, username, and password of the robot.\nkinova = KinovaGen3(ip='192.168.1.10', username='admin', psw='admin')\n\n# Get the current pose of the end effector in a list format.\npose = kinova.get_pose()\n\nprint(\"The current pose is:\")\nprint(pose)\n", + "description": "\nThis code initializes an object of type KinovaGen3 with the IP address, username, and password of the robot. It then calls the get_pose method on this object, which returns the current pose of the end effector in a list format. The current pose is then printed to the console." + }, + "name": "get_pose", + "location": { + "start": 106, + "insert": 107, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 9, + "docLength": null + }, + { + "id": "4d4789de-df44-56af-6c49-d88e6068c646", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Performs a gripper movement command to reach a specified position.", + "params": [ + { + "name": "target_position", + "type_name": "float", + "description": "Representing the desired position of the gripper to move to." + } + ], + "returns": { + "type_name": "bool", + "description": "True when the gripper has moved to the target position successfully, and False otherwise." + }, + "usage": { + "language": "python", + "code": "# Import KinovaGen3 class from the Kinova package\nfrom kinova import KinovaGen3\n\n# Initialize a KinovaGen3 object with the IP address, username, and password of your robot\nrobot = KinovaGen3(\"192.168.1.10\", \"admin\", \"admin\")\n\n# Move the gripper to a target position of 0.5\nsuccess = robot.gripper_move_to(0.5)\n\n# If the movement is successful, print a success message\nif success:\n print(\"Gripper moved successfully!\")\nelse:\n print(\"Failed to move gripper.\")\n", + "description": "" + }, + "name": "gripper_move_to", + "location": { + "start": 124, + "insert": 125, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "71e2fbf3-76cb-7db1-534c-4704e090a4e3", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Sets the gripper speed by creating a `GripperCommand` message, setting the gripper mode and finger value to the input speed, and sending the command to the base using `SendGripperCommand`.", + "params": [ + { + "name": "speed", + "type_name": "float", + "description": "Represents the desired speed at which the gripper should move in the specified finger." 
+ } + ], + "returns": { + "type_name": "bool", + "description": "`True` if the gripper command was sent successfully, and `False` otherwise." + }, + "usage": { + "language": "python", + "code": "# Create a KinovaGen3 object and connect to the robot\nkinova = KinovaGen3()\n\n# Move the gripper to a speed of 0.5\nkinova.gripper_move_speed(0.5)\n\n# Wait for the gripper movement to complete\ntime.sleep(2)\n", + "description": "\nThis code creates an instance of the `KinovaGen3` class and uses the `gripper_move_speed` method to set the speed of the gripper to 0.5. The `RefreshFeedback` method is then used to wait for the gripper movement to complete." + }, + "name": "gripper_move_speed", + "location": { + "start": 143, + "insert": 144, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 11, + "docLength": null + }, + { + "id": "00b94328-473a-66ba-694b-dc5f183a2c8d", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Moves a robotic arm to a specified position and orientation using a Cartesian movement approach, waiting for the movement to finish before returning.", + "params": [ + { + "name": "x", + "type_name": "float", + "description": "Used to set the x-coordinate of the target pose in the Cartesian space." + }, + { + "name": "y", + "type_name": "float", + "description": "Used to specify the target y-coordinate of the robot's end position after moving along the x-axis to the specified `x` coordinate." + }, + { + "name": "z", + "type_name": "float", + "description": "Representing the z-coordinate of the target position for the robot to move to." + }, + { + "name": "theta_x", + "type_name": "float", + "description": "Used to represent the rotation angle of the robot's end effector around its x-axis during movement." + }, + { + "name": "theta_y", + "type_name": "float", + "description": "Used to specify the yaw angle of the robot's end effector during the movement." + }, + { + "name": "theta_z", + "type_name": "float", + "description": "Representing the z-rotation angle of the robot end effector relative to its initial position, along the robot's Z axis." + } + ], + "returns": { + "type_name": "bool", + "description": "1 if the movement was completed successfully within the timeout duration, and 0 otherwise due to a timeout." + }, + "usage": { + "language": "python", + "code": "# Connect to the Kinova robot using the IP address, username, and password.\nkinova = KinovaGen3(\"192.168.1.10\", \"admin\", \"admin\")\n\n# Move the robot to a specific Cartesian position with a specific orientation.\nkinova.cartesian_move_to(10, 20, 30, 45, 60, 90)\n", + "description": "" + }, + "name": "cartesian_move_to", + "location": { + "start": 160, + "insert": 162, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 26, + "docLength": null + }, + { + "id": "5a0ba554-c89c-74ad-9c4c-55680bc6d5f5", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Calculates and sends joint speeds to the robot.", + "params": [ + { + "name": "speeds", + "type_name": "List[float]", + "description": "A list of joint speed values to be applied to the robot's joints." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize KinovaGen3 object\nkinova = KinovaGen3()\n\n# Move joints with speeds\nspeed1 = 0.5 # Speed of the first joint\nspeed2 = -0.7 # Speed of the second joint\nspeeds = [speed1, speed2]\nkinova.move_joints_with_speeds(speeds)\n", + "description": "" + }, + "name": "move_joints_with_speeds", + "location": { + "start": 194, + "insert": 198, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "767f5351-3caf-dcb6-6f46-13d6a9a32057", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Moves joints to specified angles using an action object and waits for the movement to finish before returning.", + "params": [ + { + "name": "joints", + "type_name": "List[float]", + "description": "Used to specify the angles of the joints to move to." + } + ], + "returns": { + "type_name": "bool", + "description": "1 if the angular movement was completed successfully within the specified timeout, and 0 otherwise due to a timeout." + }, + "usage": { + "language": "python", + "code": "# Initialize an instance of the KinovaGen3 class\nkinova = KinovaGen3()\n\n# Move joints to a specific position (in degrees)\njoint_angles = [90, 180, 270]\nkinova.move_joints_to(joint_angles)\n", + "description": "\nIn this example, the user initializes an instance of the KinovaGen3 class and then moves the robot's joints to a specific position (in degrees) using the move_joints_to function. The function takes in a list of joint angles as its argument and performs the necessary actions to move the robot's joints to those positions." + }, + "name": "move_joints_to", + "location": { + "start": 212, + "insert": 213, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 30, + "docLength": null + }, + { + "id": "4c891cd8-66cf-b6ba-0c49-a6bbd04ea541", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Prints the rotation and translation parameters of an extrinsic parameter set.", + "params": [ + { + "name": "extrinsics", + "type_name": "Extrinsic | RotationMatrix | TranslationVector", + "description": "3x3 rotation matrix or 3x1 translation vector." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the KinovaGen3 class and connect to the robot\nmy_kinova = KinovaGen3()\n\n# Get the extrinsic parameters for the current robot state\nextrinsics = my_kinova.get_state().extrinsics\n\n# Print the rotation matrix and translation vector of the current robot pose\nprint(\"Current robot pose:\")\nmy_kinova.print_extrinsic_parameters(extrinsics)\n", + "description": "" + }, + "name": "print_extrinsic_parameters", + "location": { + "start": 247, + "insert": 248, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 10, + "docLength": null + }, + { + "id": "baa5bfc1-b708-1e99-e74e-cbdd02e1bb5b", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves the device ID of the vision module from the devices info based on the device handle.", + "params": [ + { + "name": "device_manager", + "type_name": "DeviceManager | None", + "description": "Used to represent the device manager object that provides information about the devices registered in the system." 
+ } + ], + "returns": { + "type_name": "int", + "description": "The device ID of the first Vision module detected in the system." + }, + "usage": { + "language": "python", + "code": "# Create an instance of the KinovaGen3 class and connect to the device\nkinova = KinovaGen3()\n\n# Get the vision device ID\nvision_device_id = kinova.example_vision_get_device_id(device_manager)\n\nprint(\"Vision device ID:\", vision_device_id)\n", + "description": "" + }, + "name": "example_vision_get_device_id", + "location": { + "start": 261, + "insert": 262, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 16, + "docLength": null + }, + { + "id": "1315a984-f516-9ead-914e-0e4fb1301bf6", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves extrinsic parameters for a specific vision device using the Vision Config Service.", + "params": [ + { + "name": "vision_config", + "type_name": "VisionConfig", + "description": "Used to retrieve extrinsic parameters for a specific vision device ID." + }, + { + "name": "vision_device_id", + "type_name": "str", + "description": "Used to identify the specific vision device for which the extrinsic parameters are being retrieved." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from kinova_gen3 import KinovaGen3\n\n# Create an instance of the class\nkinova = KinovaGen3()\n\n# Get extrinsic parameters for a specific vision device ID\nvision_device_id = 1234\nextrinsics = kinova.example_routed_vision_get_extrinsics(vision_config, vision_device_id)\nprint(\"Extrinsic parameters:\", extrinsics)\n", + "description": "\nThis example retrieves the extrinsic parameters for a specific vision device ID using the Python function example_routed_vision_get_extrinsics and prints them to the console. The user can modify the vision device ID to retrieve different extrinsic parameters." + }, + "name": "example_routed_vision_get_extrinsics", + "location": { + "start": 282, + "insert": 283, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 6, + "docLength": null + }, + { + "id": "d6893059-c401-9487-c143-228317eab719", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Prints out various parameters of an intrinsic camera model, including sensor ID, resolution, principal point coordinates, focal length coordinates, and distortion coefficients.", + "params": [ + { + "name": "intrinsics", + "type_name": "IntrinsicParams", + "description": "Used to access the intrinsic parameters of a camera sensor, including the principal point, resolution, focal length, and distortion coefficients." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from kinova_gen3 import KinovaGen3\n\n# Create an instance of the KinovaGen3 class and connect to the robot\nk = KinovaGen3()\nk.connect(\"192.168.1.10\", \"admin\", \"admin\")\n\n# Get the intrinsic parameters for the sensor on the robot\nintrinsics = k.get_sensor_intrinsics(\"MY_SENSOR\")\nprint(k.print_intrinsic_parameters(intrinsics))\n", + "description": "" + }, + "name": "print_intrinsic_parameters", + "location": { + "start": 304, + "insert": 305, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 13, + "docLength": null + }, + { + "id": "3e9b040d-2d3a-a9bd-b842-97c978a7f167", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves intrinsic parameters of Color and Depth sensors using the Vision Config Service, and prints the retrieved values.", + "params": [ + { + "name": "vision_config", + "type_name": "VisionConfig_pb2.VisionConfig", + "description": "Used to retrieve intrinsic parameters for different sensors and resolutions." + }, + { + "name": "vision_device_id", + "type_name": "int", + "description": "Used to identify the specific device to retrieve intrinsic parameters for." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "import grpc\nfrom kinova_utils import VisionConfigClient\n\n# Create a connection to the robot and get the vision config service client\nwith grpc.secure_channel(\"robot_ip\", credentials=grpc.ssl_channel_credentials()) as channel:\n vision_config = VisionConfigClient(channel)\n\n# Get intrinsic parameters for active color resolution\nintrinsics = vision_config.GetIntrinsicParameters(sensor=\"SENSOR_COLOR\")\nprint(\"Active color resolution intrinsic parameters:\", intrinsics)\n\n# Get intrinsic parameters for active depth resolution\nintrinsics = vision_config.GetIntrinsicParameters(sensor=\"SENSOR_DEPTH\")\nprint(\"Active depth resolution intrinsic parameters:\", intrinsics)\n\n# Get intrinsic parameters for color resolution 1920x1080\nintrinsics = vision_config.GetIntrinsicParametersProfile(resolution=\"RESOLUTION_1920x1080\")\nprint(\"Color resolution 1920x1080 intrinsic parameters:\", intrinsics)\n\n# Get intrinsic parameters for depth resolution 424x240\nintrinsics = vision_config.GetIntrinsicParametersProfile(resolution=\"RESOLUTION_424x240\")\nprint(\"Depth resolution 424x240 intrinsic parameters:\", intrinsics)\n", + "description": "" + }, + "name": "example_routed_vision_get_intrinsics", + "location": { + "start": 321, + "insert": 322, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 27, + "docLength": null + }, + { + "id": "7e7e22aa-fae4-45a6-704e-74263348c2a8", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Retrieves device and intrinsic information for a vision camera connected to the router.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "import KinovaGen3\n\n# Initialize the KinovaGen3 object with IP address, username, and password\nkinova = KinovaGen3(\"192.168.1.10\", \"admin\", \"admin\")\n\n# Get camera info using the get_camera_info method\ncamera_info = kinova.get_camera_info()\n\n# Print the camera information\nprint(camera_info)\n", + "description": "\nNote that the above code is just an example and may not work as it is, you will need to adjust it to fit your specific needs." 
+ }, + "name": "get_camera_info", + "location": { + "start": 349, + "insert": 350, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 9, + "docLength": null + }, + { + "id": "74c3d04b-4107-249e-2549-0417b571564a", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Performs a Cartesian movement action relative to a base tool, taking into account the specified x, y, z coordinates and theta angles. It executes the action, waits for it to finish, and returns whether the movement completed successfully or timed out.", + "params": [ + { + "name": "x", + "type_name": "float", + "description": "Used to set the x-position of the tool relative to its initial position." + }, + { + "name": "y", + "type_name": "int", + "description": "Representing the relative movement along the y-axis of the robot's end effector." + }, + { + "name": "z", + "type_name": "float", + "description": "Representing the relative movement of the tool along the z-axis." + }, + { + "name": "theta_x", + "type_name": "float", + "description": "Representing the angular movement of the tool around the x-axis, measured in radians." + }, + { + "name": "theta_y", + "type_name": "float", + "description": "Representing the angle of rotation around the Y-axis of the tool coordinate system, which determines the orientation of the tool end effector during movement." + }, + { + "name": "theta_z", + "type_name": "float", + "description": "Representing the z-axis rotation angle of the tool relative to its current position, which is used by the action movement to achieve the desired orientation in the z-axis direction." + } + ], + "returns": { + "type_name": "bool", + "description": "1 if the cartesian movement was successful and 0 if it timed out." + }, + "usage": { + "language": "python", + "code": "from kinovagen3 import KinovaGen3\n\n# Create instance of KinovaGen3 class\nkinova = KinovaGen3()\n\n# Move to a relative position in Cartesian space with respect to the current pose\nfinished = kinova.cartesian_move_relative(0.1, 0, 0, 0, 0, 0)\n\nif finished:\n print(\"Cartesian movement completed\")\nelse:\n print(\"Timeout on action notification wait\")\n", + "description": "\nThis code creates an instance of the KinovaGen3 class and then uses the cartesian_move_relative function to move to a relative position in Cartesian space with respect to the current pose. The function returns a boolean value indicating whether the movement was completed within a certain time limit (TIMEOUT_DURATION)." + }, + "name": "cartesian_move_relative", + "location": { + "start": 359, + "insert": 361, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 28, + "docLength": null + }, + { + "id": "1340ca16-04dc-54a9-994b-9fc8ac84680e", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Calculates the gripper movement speed to reach a destination position, sends a speed command to the gripper, and measures the gripper's movement to ensure it reaches the desired position.", + "params": [ + { + "name": "dest_pos", + "type_name": "float", + "description": "Representing the desired position of the gripper to move it to with the speed command." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Import KinovaGen3 class from the provided code snippet\nfrom kinova import KinovaGen3\n\n# Initialize a new instance of the KinovaGen3 class\nkin = KinovaGen3()\n\n# Destination position for the gripper to move to\ndest_pos = 0.25\n\n# Move the gripper to the destination position using speed control\nkin.move_gripper_speed_dest(dest_pos)\n", + "description": "" + }, + "name": "move_gripper_speed_dest", + "location": { + "start": 395, + "insert": 397, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 30, + "docLength": null + }, + { + "id": "5c9b723c-a933-4c83-7246-d2b9520dd09c", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Controls the speed of the gripper using a speed command.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of the KinovaGen3 class and connect to the robot\nkinova_gen3 = KinovaGen3()\n\n# Close the gripper using a speed command\nkinova_gen3.close_gripper_speed()\n\n# Print the current speed of the gripper\nprint(\"Current speed is : {0}\".format(kinova_gen3.get_state().gripper.finger[0].value))\n", + "description": "" + }, + "name": "close_gripper_speed", + "location": { + "start": 430, + "insert": 432, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 20, + "docLength": null + }, + { + "id": "2ce4ba6d-6925-458e-fc47-852c2e2a24da", + "ancestors": [ + "33df4610-bc14-2eb7-2741-3335a2cfe5eb" + ], + "description": "Controls the gripper's speed using a command, and then measures its position to determine when it has reached the desired open state.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "from kinova_gen3 import KinovaGen3\n\n# Create a new instance of the KinovaGen3 class\nmy_kinova = KinovaGen3()\n\n# Open the gripper using the speed command\nmy_kinova.open_gripper_speed()\n", + "description": "" + }, + "name": "open_gripper_speed", + "location": { + "start": 455, + "insert": 457, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 19, + "docLength": null + } + ] + } + } + }, + { + "name": "genericworker.py", + "path": "components/kinova_controller/src/genericworker.py", + "content": { + "structured": { + "description": "A worker class that inherits from QObject and sets up an Ice environment with a contactile proxy. It also defines two signals: kill and setPeriod. The kill signal is emitted when the worker instance wants to terminate itself, while the setPeriod signal allows changing the compute period of the worker.", + "items": [ + { + "id": "2b636dd8-c8ea-f1a2-c644-12b096106472", + "ancestors": [], + "description": "Manages a timer and emits a signal to stop itself after a specified period. It also has a slot function to set the period of the timer.", + "attributes": [ + { + "name": "kill", + "type_name": "QtCoreQObjectSignal", + "description": "Used to emit a signal indicating that the worker should be terminated." + }, + { + "name": "contactile_proxy", + "type_name": "ContactileProxy|NoneType", + "description": "Used to represent a proxy object for contacting the contactile service." + }, + { + "name": "mutex", + "type_name": "QMutex", + "description": "Used to protect access to the internal state of the worker, allowing only one thread to access it at a time." 
+ }, + { + "name": "Period", + "type_name": "int", + "description": "30 milliseconds by default, used for setting the time interval for emitting the `kill` signal." + }, + { + "name": "timer", + "type_name": "QtCoreQTimer", + "description": "Used to start a timer that emits the `kill` signal after a specified period." + } + ], + "name": "GenericWorker", + "location": { + "start": 37, + "insert": 39, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 25, + "docLength": null + }, + { + "id": "9f5d9bb6-418c-1e8f-2f45-6fd4efecf967", + "ancestors": [ + "2b636dd8-c8ea-f1a2-c644-12b096106472" + ], + "description": "Initializes instance variables, including a contactile proxy and a timer for periodic work.", + "params": [ + { + "name": "mprx", + "type_name": "Dict[str, Any]", + "description": "Passed as an instance variable `contactile_proxy` as a reference to a ContactileProxy object." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "worker = GenericWorker(mprx)\nworker.setPeriod(100)\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 41, + "insert": 42, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 8, + "docLength": null + }, + { + "id": "f7daadf8-e8ac-0f9a-d344-4789ef8051fa", + "ancestors": [ + "2b636dd8-c8ea-f1a2-c644-12b096106472" + ], + "description": "Emits the `kill` signal, indicating that the instance should be terminated.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "# Create an instance of GenericWorker with a contactile proxy object\nworker = GenericWorker(contactile_proxy)\n\n# Set a new period for the worker\nworker.setPeriod(500)\n\n# Kill the worker\nworker.killYourSelf()\n", + "description": "" + }, + "name": "killYourSelf", + "location": { + "start": 51, + "insert": 53, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "92ecc764-e422-7e90-3e43-082e94b2f57f", + "ancestors": [ + "2b636dd8-c8ea-f1a2-c644-12b096106472" + ], + "description": "Updates the `Period` attribute and starts the timer using its value, printing a message to indicate the change.", + "params": [ + { + "name": "p", + "type_name": "int", + "description": "Used to represent the new period value for which the function will set the timer." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Instantiate GenericWorker class and call setPeriod method with parameter 100\nmy_worker = GenericWorker(mprx)\nmy_worker.setPeriod(100)\n", + "description": "" + }, + "name": "setPeriod", + "location": { + "start": 58, + "insert": 60, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 5, + "docLength": null + } + ] + } + } + }, + { + "name": "interfaces.py", + "path": "components/kinova_controller/src/interfaces.py", + "content": { + "structured": { + "description": "An IceStorm class that provides a simple interface for implementing the publisher/subscriber pattern using the Internet Communications Engine (Ice) framework. The class creates an Ice connector and sets up a topic manager, subscribes to a topic, and implements a default handler for messages. It also defines methods to get proxies maps and destroy the connection when finished. 
The code uses the `Ice` and `IceStorm` packages.", + "items": [ + { + "id": "2fc90f6c-316d-1387-a542-30827658abcc", + "ancestors": [], + "description": "Manages a sequence of `RoboCompKinovaArm.TJoint` objects, providing methods for appending, extending, and inserting joints.", + "attributes": [], + "name": "TJointSeq", + "location": { + "start": 13, + "insert": 14, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 16, + "docLength": null + }, + { + "id": "41c18008-b835-d481-0542-cd5508cfcdd2", + "ancestors": [ + "2fc90f6c-316d-1387-a542-30827658abcc" + ], + "description": "Adds an item to the sequence, ensuring it is an instance of the RoboCompKinovaArm.TJoint class before appending it to the parent list.", + "params": [ + { + "name": "item", + "type_name": "RoboCompKinovaArm.TJoint", + "description": "Asserted to be an instance of that class before being appended to the sequence." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "joint_seq = TJointSeq()\n\n# Append a joint object to the joint sequence\njoint_seq.append(RoboCompKinovaArm.TJoint())\n\n# Print the length of the joint sequence\nprint(len(joint_seq))\n", + "description": "" + }, + "name": "append", + "location": { + "start": 17, + "insert": 18, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "f83012b5-86fa-f196-6e46-8747e5e8aa9d", + "ancestors": [ + "2fc90f6c-316d-1387-a542-30827658abcc" + ], + "description": "Iterates over an input iterable and asserts that each item is an instance of the RoboCompKinovaArm.TJoint class before extending the sequence with those items using the superclass's `extend` method.", + "params": [ + { + "name": "iterable", + "type_name": "Sequence[TJoint]", + "description": "An iterable collection of TJoint objects to be extended." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize a list of joints with some initial values\njoint_list = RoboCompKinovaArm.TJointSeq([RoboCompKinovaArm.TJoint(1, 'joint_a'), RoboCompKinovaArm.TJoint(2, 'joint_b')])\n\n# Extend the list with more joints from an iterable object\nmore_joints = [RoboCompKinovaArm.TJoint(3, 'joint_c'), RoboCompKinovaArm.TJoint(4, 'joint_d'), RoboCompKinovaArm.TJoint(5, 'joint_e')]\njoint_list.extend(more_joints)\n", + "description": "\nIn this example, the user first initializes a list of joints with some initial values using the constructor `RoboCompKinovaArm.TJointSeq()`. They then extend the list with more joints from an iterable object by calling the `extend()` method and passing in the iterable object containing the new joints. The resulting list will contain all the original joints plus the new ones added through the `extend()` method." + }, + "name": "extend", + "location": { + "start": 21, + "insert": 22, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "bd518a8c-24c2-9eac-5f47-7fb772b04a41", + "ancestors": [ + "2fc90f6c-316d-1387-a542-30827658abcc" + ], + "description": "Modifies the list's index and item by inserting an instance of the RoboCompKinovaArm.TJoint object at the specified index.", + "params": [ + { + "name": "index", + "type_name": "int", + "description": "Used to specify the position at which the new joint should be inserted in the sequence." 
+ }, + { + "name": "item", + "type_name": "RoboCompKinovaArm.TJoint", + "description": "Asserted to be an instance of the TJoint class before calling the superclass's insert method." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize an empty TJointSeq object\nseq = TJointSeq()\n\n# Insert a new joint at index 0 with the name \"joint_1\" and the position (4,5,6)\nseq.insert(0, RoboCompKinovaArm.TJoint(\"joint_1\", (4,5,6)))\n", + "description": "\nThis code would insert a new TJoint object into the TJointSeq object with the name \"joint_1\" and position (4, 5, 6)." + }, + "name": "insert", + "location": { + "start": 26, + "insert": 27, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "9ec886f9-fed8-a897-5b40-22326ff99ff4", + "ancestors": [], + "description": "Manages a list of floating-point numbers, providing methods to append, extend, and insert elements with type checks for input values.", + "attributes": [], + "name": "Speeds", + "location": { + "start": 31, + "insert": 32, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 16, + "docLength": null + }, + { + "id": "1fb57b48-ae30-2cb7-c048-000106829dd3", + "ancestors": [ + "9ec886f9-fed8-a897-5b40-22326ff99ff4" + ], + "description": "Adds an item to the list, checking that it is of type float before appending it to the superclass list.", + "params": [ + { + "name": "item", + "type_name": "float", + "description": "Asserted to be of that type before being appended." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "speeds = Speeds()\n# create an empty list of speeds\n\nfor i in range(10):\n speed = random.uniform(0, 20)\n # generate a random speed between 0 and 20\n \n speeds.append(speed)\n # add the speed to the list\n", + "description": "" + }, + "name": "append", + "location": { + "start": 35, + "insert": 36, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "f6fc9aa5-69bd-3dba-b94a-02e0c48c8f5e", + "ancestors": [ + "9ec886f9-fed8-a897-5b40-22326ff99ff4" + ], + "description": "Takes an iterable and ensures that each item is an instance of `float`. Then, it calls the `super().extend()` method to add the items to the Speeds list.", + "params": [ + { + "name": "iterable", + "type_name": "Iterable[float]", + "description": "An iterable object containing floating-point numbers that the Speeds class will validate and add to its internal list." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "my_speeds = Speeds()\nmy_speeds.append(5)\nmy_speeds.append(10)\nmy_speeds.extend([15, 20])\nprint(my_speeds)\n# Output: [5, 10, 15, 20]\n", + "description": "" + }, + "name": "extend", + "location": { + "start": 39, + "insert": 40, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "46f61a83-7621-3dae-854e-41c409267a0e", + "ancestors": [ + "9ec886f9-fed8-a897-5b40-22326ff99ff4" + ], + "description": "Inserts an item at a given index, checking that the item is a float and calling the parent class's `insert` method with the index and item as arguments.", + "params": [ + { + "name": "index", + "type_name": "int", + "description": "Used to indicate the position at which the new element should be inserted." 
+ }, + { + "name": "item", + "type_name": "float", + "description": "Asserted to be an instance of that type before it is passed to the superclass's insert method." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "speeds = Speeds()\nspeeds.append(10)\nspeeds.insert(1, 20)\n", + "description": "" + }, + "name": "insert", + "location": { + "start": 44, + "insert": 45, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "a0ab07b2-13fe-ae9d-204a-5b62729e238d", + "ancestors": [], + "description": "Is a list-based implementation of angles, providing methods for appending, extending, and inserting angles as floating-point values.", + "attributes": [], + "name": "Angles", + "location": { + "start": 49, + "insert": 50, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 16, + "docLength": null + }, + { + "id": "fa4cd30f-e410-f5a3-2d40-687ca47eaa99", + "ancestors": [ + "a0ab07b2-13fe-ae9d-204a-5b62729e238d" + ], + "description": "Adds a float value to the list of angles.", + "params": [ + { + "name": "item", + "type_name": "float", + "description": "Asserted to be an instance of that type before being appended to the Angles object." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "my_list = Angles()\nmy_list.append(180)\nmy_list.append(360)\nprint(my_list) # [180, 360]\n\n# Alternatively, the user can also use extend to add a list of items at once\nmy_list.extend([90, 270])\nprint(my_list) # [180, 360, 90, 270]", + "description": "" + }, + "name": "append", + "location": { + "start": 53, + "insert": 54, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "44b402cd-e346-f1ab-cc4e-10675f69a106", + "ancestors": [ + "a0ab07b2-13fe-ae9d-204a-5b62729e238d" + ], + "description": "Checks if each item in the iterable is a float, then calls the parent class's `extend` method with the same iterable.", + "params": [ + { + "name": "iterable", + "type_name": "Iterable[float]", + "description": "Used to add elements to the Angles object." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Create a new list of angles and add two float values \nangles = Angles()\nangles.extend([30.0, 60.0])\n", + "description": "\nIn this example, we create a new instance of the Angles class and then use the extend method to add two floating point numbers to it." + }, + "name": "extend", + "location": { + "start": 57, + "insert": 58, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "b2e47f1c-9805-fea4-de4d-5454ab0130b5", + "ancestors": [ + "a0ab07b2-13fe-ae9d-204a-5b62729e238d" + ], + "description": "Inserts an item into the list at a specified index, checking that the input item is a float and calling the superclass's implementation before inserting it.", + "params": [ + { + "name": "index", + "type_name": "int", + "description": "Used to specify the position where the new element will be inserted in the list." + }, + { + "name": "item", + "type_name": "float", + "description": "Asserted to be an instance of that type before being passed to the superclass method for insertion." 
+ } + ], + "returns": null, + "usage": { + "language": "python", + "code": "angles = Angles()\nangles.append(0)\nangles.insert(1, 90)\nangles.insert(2, 45)\nprint(angles) # Output: [0, 90, 45]\n", + "description": "" + }, + "name": "insert", + "location": { + "start": 62, + "insert": 63, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "12c2918e-b6e8-6384-6a47-5270872691fa", + "ancestors": [], + "description": "Manages topics and provides a way to publish messages to them through a proxy object, allowing multiple clients to create and manage topics independently.", + "attributes": [ + { + "name": "ice_connector", + "type_name": "object", + "description": "A reference to the Ice Connector used for communication with the server." + }, + { + "name": "mprx", + "type_name": "Dict[str,IceObject]", + "description": "A mapping of topics to their corresponding Ice proxies." + }, + { + "name": "topic_manager", + "type_name": "TopicManager|IceStormNoSuchTopic", + "description": "Responsible for managing topics created by the instance." + } + ], + "name": "Publishes", + "location": { + "start": 72, + "insert": 73, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 28, + "docLength": null + }, + { + "id": "ae6eabd7-feb5-6ea3-484a-d0052edf2f03", + "ancestors": [ + "12c2918e-b6e8-6384-6a47-5270872691fa" + ], + "description": "Initializes instance variables: ice_connector and topic_manager, which are used to manage communication between the publisher and the Ice platform.", + "params": [ + { + "name": "ice_connector", + "type_name": "object", + "description": "Used to store an instance of `IceConnector`, which provides connectivity between distributed components." + }, + { + "name": "topic_manager", + "type_name": "object", + "description": "Used to store an instance of `TopicManager`." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initialize the publisher\npublisher = Publishes(ice_connector, topic_manager)\n\n# Create a new topic named \"topic1\"\nproxy = publisher.create_topic('topic1', IceStorm.IceProxy())\n\n# Get the map of all proxies for the current session\nproxies_map = publisher.get_proxies_map()\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 73, + "insert": 74, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 4, + "docLength": null + }, + { + "id": "ed92fd69-bb11-cfb4-da4c-17646abe1304", + "ancestors": [ + "12c2918e-b6e8-6384-6a47-5270872691fa" + ], + "description": "Creates a new topic or retrieves an existing one based on its name, and returns an Ice proxy for the publisher of that topic.", + "params": [ + { + "name": "topic_name", + "type_name": "str", + "description": "Used to represent the name of a topic to be created or retrieved from the Topic Manager." + }, + { + "name": "ice_proxy", + "type_name": "IceStorm.Publisher.UncheckedCast[Any] ", + "description": "Used to convert the publisher into an unchecked cast proxy." + } + ], + "returns": { + "type_name": "IcePyUncheckedCastProxy", + "description": "An instance of the IcePyUncheckedCast class, representing a one-way publisher for the specified topic name." 
+ }, + "usage": { + "language": "python", + "code": "publishes = Publishes(ice_connector, topic_manager)\nproxy1 = publishes.create_topic(\"TopicName1\", IceStorm.Publisher)\nproxy2 = publishes.create_topic(\"TopicName2\", IceStorm.Publisher)\n", + "description": "" + }, + "name": "create_topic", + "location": { + "start": 79, + "insert": 81, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 18, + "docLength": null + }, + { + "id": "caa6b5a2-f3c2-f9ac-0043-bed857cec1bb", + "ancestors": [], + "description": "Manages proxy objects for accessing remote objects through an Ice connector. It creates and stores proxies for various properties, allowing for easy access to the remote objects through a consistent interface.", + "attributes": [ + { + "name": "ice_connector", + "type_name": "IceConnector|None", + "description": "Used to manage communication between objects across different partitions in a distributed system." + }, + { + "name": "mprx", + "type_name": "Dict[str,ice_proxy]", + "description": "Used to store proxy objects for different properties of a remote object." + }, + { + "name": "Contactile", + "type_name": "RoboCompContactileContactilePrx|IceObject", + "description": "Used to create a proxy for the Contactile object." + }, + { + "name": "create_proxy", + "type_name": "bool|IcePrx", + "description": "Used to create a proxy object for a remote object property." + } + ], + "name": "Requires", + "location": { + "start": 103, + "insert": 104, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 25, + "docLength": null + }, + { + "id": "dae262ef-d58b-51a0-eb40-460077be86bf", + "ancestors": [ + "caa6b5a2-f3c2-f9ac-0043-bed857cec1bb" + ], + "description": "Initializes instance variables ice_connector and mprx, as well as creating a proxy for the Contactile object using the create_proxy method.", + "params": [ + { + "name": "ice_connector", + "type_name": "RoboCompICEConnectorPrx", + "description": "Required to initialize the object. It represents an instance of the RoboCompICEConnector interface, which provides access to the RoboComp toolkit's functionality for robot control and interaction with external systems." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from Requires import Requires\n\nice_connector = Ice.Communicator()\nrequires = Requires(ice_connector)\n\n# Get a proxy for the Contactile object using its name and type\ncontactile_proxy = requires.create_proxy(\"ContactileProxy\", RoboCompContactile.ContactilePrx)\n\nif contactile_proxy is not None:\n # Use the contactile_proxy to communicate with the Contactile object\n pass\nelse:\n print(\"Cannot connect to the remote object (CameraSimple)\")\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 104, + "insert": 105, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 5, + "docLength": null + }, + { + "id": "46c6b2d1-e47b-8d83-6e40-51b189574a97", + "ancestors": [ + "caa6b5a2-f3c2-f9ac-0043-bed857cec1bb" + ], + "description": "Creates a proxy object for a specified property of an IceGrid connection.", + "params": [ + { + "name": "property_name", + "type_name": "str", + "description": "Used to specify the name of the property in the remote object to be accessed through the proxy." + }, + { + "name": "ice_proxy", + "type_name": "Ice.Proxy | NoneType", + "description": "Used to create a proxy object for the remote object." 
+ } + ], + "returns": { + "type_name": "bool|str", + "description": "2-element tuple containing the success status and the created proxy object." + }, + "usage": { + "language": "python", + "code": "# Initialize a Requires object with an Ice communicator\nrequires = Requires(Ice.initialize())\n\n# Create proxy for ContactileProxy property\nsuccess, proxy = requires.create_proxy(\"ContactileProxy\", RoboCompContactile.ContactilePrx)\n\nif success:\n # Use the created proxy object\n proxy.set_force(50)\nelse:\n print(\"Failed to create proxy for ContactileProxy property\")\n", + "description": "" + }, + "name": "create_proxy", + "location": { + "start": 113, + "insert": 115, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 15, + "docLength": null + }, + { + "id": "3d61ac70-6689-cb99-f241-a7ab77b609d8", + "ancestors": [], + "description": "Creates an ObjectAdapter and adds a Proxy to it. It then subscribes to a topic, retrieves its publisher, and activates the adapter.", + "attributes": [ + { + "name": "ice_connector", + "type_name": "ObjectAdapter|IceReference", + "description": "Used to create an Ice reference for connecting to the remote Ice runtime." + }, + { + "name": "topic_manager", + "type_name": "TopicManager|IceObjectPrx", + "description": "Used to manage topics in the system." + } + ], + "name": "Subscribes", + "location": { + "start": 132, + "insert": 133, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 28, + "docLength": null + }, + { + "id": "f633b9e3-94ff-aebb-f247-811919461ca8", + "ancestors": [ + "3d61ac70-6689-cb99-f241-a7ab77b609d8" + ], + "description": "Sets up instance variables ice_connector, topic_manager, and default_handler for further use by the class.", + "params": [ + { + "name": "ice_connector", + "type_name": "object", + "description": "Used to set the IceConnector instance for the TopicManager class." + }, + { + "name": "topic_manager", + "type_name": "object", + "description": "Used to manage topics related to the IceConnector instance." + }, + { + "name": "default_handler", + "type_name": "Callable[object, object]", + "description": "Used to define a default handler for topics that are not handled by any other handler in the system." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "# Initializing subscribes instance with ice_connector, topic_manager and default_handler\nsubscribes = Subscribes(ice_connector, topic_manager, default_handler)\n\n# Creating adapter with property name \"Topic1\" and interface handler\nadapter = subscribes.create_adapter(\"Topic1\", interface_handler)\n\n# Adding proxy with Ice.OneWay mode and retrieving the created topic\nproxy = adapter.addWithUUID(handler).ice_oneway()\ntopic = subscribes.topic_manager.retrieve(\"Topic1\")\n", + "description": "\nIn this example, we first create an instance of the Subscribes class with the required parameters, such as ice_connector, topic_manager and default_handler. We then use the create_adapter method to create an adapter with a given property name (in this case, \"Topic1\") and interface handler. The addWithUUID method is used to add a proxy with Ice.OneWay mode to the created adapter, and retrieve the created topic using the topic_manager instance." 
+ }, + "name": "__init__", + "location": { + "start": 133, + "insert": 134, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 3, + "docLength": null + }, + { + "id": "9de3a345-7861-2c8c-b84a-448128705128", + "ancestors": [ + "3d61ac70-6689-cb99-f241-a7ab77b609d8" + ], + "description": "Creates an object adapter and adds a proxy to it, then subscribes to a topic using a specified QoS and activates the adapter.", + "params": [ + { + "name": "property_name", + "type_name": "str | PropertyName", + "description": "Used to specify the name of the topic for which an adapter is to be created." + }, + { + "name": "interface_handler", + "type_name": "object", + "description": "Required to handle the interface for the created adapter." + } + ], + "returns": { + "type_name": "Adapter", + "description": "An Ice object adapter that represents a new adapter instance created through the Ice connector's createObjectAdapter method." + }, + "usage": { + "language": "python", + "code": "# Create a new Subscribes object\nsub = Subscribes(ice_connector, topic_manager, default_handler)\n\n# Call create_adapter to create an adapter for the \"MyTopic\" property and set its interface handler to the default handler\nadapter = sub.create_adapter(\"MyTopic\", default_handler)\n", + "description": "\nIn this example, the user creates a new Subscribes object with the ice_connector, topic_manager, and default_handler parameters. Then, they call the create_adapter method on the Subscribes object with \"MyTopic\" as the property name and set its interface handler to the default_handler. The function returns an adapter for the \"MyTopic\" property with its interface handler set to the default_handler." + }, + "name": "create_adapter", + "location": { + "start": 137, + "insert": 138, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 23, + "docLength": null + }, + { + "id": "ae335a96-8218-54a3-2b4c-288e4dcc018a", + "ancestors": [], + "description": "Defines an adapter creator that enables communication between Ice clients and the Kinova arm by creating adapters for property names and interface handlers.", + "attributes": [ + { + "name": "ice_connector", + "type_name": "iceConnector|iceProperties", + "description": "Used to manage objects created by the adapter." + }, + { + "name": "kinovaarm", + "type_name": "KinovaArmI", + "description": "Created by calling the `create_adapter` method with the property name \"KinovaArm\"." + }, + { + "name": "create_adapter", + "type_name": "Optional[Adapter]", + "description": "Used to create a new adapter instance with given name and interface handler." + } + ], + "name": "Implements", + "location": { + "start": 162, + "insert": 163, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 9, + "docLength": null + }, + { + "id": "cb61369b-89eb-e7aa-ee4a-91bc2cea4409", + "ancestors": [ + "ae335a96-8218-54a3-2b4c-288e4dcc018a" + ], + "description": "Initializes the object by setting its ice connector and creating an adapter for the KinovaArm device using the create_adapter() method.", + "params": [ + { + "name": "ice_connector", + "type_name": "object", + "description": "Used to store the instance of an IceConnector class, which is responsible for handling communication between the application and the Kinova Arm device." 
+ }, + { + "name": "default_handler", + "type_name": "KinovaArmI", + "description": "Passed to create an instance of the KinovaArm class, providing a default handler for the arm's actions." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from Implements import Implements\n\n# Create an instance of the Implements class with the required arguments\nimplement = Implements(\"ice_connector\", \"default_handler\")\n\n# Use the create_adapter method to create a new adapter for the KinovaArm property\nkinovaarm_adapter = implement.create_adapter(\"KinovaArm\", kinovaarmI.KinovaArmI(default_handler))\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 163, + "insert": 164, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 3, + "docLength": null + }, + { + "id": "68fb795f-8315-31bb-a644-11eee8393c86", + "ancestors": [ + "ae335a96-8218-54a3-2b4c-288e4dcc018a" + ], + "description": "Creates an adapter object using the `ice_connector.createObjectAdapter` method and adds an interface handler to it using the `add` method. The added interface handler is then activated using the `activate` method.", + "params": [ + { + "name": "property_name", + "type_name": "str | PropertyNameType", + "description": "Used to specify the name of the property for which an adapter will be created." + }, + { + "name": "interface_handler", + "type_name": "Any | Ice.Reference", + "description": "Used to specify the interface that the adapter will handle." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from Implements import Implements\n\n# Initialize an instance of the Implements class\nimpl = Implements()\n\n# Create a new adapter for the KinovaArm interface\nkinovaarm = impl.create_adapter(\"KinovaArm\", kinovaarmI.KinovaArmI(default_handler))\n\n# Activate the adapter to start using it\nkinovaarm.activate()\n", + "description": "" + }, + "name": "create_adapter", + "location": { + "start": 167, + "insert": 168, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 4, + "docLength": null + }, + { + "id": "adbb6103-88c6-90b4-e046-64907f282dac", + "ancestors": [], + "description": "Manages interfaces, including setting default handlers for implementations and subscriptions, getting proxies maps, and destroying the interface when needed.", + "attributes": [ + { + "name": "ice_config_file", + "type_name": "File|str", + "description": "Used to store the configuration file for the Interprise Communication Interface (Ice) connection." + }, + { + "name": "ice_connector", + "type_name": "Iceinitializeselfice_config_file", + "description": "Used to initialize the Ice connection and retrieve properties." + }, + { + "name": "topic_manager", + "type_name": "IceStormTopicManagerPrx|IceStormTopicManager", + "description": "Initialized when the object is created through the `init_topic_manager()` method. It is used to manage topics in the interface." + }, + { + "name": "init_topic_manager", + "type_name": "IceStormTopicManagerPrx|IceConnectionRefusedException", + "description": "Responsible for initializing a TopicManager proxy using the configuration file." + }, + { + "name": "status", + "type_name": "int", + "description": "0 by default, indicating a successful initialization of the interface manager." 
+ }, + { + "name": "parameters", + "type_name": "Dict[str,str]", + "description": "Created by iterating over the properties returned by the Ice connector's `getProperties()` method, storing their values as strings." + }, + { + "name": "requires", + "type_name": "Requires", + "description": "Used to store a map of proxy identifiers to their corresponding IceStorm proxies." + }, + { + "name": "publishes", + "type_name": "PublishesPrx|InterfaceManager", + "description": "Used to manage publish operations by mapping interfaces to their corresponding pub/sub topics." + }, + { + "name": "implements", + "type_name": "Implements|Proxy", + "description": "Used to specify the default handler for implementing interfaces." + }, + { + "name": "subscribes", + "type_name": "Tuple[str,IceStormTopicManagerPrx]", + "description": "Used to store the subscriptions managed by the interface manager." + } + ], + "name": "InterfaceManager", + "location": { + "start": 173, + "insert": 174, + "offset": " ", + "indent": 4, + "comment": null + }, + "item_type": "class", + "length": 40, + "docLength": null + }, + { + "id": "11fa8932-82b5-c8bc-b342-664ab4d9cb99", + "ancestors": [ + "adbb6103-88c6-90b4-e046-64907f282dac" + ], + "description": "Initializes instance variables and sets up the Ice framework, topic manager, and properties.", + "params": [ + { + "name": "ice_config_file", + "type_name": "str | Path", + "description": "Used to initialize Ice components from an Ice configuration file." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "import icepyx as ipx\n\n# Initialize IceStorm interface manager with configuration file\nmanager = InterfaceManager(\"config.txt\")\n\n# Get the topic manager object\ntopic_manager = manager.getTopicManager()\n\n# Create a new topic\ntopic = topic_manager.createTopic(\"my_topic\", \"My Topic Description\")\n\n# Subscribe to the topic\nsubscriber = topic.subscribe()\n\n# Print the subscription status\nprint(subscriber.getStatus())\n", + "description": "" + }, + "name": "__init__", + "location": { + "start": 174, + "insert": 176, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "constructor", + "length": 14, + "docLength": null + }, + { + "id": "3ee5aa8a-82f0-e58c-c442-1bac1b3fe769", + "ancestors": [ + "adbb6103-88c6-90b4-e046-64907f282dac" + ], + "description": "Initates and manages a Topic Manager instance for communication with a remote node.", + "params": [], + "returns": { + "type_name": "IceStormTopicManagerPrx", + "description": "A proxy object for the TopicManager interface." 
+ }, + "usage": { + "language": "python", + "code": "# Create an instance of the InterfaceManager class\ninterface_manager = InterfaceManager('my_ice_config_file.txt')\n\n# Call the init_topic_manager method to initialize the topic manager\ntopic_manager = interface_manager.init_topic_manager()\n\n# Use the initialized TopicManager to manage topics\n", + "description": "" + }, + "name": "init_topic_manager", + "location": { + "start": 192, + "insert": 194, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 8, + "docLength": null + }, + { + "id": "46920010-c906-008c-b046-4f2da308c39c", + "ancestors": [ + "adbb6103-88c6-90b4-e046-64907f282dac" + ], + "description": "Sets the default handler for incoming messages on the interface by creating an instance of the `Implements` and `Subscribes` classes, passing the necessary parameters.", + "params": [ + { + "name": "handler", + "type_name": "Callable[Any, Any]", + "description": "Responsible for handling incoming messages from the IceConnector." + } + ], + "returns": null, + "usage": { + "language": "python", + "code": "from icepyx import InterfaceManager\n\n# Initialize the interface manager with the configuration file path\nmanager = InterfaceManager(\"ice.cfg\")\n\n# Set a default handler for all interfaces\nmanager.set_default_handler(print)\n\n# Start the Ice connection\nmanager.start()\n", + "description": "" + }, + "name": "set_default_hanlder", + "location": { + "start": 202, + "insert": 203, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + }, + { + "id": "2cdcc972-34fc-8d9f-a145-cdd29de30ccb", + "ancestors": [ + "adbb6103-88c6-90b4-e046-64907f282dac" + ], + "description": "Aggregates proxies maps from parent and child classes, returning a unified map for further use.", + "params": [], + "returns": { + "type_name": "Dict[str,int]", + "description": "A dictionary containing the proxies for both required and published APIs." + }, + "usage": { + "language": "python", + "code": "from ice import InterfaceManager\n\n# Initialize an Ice connector with an ice config file\nconnector = InterfaceManager(\"ice.cfg\")\n\n# Get a map of proxy objects for all requires, publishes and implements interfaces\nproxies_map = connector.get_proxies_map()\n", + "description": "" + }, + "name": "get_proxies_map", + "location": { + "start": 206, + "insert": 207, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 5, + "docLength": null + }, + { + "id": "006a7847-1f71-a296-9345-41211332c991", + "ancestors": [ + "adbb6103-88c6-90b4-e046-64907f282dac" + ], + "description": "Terminates any open connections and destroys the IceConnector object, which is a critical component for establishing and managing interfaces.", + "params": [], + "returns": null, + "usage": { + "language": "python", + "code": "interface_manager = InterfaceManager(ice_config_file=\"my_ice_config_file.conf\")\n# do something with interface manager\ninterface_manager.destroy()\n", + "description": "\nThis code creates an instance of the InterfaceManager class and initializes it using a configuration file called \"my_ice_config_file.conf\". After doing some work with the instance, the destroy method is called to clean up any resources that were allocated during its lifetime." 
+ }, + "name": "destroy", + "location": { + "start": 212, + "insert": 213, + "offset": " ", + "indent": 8, + "comment": null + }, + "item_type": "method", + "length": 3, + "docLength": null + } + ] + } + } + } +] \ No newline at end of file diff --git a/.komment/komment.json b/.komment/komment.json new file mode 100644 index 0000000..0ca1494 --- /dev/null +++ b/.komment/komment.json @@ -0,0 +1,33 @@ +{ + "meta": { + "version": "1", + "updated_at": "2024-07-24T11:48:24.616Z", + "created_at": "2024-07-03T12:07:43.789Z", + "pipelines": [ + "1e4ffa95-bdaf-4704-9e22-a4e72cbd5dbe", + "e329fad3-152c-4b69-b3f3-adec4460ee20", + "e13d59cf-aac3-4640-92c5-48fea5fa852c", + "7013db2f-829c-4c3a-938f-e6dcdc3ab191", + "4491c5fd-b71b-49e0-8541-46bb94e775d8", + "de7a47c1-ece4-4bf1-9c4b-6028e2cda2fc", + "9814c399-3b35-44fc-9710-2c2ec682082e", + "2f1a3f35-66e0-44e0-b2ef-66474aa9dc9b", + "5212b28c-e549-4008-af08-a2ba530a1ffb", + "6c45dd8b-72bd-4c2b-b45d-3098c6c49c86", + "7da7f7f4-27ac-4c85-9b54-acf249641c8f", + "3d298415-35f6-4628-aeb7-d8fb2f055a9c", + "b0ca715b-40b5-4025-a5cf-99989a3fdd89" + ] + }, + "lookup": [ + [ + "components/camera_kinova/src/specificworker.py", + "pybullet_controller/src/specificworker.py", + "components/kinova_controller/src/specificworker.py", + "components/kinova_controller/src/kinovaarmI.py", + "components/kinova_controller/src/test.py", + "components/kinova_controller/src/genericworker.py", + "components/kinova_controller/src/interfaces.py" + ] + ] +} \ No newline at end of file diff --git a/components/camera_kinova/src/specificworker.py b/components/camera_kinova/src/specificworker.py index 5eecd58..675b6a1 100644 --- a/components/camera_kinova/src/specificworker.py +++ b/components/camera_kinova/src/specificworker.py @@ -22,6 +22,7 @@ from PySide2.QtCore import QTimer from PySide2.QtWidgets import QApplication +from orca.debug import pidOf from rich.console import Console from genericworker import * import interfaces as ifaces @@ -37,36 +38,58 @@ console = Console(highlight=False) class SpecificWorker(GenericWorker): + """ + Captures and processes RGB-D video streams from a camera, storing frames in + queues for further processing or retrieval by other components. It manages + threads for color and depth stream capture and provides methods to retrieve + images and depth data. + + Attributes: + Period (int): 1000, indicating a period or interval (in milliseconds) for + various operations to be performed by the worker, such as connecting + to streams and processing frames. + hide (bool): Set to `self.hide()` in the `__init__` method, but its exact + purpose or effect is not clear without additional context. + depth_queue (queueQueue[int]): Initialized with a maximum size of 1, which + allows it to store one frame from the depth stream at a time. + color_queue (Queue[Any]): Used to hold color frames from a video stream, + with a maximum size of one frame at a time. It follows first-in-first-out + (FIFO) order. + init_timestamp (int): Initialized with the current time (in milliseconds) + at the instance creation, obtained using `int(time.time()*1000)`. + startup_check (bool): Used to determine whether a startup check should be + performed when initializing the worker. The check tests various data + structures from the RoboComp library. + timer (QTimer): Connected to the compute method, which it calls every + Period milliseconds. + compute (None|Callable[[],None]): Annotated with @QtCore.Slot(). It + represents a slot that gets called when a timer event occurs. 
The + function does not contain any operation, it only passes. + + """ def __init__(self, proxy_map, startup_check=False): + """ + Initializes object properties, such as time period and queues for depth + and color data, sets up event handling, and starts the worker process upon + initialization or startup check. + + Args: + proxy_map (Dict[str, Any]): Passed to the parent class's constructor + using `super(SpecificWorker, self).__init__(proxy_map)`, indicating + it serves as a configuration or setup map. + startup_check (bool): Optional, with a default value of False. It + determines whether to run startup checks or not when the worker + is initialized. + + """ super(SpecificWorker, self).__init__(proxy_map) - self.Period = 100 + self.Period = 1000 self.hide() #set queue size self.depth_queue = queue.Queue(maxsize=1) self.color_queue = queue.Queue(maxsize=1) - - self.color_stream = cv2.VideoCapture( - "gst-launch-1.0 rtspsrc location=rtsp://192.168.1.10/color latency=30 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert n-threads=2 ! video/x-raw,format=BGR ! queue ! appsink drop=true",cv2.CAP_GSTREAMER) - - self.depth_stream = cv2.VideoCapture( - "gst-launch-1.0 rtspsrc location=rtsp://192.168.1.10/depth latency=30 ! rtpgstdepay ! videoconvert n-threads=2 ! video/x-raw,format=GRAY16_LE ! queue ! appsink drop=true",cv2.CAP_GSTREAMER) - #print(self.depth_stream.isOpened()) - - # create a thread to capture the stream and start it - self.color_thread = threading.Thread(target=self.video_color_thread, args=(self.color_stream, self.color_queue)) - if(not self.color_stream.isOpened()): - print("color stream not opened") - sys.exit() - self.color_thread.start() - - self.depth_thread = threading.Thread(target=self.video_depth_thread, args=(self.depth_stream, self.depth_queue)) - if(not self.depth_stream.isOpened()): - print("depth stream not opened") - sys.exit() - self.depth_thread.start() - - print("Reading threads started") + self.init_timestamp = int(time.time()*1000) if startup_check: self.startup_check() @@ -76,15 +99,74 @@ def __init__(self, proxy_map, startup_check=False): def __del__(self): """Destructor""" + + self.killThreads() + self.color_stream.release() + self.depth_stream.release() + print("Destructor") def setParams(self, params): + """ + Initializes parameters for video stream capture, starts capturing color + stream from an IP address, and creates a thread to process the captured + frames. It also checks if the stream is opened successfully before proceeding. + + Args: + params (Dict[str, str | int | bool]): Expected to contain key-value + pairs representing various settings such as IP address, latency + and others necessary for connecting to an RTSP stream. + + Returns: + bool: Set to True when the execution is successful, indicating that + the video streams have been successfully opened and a thread has been + created to capture them. + + """ + self.ip = params["ip"] + self.run = True + # create the video capture objects + print(f"Connecting to {self.ip}") + # self.color_stream = cv2.VideoCapture( + # f"gst-launch-1.0 rtspsrc location=rtsp://{self.ip}/color latency=30 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert n-threads=2 ! video/x-raw,format=BGR ! queue ! appsink drop=true", + # cv2.CAP_GSTREAMER) + self.color_stream = cv2.VideoCapture( + f"gst-launch-1.0 rtspsrc location=rtsp://{self.ip}/color latency=30 ! rtph264depay ! h264parse ! nvh264dec ! videoconvert n-threads=2 ! video/x-raw,format=BGR ! queue ! 
appsink drop=true", + cv2.CAP_GSTREAMER) + + # self.depth_stream = cv2.VideoCapture( + # f"gst-launch-1.0 rtspsrc location=rtsp://{self.ip}/depth latency=30 ! rtpgstdepay ! videoconvert n-threads=2 ! video/x-raw,format=GRAY16_LE ! queue ! appsink drop=true", + # cv2.CAP_GSTREAMER) + # print(self.depth_stream.isOpened()) + + # create a thread to capture the stream and start it + self.color_thread = threading.Thread(target=self.video_color_thread, args=(self.color_stream, self.color_queue)) + if (not self.color_stream.isOpened()): + print("color stream not opened") + sys.exit() + self.color_thread.start() + + # self.depth_thread = threading.Thread(target=self.video_depth_thread, args=(self.depth_stream, self.depth_queue)) + # if (not self.depth_stream.isOpened()): + # print("depth stream not opened") + # sys.exit() + # self.depth_thread.start() + + print("Reading threads started") + return True @QtCore.Slot() def compute(self): + """ + Computes color and depth pixel values, but these lines are currently + commented out, rendering them inactive. The method also checks if the + worker is hidden and returns True immediately if so. This could be used + to implement lazy computation. + + """ # print('SpecificWorker.compute...') # # color_frame = self.color_queue.get() @@ -106,8 +188,22 @@ def compute(self): ################################################################################################################ def video_color_thread(self, cap, inqueue): + """ + Captures video frames from an OpenCV camera, puts them into a queue, and + releases the camera when it finishes or is interrupted by a keyboard signal. + + Args: + cap (cv2.VideoCapture): Referenced as an object, likely representing + a video capture device or a video file. It provides functionality + for reading frames from the captured video. + inqueue (Queue[Any]): Used to store frames from the camera for further + processing, allowing for efficient handling of video data and + preventing buffer overflows due to full queue condition. + + """ try: - while cap.isOpened(): + while cap.isOpened() and self.run: + # print("color", int(time.time()*1000)-self.init_timestamp) ret, frame = cap.read() if ret: # inqueue.put_nowait(frame) @@ -118,15 +214,32 @@ def video_color_thread(self, cap, inqueue): # Si la cola está llena, descarta la imagen más antigua y agrega la nueva inqueue.get_nowait() inqueue.put_nowait(frame) + print("color finish") except KeyboardInterrupt: print("hilo finish") cap.release() - def video_depth_thread(self, cap, inqueue): + """ + Reads frames from an opened camera capture object (cap), converts them + into depth data, and pushes them into a shared queue for processing. It + runs until the camera is closed or the thread is stopped. + + Args: + cap (cv2.VideoCapture | None): A video capture object that provides + read access to video frames from the device's camera or other video + source. + inqueue (Queue): Used to store frames from the camera capture (`cap`) + as they are read, enabling thread-safe input queuing. + + Used for handling full queue conditions by removing an item before + adding the new one. 
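The capture threads above follow a single pattern: read frames as fast as the stream delivers them and keep only the newest one in a queue of size 1, so a slow consumer never processes stale images. A minimal, self-contained sketch of that pattern is shown below; the RTSP address and GStreamer pipeline string are placeholders, not the component's exact configuration, which is built from params["ip"].

import queue
import threading
import cv2  # assumes an OpenCV build with GStreamer support

def latest_frame_reader(cap, frame_queue, keep_running):
    """Read frames continuously, keeping only the most recent one in the queue."""
    while cap.isOpened() and keep_running.is_set():
        ret, frame = cap.read()
        if not ret:
            continue
        try:
            frame_queue.put_nowait(frame)
        except queue.Full:
            # Drop the stale frame and store the fresh one instead.
            try:
                frame_queue.get_nowait()
            except queue.Empty:
                pass
            frame_queue.put_nowait(frame)
    cap.release()

if __name__ == "__main__":
    # Hypothetical pipeline; the real component assembles it from its "ip" parameter.
    pipeline = ("rtspsrc location=rtsp://192.168.1.10/color latency=30 ! "
                "rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! "
                "video/x-raw,format=BGR ! appsink drop=true")
    cap = cv2.VideoCapture(pipeline, cv2.CAP_GSTREAMER)
    frames = queue.Queue(maxsize=1)
    keep_running = threading.Event()
    keep_running.set()
    threading.Thread(target=latest_frame_reader,
                     args=(cap, frames, keep_running), daemon=True).start()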
+ + """ #print("inicio bucle") try: - while cap.isOpened(): + while cap.isOpened() and self.run: + print("depth", int(time.time()*1000)-self.init_timestamp) ret, frame = cap.read() if ret: # inqueue.put_nowait(frame) @@ -137,12 +250,31 @@ def video_depth_thread(self, cap, inqueue): # Si la cola está llena, descarta la imagen más antigua y agrega la nueva inqueue.get_nowait() inqueue.put_nowait(frame) + print("depth finish") except KeyboardInterrupt: print("hilo finish") cap.release() + def killThreads(self): + """ + Terminates two threads, color_thread and depth_thread, when its run variable + is set to False, allowing the program to exit safely by joining these + threads before proceeding. + + """ + self.run = False + self.color_thread.join() + self.depth_thread.join() + print("threads killed") + def startup_check(self): + """ + Tests three types of images from the ifaces module, printing a message for + each. After testing, it schedules a single shot to quit the application + after 200 milliseconds using QTimer and QApplication's instance. + + """ print(f"Testing RoboCompCameraRGBDSimple.TImage from ifaces.RoboCompCameraRGBDSimple") test = ifaces.RoboCompCameraRGBDSimple.TImage() print(f"Testing RoboCompCameraRGBDSimple.TDepth from ifaces.RoboCompCameraRGBDSimple") @@ -160,6 +292,22 @@ def startup_check(self): # IMPLEMENTATION of getAll method from CameraRGBDSimple interface # def CameraRGBDSimple_getAll(self, camera): + """ + Retrieves data from two queues and returns it in a structured format, + specifically an instance of TRGBD from the RoboComp library. It handles + exceptions where either queue is empty. + + Args: + camera (CameraRGBDSimple): Not used within the function itself. It + seems to be an unused parameter possibly intended for future use + or method overriding. + + Returns: + TRGBD|None: An object containing depth and image data along with their + corresponding timestamps, unless a queue is empty. In that case it + returns None. + + """ try: ret = ifaces.RoboCompCameraRGBDSimple.TRGBD() #ret.depth.depth = cv2.resize(self.depth_queue.get(), (480, 270)) @@ -170,13 +318,31 @@ def CameraRGBDSimple_getAll(self, camera): ret.image.image = self.color_queue.get_nowait() ret.image.height, ret.image.width, ret.image.depth = ret.image.image.shape ret.image.alivetime = int(time.time()*1000) + print("get all") return ret except queue.Empty: + print("Empty queue") return None # # IMPLEMENTATION of getDepth method from CameraRGBDSimple interface # def CameraRGBDSimple_getDepth(self, camera): + """ + Dequeues an image from a queue, extracts its dimensions and pixel data, + and returns them as a TDepth object representing depth information. + + Args: + camera (RoboCompCameraRGBDSimple): Passed to the method as an argument, + however its usage within the method is unclear and appears redundant + since it's not used. + + Returns: + ifacesRoboCompCameraRGBDSimpleTDepth: A structured data object containing + three elements: ret.height, ret.width and ret.depth where ret.height + and ret.width represent the image dimensions and ret.depth represents + the depth image itself. 
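On the consumer side, the alivetime stamp written by getAll (milliseconds since the epoch) lets a client estimate how old a frame is when it arrives. The sketch below is an assumption about a client component: the proxy variable name, the empty camera-name argument, and the omission of error handling for empty queues or Ice exceptions are all illustrative, not part of this diff.

import time

def fetch_rgbd(camerargbdsimple_proxy, camera_name=""):
    """Request the latest RGB-D sample and report how old the colour frame is."""
    rgbd = camerargbdsimple_proxy.getAll(camera_name)
    age_ms = int(time.time() * 1000) - rgbd.image.alivetime
    print(f"colour frame {rgbd.image.width}x{rgbd.image.height}, ~{age_ms} ms old")
    return rgbd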
+ + """ try: img = self.depth_queue.get() # img = cv2.resize(img, (480, 270)) @@ -191,12 +357,28 @@ def CameraRGBDSimple_getDepth(self, camera): # IMPLEMENTATION of getImage method from CameraRGBDSimple interface # def CameraRGBDSimple_getImage(self, camera): + """ + Retrieves an image from a color queue, packages it into a TImage object, + and returns the packaged image along with its height, width, depth, and + alivetime stamp. It handles empty queues by returning None. + + Args: + camera (RoboCompCameraRGBDSimple.Camera): Used to pass a camera object + to the function. + + Returns: + RoboCompCameraRGBDSimpleTImage: An object containing image data along + with its dimensions, alive time and the image itself represented as a + numpy array. + + """ try: img = self.color_queue.get() # img = cv2.resize(img, (480, 270)) ret = ifaces.RoboCompCameraRGBDSimple.TImage() ret.height, ret.width, ret.depth = img.shape ret.image = img + ret.alivetime = int(time.time()*1000) return ret except queue.Empty: diff --git a/components/kinova_controller/src/genericworker.py b/components/kinova_controller/src/genericworker.py index 96564d6..019b357 100644 --- a/components/kinova_controller/src/genericworker.py +++ b/components/kinova_controller/src/genericworker.py @@ -36,11 +36,38 @@ class GenericWorker(QtCore.QObject): + """ + Manages a timer and emits a signal to stop itself after a specified period. + It also has a slot function to set the period of the timer. + + Attributes: + kill (QtCoreQObjectSignal): Used to emit a signal indicating that the + worker should be terminated. + contactile_proxy (ContactileProxy|NoneType): Used to represent a proxy + object for contacting the contactile service. + mutex (QMutex): Used to protect access to the internal state of the worker, + allowing only one thread to access it at a time. + Period (int): 30 milliseconds by default, used for setting the time interval + for emitting the `kill` signal. + timer (QtCoreQTimer): Used to start a timer that emits the `kill` signal + after a specified period. + + """ kill = QtCore.Signal() def __init__(self, mprx): + """ + Initializes instance variables, including a contactile proxy and a timer + for periodic work. + + Args: + mprx (Dict[str, Any]): Passed as an instance variable `contactile_proxy` + as a reference to a ContactileProxy object. + + """ super(GenericWorker, self).__init__() + self.contactile_proxy = mprx["ContactileProxy"] self.mutex = QtCore.QMutex(QtCore.QMutex.Recursive) self.Period = 30 @@ -49,6 +76,10 @@ def __init__(self, mprx): @QtCore.Slot() def killYourSelf(self): + """ + Emits the `kill` signal, indicating that the instance should be terminated. + + """ rDebug("Killing myself") self.kill.emit() @@ -56,6 +87,15 @@ def killYourSelf(self): # @param per Period in ms @QtCore.Slot(int) def setPeriod(self, p): + """ + Updates the ` Period` attribute and starts the timer using its value, + printing a message to indicate the change. + + Args: + p (int): Used to represent the new period value for which the function + will set the timer. 
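The Period and timer attributes documented here follow the usual RoboComp worker pattern: a QTimer fires every Period milliseconds and calls compute(). A stripped-down sketch of that loop, outside the generated GenericWorker scaffolding, is shown below; the class name is illustrative.

from PySide2 import QtCore

class MinimalWorker(QtCore.QObject):
    kill = QtCore.Signal()

    def __init__(self, period_ms=30):
        super().__init__()
        self.Period = period_ms
        self.timer = QtCore.QTimer(self)
        self.timer.timeout.connect(self.compute)
        self.timer.start(self.Period)

    @QtCore.Slot(int)
    def setPeriod(self, p):
        # Restarting the timer applies the new period immediately.
        self.Period = p
        self.timer.start(self.Period)

    @QtCore.Slot()
    def compute(self):
        pass  # periodic work goes here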
+ + """ print("Period changed", p) self.Period = p self.timer.start(self.Period) diff --git a/components/kinova_controller/src/interfaces.py b/components/kinova_controller/src/interfaces.py index 909a3a9..64ecf26 100644 --- a/components/kinova_controller/src/interfaces.py +++ b/components/kinova_controller/src/interfaces.py @@ -5,59 +5,164 @@ console = Console() +Ice.loadSlice("-I ./src/ --all ./src/Contactile.ice") +import RoboCompContactile Ice.loadSlice("-I ./src/ --all ./src/KinovaArm.ice") import RoboCompKinovaArm class TJointSeq(list): + """ + Manages a sequence of `RoboCompKinovaArm.TJoint` objects, providing methods + for appending, extending, and inserting joints. + + """ def __init__(self, iterable=list()): super(TJointSeq, self).__init__(iterable) def append(self, item): + """ + Adds an item to the sequence, ensuring it is an instance of the + RoboCompKinovaArm.TJoint class before appending it to the parent list. + + Args: + item (RoboCompKinovaArm.TJoint): Asserted to be an instance of that + class before being appended to the sequence. + + """ assert isinstance(item, RoboCompKinovaArm.TJoint) super(TJointSeq, self).append(item) def extend(self, iterable): + """ + Iterates over an input iterable and asserts that each item is an instance + of the RoboCompKinovaArm.TJoint class before extending the sequence with + those items using the superclass's `extend` method. + + Args: + iterable (Sequence[TJoint]): An iterable collection of TJoint objects + to be extended. + + """ for item in iterable: assert isinstance(item, RoboCompKinovaArm.TJoint) super(TJointSeq, self).extend(iterable) def insert(self, index, item): + """ + Modifies the list's index and item by inserting an instance of the + RoboCompKinovaArm.TJoint object at the specified index. + + Args: + index (int): Used to specify the position at which the new joint should + be inserted in the sequence. + item (RoboCompKinovaArm.TJoint): Asserted to be an instance of the + TJoint class before calling the superclass's insert method. + + """ assert isinstance(item, RoboCompKinovaArm.TJoint) super(TJointSeq, self).insert(index, item) setattr(RoboCompKinovaArm, "TJointSeq", TJointSeq) class Speeds(list): + """ + Manages a list of floating-point numbers, providing methods to append, extend, + and insert elements with type checks for input values. + + """ def __init__(self, iterable=list()): super(Speeds, self).__init__(iterable) def append(self, item): + """ + Adds an item to the list, checking that it is of type float before appending + it to the superclass list. + + Args: + item (float): Asserted to be of that type before being appended. + + """ assert isinstance(item, float) super(Speeds, self).append(item) def extend(self, iterable): + """ + Takes an iterable and ensures that each item is an instance of `float`. + Then, it calls the `super().extend()` method to add the items to the Speeds + list. + + Args: + iterable (Iterable[float]): An iterable object containing floating-point + numbers that the Speeds class will validate and add to its internal + list. + + """ for item in iterable: assert isinstance(item, float) super(Speeds, self).extend(iterable) def insert(self, index, item): + """ + Inserts an item at a given index, checking that the item is a float and + calling the parent class's `insert` method with the index and item as arguments. + + Args: + index (int): Used to indicate the position at which the new element + should be inserted. 
+ item (float): Asserted to be an instance of that type before it is + passed to the superclass's insert method. + + """ assert isinstance(item, float) super(Speeds, self).insert(index, item) setattr(RoboCompKinovaArm, "Speeds", Speeds) class Angles(list): + """ + Is a list-based implementation of angles, providing methods for appending, + extending, and inserting angles as floating-point values. + + """ def __init__(self, iterable=list()): super(Angles, self).__init__(iterable) def append(self, item): + """ + Adds a float value to the list of angles. + + Args: + item (float): Asserted to be an instance of that type before being + appended to the Angles object. + + """ assert isinstance(item, float) super(Angles, self).append(item) def extend(self, iterable): + """ + Checks if each item in the iterable is a float, then calls the parent + class's `extend` method with the same iterable. + + Args: + iterable (Iterable[float]): Used to add elements to the Angles object. + + """ for item in iterable: assert isinstance(item, float) super(Angles, self).extend(iterable) def insert(self, index, item): + """ + Inserts an item into the list at a specified index, checking that the input + item is a float and calling the superclass's implementation before inserting + it. + + Args: + index (int): Used to specify the position where the new element will + be inserted in the list. + item (float): Asserted to be an instance of that type before being + passed to the superclass method for insertion. + + """ assert isinstance(item, float) super(Angles, self).insert(index, item) @@ -68,7 +173,30 @@ def insert(self, index, item): class Publishes: + """ + Manages topics and provides a way to publish messages to them through a proxy + object, allowing multiple clients to create and manage topics independently. + + Attributes: + ice_connector (object): A reference to the Ice Connector used for communication + with the server. + mprx (Dict[str,IceObject]): A mapping of topics to their corresponding Ice + proxies. + topic_manager (TopicManager|IceStormNoSuchTopic): Responsible for managing + topics created by the instance. + + """ def __init__(self, ice_connector, topic_manager): + """ + Initializes instance variables: ice_connector and topic_manager, which are + used to manage communication between the publisher and the Ice platform. + + Args: + ice_connector (object): Used to store an instance of `IceConnector`, + which provides connectivity between distributed components. + topic_manager (object): Used to store an instance of `TopicManager`. + + """ self.ice_connector = ice_connector self.mprx={} self.topic_manager = topic_manager @@ -76,6 +204,21 @@ def __init__(self, ice_connector, topic_manager): def create_topic(self, topic_name, ice_proxy): # Create a proxy to publish a AprilBasedLocalization topic + """ + Creates a new topic or retrieves an existing one based on its name, and + returns an Ice proxy for the publisher of that topic. + + Args: + topic_name (str): Used to represent the name of a topic to be created + or retrieved from the Topic Manager. + ice_proxy (IceStorm.Publisher.UncheckedCast[Any] ): Used to convert + the publisher into an unchecked cast proxy. + + Returns: + IcePyUncheckedCastProxy: An instance of the IcePyUncheckedCast class, + representing a one-way publisher for the specified topic name. 
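TJointSeq, Speeds and Angles all share one pattern: a list subclass that asserts the element type in append, extend and insert before delegating to list, and is then attached to the generated RoboCompKinovaArm module with setattr. A generic sketch of that pattern follows; the element type used here is a stand-in.

class TypedList(list):
    """List that only accepts elements of a fixed type."""
    element_type = float  # stand-in; the generated classes use TJoint or float

    def append(self, item):
        assert isinstance(item, self.element_type)
        super().append(item)

    def extend(self, iterable):
        items = list(iterable)  # materialise first so a generator is not consumed twice
        for item in items:
            assert isinstance(item, self.element_type)
        super().extend(items)

    def insert(self, index, item):
        assert isinstance(item, self.element_type)
        super().insert(index, item)

speeds = TypedList([1.0, 2.0])
speeds.append(3.0)       # fine
# speeds.append("fast")  # would raise AssertionError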
+ + """ topic = False try: topic = self.topic_manager.retrieve(topic_name) @@ -99,15 +242,58 @@ def get_proxies_map(self): class Requires: + """ + Manages proxy objects for accessing remote objects through an Ice connector. + It creates and stores proxies for various properties, allowing for easy access + to the remote objects through a consistent interface. + + Attributes: + ice_connector (IceConnector|None): Used to manage communication between + objects across different partitions in a distributed system. + mprx (Dict[str,ice_proxy]): Used to store proxy objects for different + properties of a remote object. + Contactile (RoboCompContactileContactilePrx|IceObject): Used to create a + proxy for the Contactile object. + create_proxy (bool|IcePrx): Used to create a proxy object for a remote + object property. + + """ def __init__(self, ice_connector): + """ + Initializes instance variables ice_connector and mprx, as well as creating + a proxy for the Contactile object using the create_proxy method. + + Args: + ice_connector (RoboCompICEConnectorPrx): Required to initialize the + object. It represents an instance of the RoboCompICEConnector + interface, which provides access to the RoboComp toolkit's + functionality for robot control and interaction with external systems. + + """ self.ice_connector = ice_connector self.mprx={} + self.Contactile = self.create_proxy("ContactileProxy", RoboCompContactile.ContactilePrx) + def get_proxies_map(self): return self.mprx def create_proxy(self, property_name, ice_proxy): # Remote object connection for + """ + Creates a proxy object for a specified property of an IceGrid connection. + + Args: + property_name (str): Used to specify the name of the property in the + remote object to be accessed through the proxy. + ice_proxy (Ice.Proxy | NoneType): Used to create a proxy object for + the remote object. + + Returns: + bool|str: 2-element tuple containing the success status and the created + proxy object. + + """ try: proxy_string = self.ice_connector.getProperties().getProperty(property_name) try: @@ -126,11 +312,50 @@ def create_proxy(self, property_name, ice_proxy): class Subscribes: + """ + Creates an ObjectAdapter and adds a Proxy to it. It then subscribes to a topic, + retrieves its publisher, and activates the adapter. + + Attributes: + ice_connector (ObjectAdapter|IceReference): Used to create an Ice reference + for connecting to the Intermediate Java Representation (IJR) server. + topic_manager (TopicManager|IceObjectPrx): Used to manage topics in the system. + + """ def __init__(self, ice_connector, topic_manager, default_handler): + """ + Sets up instance variables ice_connector, topic_manager, and default_handler + for further use by the class. + + Args: + ice_connector (object): Used to set the IceConnector instance for the + TopicManager class. + topic_manager (object): Used to manage topics related to the IceConnector + instance. + default_handler (Callable[object, object]): Used to define a default + handler for topics that are not handled by any other handler in + the system. + + """ self.ice_connector = ice_connector self.topic_manager = topic_manager def create_adapter(self, property_name, interface_handler): + """ + Creates an object adapter and adds a proxy to it, then subscribes to a + topic using a specified QoS and activates the adapter. + + Args: + property_name (str | PropertyName): Used to specify the name of the + topic for which an adapter is to be created. 
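Requires.create_proxy reads a stringified proxy from the Ice configuration and casts it to the typed proxy class generated from the Slice file. A stand-alone sketch of the same lookup using only the standard ZeroC Ice Python API is given below; the config path, property name and endpoint shown are assumptions.

import Ice
import RoboCompContactile  # generated from Contactile.ice

# The config file is expected to define something like:
#   ContactileProxy=contactile:tcp -h 127.0.0.1 -p 10001
communicator = Ice.initialize(["--Ice.Config=etc/config"])
try:
    proxy_string = communicator.getProperties().getProperty("ContactileProxy")
    base_proxy = communicator.stringToProxy(proxy_string)
    contactile = RoboCompContactile.ContactilePrx.uncheckedCast(base_proxy)
    values = contactile.getValues()  # FingerTips with left/right force readings
finally:
    communicator.destroy()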
+ interface_handler (object): Required to handle the interface for the + created adapter. + + Returns: + Adapter: The Ice object adapter created through the communicator's + createObjectAdapter method, to which the handler is added as a one-way proxy. + + """ + adapter = self.ice_connector.createObjectAdapter(property_name) + handler = interface_handler + proxy = adapter.addWithUUID(handler).ice_oneway() @@ -156,19 +381,95 @@ def create_adapter(self, property_name, interface_handler): class Implements: + """ + Registers the servants this component implements (here the KinovaArm + interface) with Ice object adapters so that remote clients can invoke them. + + Attributes: + ice_connector (Ice.Communicator): The Ice communicator used to create + the object adapters. + kinovaarm (KinovaArmI): Created by calling the `create_adapter` method + with the property name "KinovaArm". + create_adapter (method): Used to create a new adapter instance + with a given name and interface handler. + + """ def __init__(self, ice_connector, default_handler): + """ + Initializes the object by setting its ice connector and creating an adapter + for the KinovaArm device using the create_adapter() method. + + Args: + ice_connector (Ice.Communicator): The Ice communicator used to create + the object adapter and resolve identities. + default_handler (object): The worker object wrapped by the KinovaArmI + servant; it provides the implementation of the arm's actions. + + """ self.ice_connector = ice_connector self.kinovaarm = self.create_adapter("KinovaArm", kinovaarmI.KinovaArmI(default_handler)) def create_adapter(self, property_name, interface_handler): + """ + Creates an adapter object using the `ice_connector.createObjectAdapter` + method and adds an interface handler to it using the `add` method. The + added interface handler is then activated using the `activate` method. + + Args: + property_name (str): Used to specify the name of + the property for which an adapter will be created. + interface_handler (object): The servant that will handle calls made + through the adapter. + + """ adapter = self.ice_connector.createObjectAdapter(property_name) adapter.add(interface_handler, self.ice_connector.stringToIdentity(property_name.lower())) adapter.activate() class InterfaceManager: + """ + Manages interfaces, including setting default handlers for implementations and + subscriptions, getting proxies maps, and destroying the interface when needed. + + Attributes: + ice_config_file (str): Used to store the configuration file for the + Ice (ZeroC Internet Communications Engine) connection. + ice_connector (Ice.Communicator): Used to initialize the + Ice connection and retrieve properties. + topic_manager (IceStorm.TopicManagerPrx): Initialized + when the object is created through the `init_topic_manager()` method. + It is used to manage topics in the interface. + init_topic_manager (method): + Responsible for initializing a TopicManager proxy using the configuration + file. + status (int): 0 by default, indicating a successful initialization of the + interface manager. + parameters (Dict[str,str]): Created by iterating over the properties + returned by the Ice connector's `getProperties()` method, storing their + values as strings. + requires (Requires): Used to store a map of proxy identifiers to their + corresponding remote-object proxies.
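Implements.create_adapter relies on the Ice configuration file to supply the adapter's endpoints (a "KinovaArm.Endpoints" property). For a self-contained illustration, the sketch below uses createObjectAdapterWithEndpoints instead, which takes the endpoint inline; the worker object, identity string and port are placeholders.

import Ice
import kinovaarmI  # servant for the KinovaArm interface, as in this repo

def serve_kinovaarm(worker, endpoint="tcp -p 10099"):
    communicator = Ice.initialize()
    try:
        # Equivalent to createObjectAdapter("KinovaArm") when the config file
        # defines KinovaArm.Endpoints; here the endpoint is passed explicitly.
        adapter = communicator.createObjectAdapterWithEndpoints("KinovaArm", endpoint)
        adapter.add(kinovaarmI.KinovaArmI(worker),
                    communicator.stringToIdentity("kinovaarm"))
        adapter.activate()
        communicator.waitForShutdown()
    finally:
        communicator.destroy()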
+ publishes (PublishesPrx|InterfaceManager): Used to manage publish operations + by mapping interfaces to their corresponding pub/sub topics. + implements (Implements|Proxy): Used to specify the default handler for + implementing interfaces. + subscribes (Tuple[str,IceStormTopicManagerPrx]): Used to store the + subscriptions managed by the interface manager. + + """ def __init__(self, ice_config_file): # TODO: Make ice connector singleton + """ + Initializes instance variables and sets up the Ice framework, topic manager, + and properties. + + Args: + ice_config_file (str | Path): Used to initialize Ice components from + an Ice configuration file. + + """ self.ice_config_file = ice_config_file self.ice_connector = Ice.initialize(self.ice_config_file) needs_rcnode = False @@ -187,6 +488,14 @@ def __init__(self, ice_config_file): def init_topic_manager(self): # Topic Manager + """ + Initates and manages a Topic Manager instance for communication with a + remote node. + + Returns: + IceStormTopicManagerPrx: A proxy object for the TopicManager interface. + + """ proxy = self.ice_connector.getProperties().getProperty("TopicManager.Proxy") obj = self.ice_connector.stringToProxy(proxy) try: @@ -196,16 +505,40 @@ def init_topic_manager(self): exit(-1) def set_default_hanlder(self, handler): + """ + Sets the default handler for incoming messages on the interface by creating + an instance of the `Implements` and `Subscribes` classes, passing the + necessary parameters. + + Args: + handler (Callable[Any, Any]): Responsible for handling incoming messages + from the IceConnector. + + """ self.implements = Implements(self.ice_connector, handler) self.subscribes = Subscribes(self.ice_connector, self.topic_manager, handler) def get_proxies_map(self): + """ + Aggregates proxies maps from parent and child classes, returning a unified + map for further use. + + Returns: + Dict[str,int]: A dictionary containing the proxies for both required + and published APIs. + + """ result = {} result.update(self.requires.get_proxies_map()) result.update(self.publishes.get_proxies_map()) return result def destroy(self): + """ + Terminates any open connections and destroys the IceConnector object, which + is a critical component for establishing and managing interfaces. + + """ if self.ice_connector: self.ice_connector.destroy() diff --git a/components/kinova_controller/src/kinovaarmI.py b/components/kinova_controller/src/kinovaarmI.py index f2e3d8e..ddff1b8 100644 --- a/components/kinova_controller/src/kinovaarmI.py +++ b/components/kinova_controller/src/kinovaarmI.py @@ -34,6 +34,17 @@ from RoboCompKinovaArm import * class KinovaArmI(KinovaArm): + """ + Provides methods for controlling a Kinova arm, including closing and opening + the gripper, getting the center of tool, gripper state, joints state, and + moving the joints with angle or speed. + + Attributes: + worker + (KinovaArmWorker|KinovaArm__closeGripper|KinovaArm_getCenterOfTool|KinovaArm_getGripperState|KinovaArm_getJointsState|KinovaArm_moveJ): + Used to provide access to the underlying KinovaArm worker API. 
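Taken together, these pieces are wired up by the component's entry point roughly as follows: build an InterfaceManager from the Ice config file, hand its proxy map to the worker, and register the worker as the default handler for the implemented and subscribed interfaces. The sketch below is an approximation under those assumptions; the generated main script in the repository is the authoritative version, and the config path is illustrative.

import sys
from interfaces import InterfaceManager
from specificworker import SpecificWorker

if __name__ == "__main__":
    manager = InterfaceManager("etc/config")             # path is an assumption
    if manager.status != 0:
        sys.exit(manager.status)
    worker = SpecificWorker(manager.get_proxies_map())   # e.g. {"ContactileProxy": ...}
    manager.set_default_hanlder(worker)  # note: the method name is spelled this way in the code
    # ... the Qt event loop runs here so the worker's QTimer can fire ...
    manager.destroy()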
+ + """ def __init__(self, worker): self.worker = worker diff --git a/components/kinova_controller/src/specificworker.py b/components/kinova_controller/src/specificworker.py index 290cb5c..5cb000f 100644 --- a/components/kinova_controller/src/specificworker.py +++ b/components/kinova_controller/src/specificworker.py @@ -40,18 +40,78 @@ from test import KinovaGen3 class SpecificWorker(GenericWorker): + """ + Is a worker thread that interacts with a Kinova Gen3 robotic arm and its + gripper, providing methods to control the arm's movement, get its joints and + gripper states, open and close the gripper, and retrieve tactile data from + contact sensors. + + Attributes: + Period (int): 1 by default. It likely represents the period at which a + certain action or computation is performed, possibly tied to a timer + event with a duration of 1 second. + startup_check (bool): Set to True when called. It calls a series of tests + on various interfaces, including RoboCompContactile.FingerTip and RoboCompKinovaArm.TPose. + kinova (KinovaGen3): Used to interact with a kinova arm, such as getting + its joints' state, moving its gripper, or setting its joint angles. + flag (bool): Initialized to True in the `__init__` method. It is used as + a control variable in the `compute` method, toggling its value each + time the timer event occurs. + timer (QTimer): Initialized with a periodic timer that calls the `compute` + method at regular intervals specified by the `Period` attribute. + compute (QtCoreSlot|None): Called when a timer timeout occurs. It gets + joints data from KinovaGen3, calculates joint speeds, updates timestamps, + and prints contactile data. + joints (ifacesRoboCompKinovaArmTJoints|None): Initialized as None. It + stores a list of joints information, including position, velocity, and + force, retrieved from the KinovaGen3 robot arm. + gripper (ifacesRoboCompKinovaArmTGripper): Updated in the `compute` method + with the current state of the gripper, retrieved by calling the + `get_gripper_state` method of the KinovaGen3 object. + speeds (ifacesRoboCompKinovaArmTJointSpeeds): 7-element list, representing + the speed of each joint of the Kinova arm, initialized with all elements + set to zero. + moveWithSpeed (bool): Set to False by default. When it is set to True, the + worker moves the joints with speeds specified in the `speeds.jointSpeeds` + list. + timestamp (int): Initialized to the current time (in milliseconds) when + the worker object is created. It is used to measure the elapsed time + between timer events. + + """ def __init__(self, proxy_map, startup_check=False): + """ + Initializes an instance with a proxy map and optional startup check. If + the startup check is enabled, it performs some checks. Otherwise, it sets + up a timer to run the compute function at regular intervals, initializes + joints and gripper variables, and sets default speeds for the robot arm. + + Args: + proxy_map (object): Passed to the parent class `SpecificWorker` during + initialization. It sets up the proxy mapping for the worker's + interfaces, allowing it to interact with other components in the + system. + startup_check (bool): Optional, with default value False. Its presence + determines whether to perform startup checks or initialize the + Kinova robot directly. If set to True, startup checks are performed; + otherwise, initialization proceeds as usual. 
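From a client component, the KinovaArm interface served by KinovaArmI is driven through its Ice proxy, using the same TJointSpeeds type that this worker builds internally. The snippet below is a hedged client-side sketch; the proxy variable name and the seven-joint layout are assumptions matching a Gen3 arm.

import RoboCompKinovaArm

def nudge_first_joint(kinovaarm_proxy, speed=5.0):
    """Command a small speed on joint 0 and zero on the remaining joints."""
    speeds = RoboCompKinovaArm.TJointSpeeds()
    speeds.jointSpeeds = [speed, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]  # units as expected by the arm
    kinovaarm_proxy.moveJointsWithSpeed(speeds)
    # The worker zeroes these speeds itself if no new order arrives within about one second.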
+ + """ super(SpecificWorker, self).__init__(proxy_map) self.Period = 1 if startup_check: self.startup_check() else: self.kinova = KinovaGen3() - self.flag = False + self.flag = True self.timer.timeout.connect(self.compute) self.timer.start(self.Period) self.joints = [] self.gripper = [] + self.speeds = ifaces.RoboCompKinovaArm.TJointSpeeds() + self.speeds.jointSpeeds = [0, 0, 0, 0, 0, 0, 0] + self.moveWithSpeed = False + self.timestamp = time.time()*1000 def __del__(self): """Destructor""" @@ -62,13 +122,37 @@ def setParams(self, params): # except: # traceback.print_exc() # print("Error reading config params") + """ + Sets parameters and returns `True`. If any error occurs during the process, + it does not handle or report it explicitly. + + Args: + params (Any): Intended to be used for setting or updating parameters + in the class instance. The actual data type of `params` can vary + depending on how it is used within the function. + + Returns: + bool: `True`. This indicates that the function execution has been + successful and all parameters have been set correctly. + + """ return True @QtCore.Slot() def compute(self): + # print("timer init", time.time()*1000) + """ + Retrieves joint positions, velocities, and torques from a kinova arm, sends + gripper state to a RoboCompKinovaArm object, moves joints with specified + speeds, updates timestamp, and returns True. + + Returns: + bool: `True`. + + """ if self.flag: - self.kinova.get_camera_info() + # self.kinova.get_camera_info() self.flag = False ret = ifaces.RoboCompKinovaArm.TJoints() @@ -90,9 +174,32 @@ def compute(self): ret.distance = self.kinova.get_gripper_state() self.gripper = ret + if self.moveWithSpeed: + if self.lastMoveOrder + 1000 < time.time() * 1000: + self.speeds.jointSpeeds = [0, 0, 0, 0, 0, 0, 0] + self.moveWithSpeed = False + self.kinova.move_joints_with_speeds(self.speeds.jointSpeeds) + + # self.kinova.gripper_move_speed(-0.005) + + # print("timer end", time.time()*1000) + print(time.time()*1000 - self.timestamp) + print("Contactile data:", self.contactile_proxy.getValues()) + return True def startup_check(self): + """ + Performs startup checks for several RoboComp interfaces, including + RoboCompContactile and RoboCompKinovaArm. It instantiates test objects + from these interfaces and prints corresponding messages to the console + before quitting the application after a 200ms delay. + + """ + print(f"Testing RoboCompContactile.FingerTip from ifaces.RoboCompContactile") + test = ifaces.RoboCompContactile.FingerTip() + print(f"Testing RoboCompContactile.FingerTips from ifaces.RoboCompContactile") + test = ifaces.RoboCompContactile.FingerTips() print(f"Testing RoboCompKinovaArm.TPose from ifaces.RoboCompKinovaArm") test = ifaces.RoboCompKinovaArm.TPose() print(f"Testing RoboCompKinovaArm.TGripper from ifaces.RoboCompKinovaArm") @@ -108,18 +215,43 @@ def startup_check(self): # IMPLEMENTATION of closeGripper method from KinovaArm interface # def KinovaArm_closeGripper(self): + """ + Controls the gripper of a Kinova robot arm to close slowly and safely by + monitoring tactile sensors until a target force is reached, then stops and + prints a success message. 
- # - # write your CODE here - # - self.kinova.close_gripper_speed() - pass + """ + force = 0 + while force < 6 and self.gripper.distance < 0.9: + self.kinova.gripper_move_speed(-0.005) + tactileValues = self.contactile_proxy.getValues() + leftValues = tactileValues.left + righValues = tactileValues.right + force = (abs(leftValues.x) + abs(leftValues.y) + abs(leftValues.z) + + abs(righValues.x) + abs(righValues.y) + abs(righValues.z)) + print("Gripper closed") + self.kinova.gripper_move_speed(0) # # IMPLEMENTATION of getCenterOfTool method from KinovaArm interface # def KinovaArm_getCenterOfTool(self, referencedTo): + """ + Returns an object of type TPose, representing the center of the tool + (end-effector) of a Kinova arm robot, referenced to a specific coordinate + system or frame. + + Args: + referencedTo (object): Used to specify the coordinate system from which + the center of tool coordinates are calculated. It serves as a + reference frame for calculating the center of tool position. + + Returns: + RoboCompKinovaArmTPose: A data structure representing a pose (position + and orientation) in space, specifically for the Kinova Arm robot. + + """ ret = ifaces.RoboCompKinovaArm.TPose() # # write your CODE here @@ -134,12 +266,16 @@ def KinovaArm_getGripperState(self): # IMPLEMENTATION of openGripper method from KinovaArm interface # def KinovaArm_openGripper(self): - - # - # write your CODE here - # - self.kinova.open_gripper_speed() - pass + """ + Controls the opening of the gripper on a Kinova arm. It continuously reduces + the distance between the gripper's fingers until it reaches a threshold + value of 0.01, then stops and sets the gripper speed to 0. + + """ + while self.gripper.distance > 0.01: + self.kinova.gripper_move_speed(0.005) + + self.kinova.gripper_move_speed(0) # # IMPLEMENTATION of getJointsState method from KinovaArm interface @@ -155,18 +291,52 @@ def KinovaArm_setCenterOfTool(self, pose, referencedTo): # # write your CODE here # + """ + Sets the center of tool for the Kinova arm according to the provided pose + and referencedTo parameters, presumably used for robotic arm control or simulation. + + Args: + pose (Pose): 6-dimensional array representing the position and orientation + of the robot's tool center point. + referencedTo (object): Used to specify the frame of reference for the + pose provided. + + """ pass # # IMPLEMENTATION of moveJointsWithSpeed method from KinovaArm interface # def KinovaArm_moveJointsWithSpeed(self, speeds): - self.kinova.move_joints_with_speeds(speeds.jointSpeeds) + """ + Sets the speeds for moving joints of Kinova arm, marks that movement with + speed is enabled, and records the time of last move order. + + Args: + speeds (List[int]): Utilized to store the speeds at which each joint + of Kinova Arm should move when executing the movement command. + + """ + self.speeds = speeds + self.moveWithSpeed = True + self.lastMoveOrder = time.time()*1000 + # self.kinova.move_joints_with_speeds(speeds.jointSpeeds) # # IMPLEMENTATION of moveJointsWithAngle method from KinovaArm interface # def KinovaArm_moveJointsWithAngle(self, angles): + """ + Moves a Kinova arm to a specified joint angle configuration. The angles + are retrieved from an input object, printed for debugging purposes, and + then sent to the kinova.move_joints_to function to control the arm's joints + accordingly. + + Args: + angles (object): Expected to contain information about joint angles + for the Kinova arm movement. 
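Note that moveJointsWithSpeed does not forward the command immediately: it stores the speeds and a timestamp, and compute() keeps resending them until either a new order arrives or roughly one second elapses, at which point the speeds are zeroed. A stand-alone sketch of that watchdog idea is given below; class and callable names are illustrative.

import time

class SpeedWatchdog:
    """Resend the last speed command until it goes stale, then stop the arm."""

    def __init__(self, send_speeds, timeout_ms=1000, dof=7):
        self.send_speeds = send_speeds      # callable that actually talks to the arm
        self.timeout_ms = timeout_ms
        self.speeds = [0.0] * dof
        self.last_order_ms = 0
        self.active = False

    def order(self, speeds):
        """Accept a new speed command and restart the timeout."""
        self.speeds = list(speeds)
        self.last_order_ms = time.time() * 1000
        self.active = True

    def tick(self):
        """Call periodically, e.g. from the worker's compute loop."""
        if not self.active:
            return
        if self.last_order_ms + self.timeout_ms < time.time() * 1000:
            # The command is stale: zero the speeds and send them one last time.
            self.speeds = [0.0] * len(self.speeds)
            self.active = False
        self.send_speeds(self.speeds)

# Usage sketch: watchdog = SpeedWatchdog(kinova.move_joints_with_speeds)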
+ + """ print(angles.jointAngles) self.kinova.move_joints_to(angles.jointAngles) pass @@ -175,10 +345,24 @@ def KinovaArm_moveJointsWithAngle(self, angles): def buclePrueba(self): + """ + Retrieves the current pose (position and orientation) of the kinova + robot using the `get_pose()` method inherited from the GenericWorker + class. It assigns this information to a variable named `state`. + + """ state = self.kinova.get_pose() #self.kinova.cartesian_move_to(state[0],state[1],0.1,state[3],state[4],state[5]) + ###################### + # From the RoboCompContactile you can call this methods: + # self.contactile_proxy.getValues(...) + + ###################### + # From the RoboCompContactile you can use this types: + # RoboCompContactile.FingerTip + # RoboCompContactile.FingerTips ###################### # From the RoboCompKinovaArm you can use this types: diff --git a/components/kinova_controller/src/test.py b/components/kinova_controller/src/test.py index f95c338..158574e 100644 --- a/components/kinova_controller/src/test.py +++ b/components/kinova_controller/src/test.py @@ -49,8 +49,33 @@ } class KinovaGen3(): + """ + Manages the opening and closing of a gripper using speed commands, as well as + measuring the movement of the gripper during these operations. + + Attributes: + connection (Base_pb2GripperCommand|Base_pb2GripperRequest): Used to send + gripper commands to the Kinova Gen III robot. It allows you to specify + the command mode, finger position, and other parameters for the gripper + movement. + router (Base_pb2GripperCommand): Used to send a gripper command to the + Kinova Gen III robotic arm. It allows for sending different types of + commands, such as speed or position, to control the movement of the gripper. + base (Base_pb2Base): A reference to the underlying robot's base module, + which provides methods for sending commands to the robot's end effectors. + base_cyclic (Base|GripperRequest): Used to specify the cyclic movement + pattern for the gripper, allowing for continuous opening or closing + of the gripper. + + """ def __init__(self): + """ + Establishes connections to a device using TCP and creates two clients: + `BaseClient` for direct interaction with the device, and `BaseCyclicClient` + for cyclic communication. + + """ ip = "192.168.1.10" username = "admin" psw = "admin" @@ -75,6 +100,19 @@ def check_for_end_or_abort(self, e): """ def check(notification, e=e): + """ + Prints the event name associated with a `notification` object and sets + an instance variable `e` to a default value if the event is either + `ACTION_END` or `ACTION_ABORT`. + + Args: + notification (Base_pb2.Notification): Passed as an argument to the + function, providing information about an event that triggered + the function call. + e (Base_pb2.Event): Set to an instance of Event upon calling the + function. + + """ print("EVENT : " + \ Base_pb2.ActionEvent.Name(notification.action_event)) if notification.action_event == Base_pb2.ACTION_END \ @@ -84,6 +122,14 @@ def check(notification, e=e): return check def get_state(self): + """ + Retrieves refresh feedback from the base cyclic component and returns it. + + Returns: + Feedbackbase: A cyclic refresh feedback object created by calling the + `RefreshFeedback` method. 
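check_for_end_or_abort is the standard Kortex pattern for blocking on an action: subscribe to action notifications, execute the action, and wait on a threading.Event that the callback sets on ACTION_END or ACTION_ABORT. A condensed sketch of that sequence is below; the timeout value is an assumption taken from the public Kortex examples, and cartesian_move_to and move_joints_to in this file follow the same shape.

import threading
from kortex_api.autogen.messages import Base_pb2

TIMEOUT_DURATION = 20  # seconds; an assumption matching the Kortex examples

def execute_and_wait(base, action, check_for_end_or_abort):
    """Run an already-built Base_pb2.Action and block until it ends or aborts."""
    e = threading.Event()
    notification_handle = base.OnNotificationActionTopic(
        check_for_end_or_abort(e), Base_pb2.NotificationOptions())
    base.ExecuteAction(action)
    finished = e.wait(TIMEOUT_DURATION)
    base.Unsubscribe(notification_handle)
    return finished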
+ + """ feedback = self.base_cyclic.RefreshFeedback() # for j in feedback.actuators: # print (j.command_id, j.position) @@ -91,6 +137,15 @@ def get_state(self): return feedback.base def get_joints(self): + """ + Computes and returns the positions, velocities, and torques of the joints + of a robotic arm. + + Returns: + Dict[str,float]: A dictionary containing the positions, velocities, + and torques of the joints in the system. + + """ feedback = self.base_cyclic.RefreshFeedback() jointsPosition = [j.position for j in feedback.actuators] jointsVelocity = [j.velocity for j in feedback.actuators] @@ -99,11 +154,29 @@ def get_joints(self): return joints def get_gripper_state(self): + """ + Retrieves the current state of the gripper by sending a `GripperRequest` + message to the base module and returning the measured movement of the + gripper in the form of a `finger` object. + + Returns: + float: The finger position of the gripper in meters. + + """ gripper_request = Base_pb2.GripperRequest() gripper_request.mode = Base_pb2.GRIPPER_POSITION return self.base.GetMeasuredGripperMovement(gripper_request).finger[0].value def get_pose(self): + """ + Computes and returns the tool's pose (position and orientation) in a list + of six elements. + + Returns: + Tuple[float,float,float,float,float,float]: 6 floats representing the + tool's pose in 3D space and orientation. + + """ state = self.get_state() return [state.tool_pose_x, state.tool_pose_y, state.tool_pose_z, @@ -121,8 +194,93 @@ def get_pose(self): def stop_movement(self): self.cartesian_move_relative(0, 0, 0, 0, 0, 0) + def gripper_move_to(self, target_position): + """ + Performs a gripper movement command to reach a specified position. + + Args: + target_position (float): Representing the desired position of the + gripper to move to. + + Returns: + bool: True when the gripper has moved to the target position successfully, + and False otherwise. + + """ + gripper_command = Base_pb2.GripperCommand() + finger = gripper_command.gripper.finger.add() + + # Close the gripper with position increments + print("Performing gripper test in position...") + gripper_command.mode = Base_pb2.GRIPPER_POSITION + position = 0.00 + finger.finger_identifier = 1 + finger.value = target_position + self.base.SendGripperCommand(gripper_command) + # while position < 1.0: + # finger.value = position + # print("Going to position {:0.2f}...".format(finger.value)) + # self.base.SendGripperCommand(gripper_command) + # position += 0.1 + # time.sleep(1) + return True + + def gripper_move_speed(self, speed): + """ + Sets the gripper speed by creating a `GripperCommand` message, setting the + gripper mode and finger value to the input speed, and sending the command + to the base using `SendGripperCommand`. + + Args: + speed (float): Represents the desired speed at which the gripper should + move in the specified finger. + + Returns: + bool: `True` if the gripper command was sent successfully, and `False` + otherwise. 
+ + """ + gripper_command = Base_pb2.GripperCommand() + finger = gripper_command.gripper.finger.add() + + # Close the gripper with speed increments + print("Performing gripper test in speed...") + gripper_command.mode = Base_pb2.GRIPPER_SPEED + finger.finger_identifier = 1 + finger.value = speed + + ## TODO: Check to stop when the sensor detect an object + + + + self.base.SendGripperCommand(gripper_command) + return True + def cartesian_move_to(self, x, y, z, theta_x, theta_y, theta_z): + """ + Moves a robotic arm to a specified position and orientation using a Cartesian + movement approach, waiting for the movement to finish before returning. + + Args: + x (float): Used to set the x-coordinate of the target pose in the + Cartesian space. + y (float): Used to specify the target y-coordinate of the robot's end + position after moving along the x-axis to the specified `x` coordinate. + z (float): Representing the z-coordinate of the target position for + the robot to move to. + theta_x (float): Used to represent the rotation angle of the robot's + end effector around its x-axis during movement. + theta_y (float): Used to specify the yaw angle of the robot's end + effector during the movement. + theta_z (float): Representing the z-rotation angle of the robot end + effector relative to its initial position, along the robot's Z axis. + + Returns: + bool: 1 if the movement was completed successfully within the timeout + duration, and 0 otherwise due to a timeout. + + """ print("Starting Cartesian Especific Movement ...") action = Base_pb2.Action() action.name = "Cartesian Especific movement" @@ -159,6 +317,14 @@ def move_joints_with_speeds(self, speeds): # SPEED = 20.0 # speeds = [SPEED, 0, -SPEED, 0, SPEED, 0, -SPEED] + """ + Calculates and sends joint speeds to the robot. + + Args: + speeds (List[float]): A list of joint speed values to be applied to + the robot's joints. + + """ joint_speeds = Base_pb2.JointSpeeds() i = 0 @@ -174,6 +340,19 @@ def move_joints_with_speeds(self, speeds): # self.base.Stop() def move_joints_to(self, joints): + """ + Moves joints to specified angles using an action object and waits for the + movement to finish before returning. + + Args: + joints (List[float]): Used to specify the angles of the joints to move + to. + + Returns: + bool: 1 if the angular movement was completed successfully within the + specified timeout, and 0 otherwise due to a timeout. + + """ action = Base_pb2.Action() action.name = "Example angular action movement" action.application_data = "" @@ -209,6 +388,15 @@ def move_joints_to(self, joints): # Prints the extrinsic parameters on stdout # def print_extrinsic_parameters(self, extrinsics): + """ + Prints the rotation and translation parameters of an extrinsic parameter + set. + + Args: + extrinsics (Extrinsic | RotationMatrix | TranslationVector): 3x3 + rotation matrix or 3x1 translation vector. + + """ print("Rotation matrix:") print("[{0: .6f} {1: .6f} {2: .6f}".format( \ extrinsics.rotation.row1.column1, extrinsics.rotation.row1.column2, extrinsics.rotation.row1.column3)) @@ -223,6 +411,19 @@ def print_extrinsic_parameters(self, extrinsics): # Returns the device identifier of the Vision module, 0 if not found # def example_vision_get_device_id(self, device_manager): + """ + Retrieves the device ID of the vision module from the devices info based + on the device handle. + + Args: + device_manager (DeviceManager | None): Used to represent the device + manager object that provides information about the devices registered + in the system. 
+ + Returns: + int: The device ID of the first Vision module detected in the system. + + """ vision_device_id = 0 # getting all device routing information (from DeviceManagerClient service) @@ -244,6 +445,17 @@ def example_vision_get_device_id(self, device_manager): # Example showing how to retrieve the extrinsic parameters # def example_routed_vision_get_extrinsics(self, vision_config, vision_device_id): + """ + Retrieves extrinsic parameters for a specific vision device using the + Vision Config Service. + + Args: + vision_config (VisionConfig): Used to retrieve extrinsic parameters + for a specific vision device ID. + vision_device_id (str): Used to identify the specific vision device + for which the extrinsic parameters are being retrieved. + + """ print("\n\n** Example showing how to retrieve the extrinsic parameters **") print("\n-- Using Vision Config Service to get extrinsic parameters --") @@ -266,6 +478,17 @@ def resolution_to_string(self, resolution): # Prints the intrinsic parameters on stdout # def print_intrinsic_parameters(self, intrinsics): + """ + Prints out various parameters of an intrinsic camera model, including + sensor ID, resolution, principal point coordinates, focal length coordinates, + and distortion coefficients. + + Args: + intrinsics (IntrinsicParams): Used to access the intrinsic parameters + of a camera sensor, including the principal point, resolution, + focal length, and distortion coefficients. + + """ print("Sensor: {0} ({1})".format(intrinsics.sensor, self.sensor_to_string(intrinsics.sensor))) print("Resolution: {0} ({1})".format(intrinsics.resolution, self.resolution_to_string(intrinsics.resolution))) print("Principal point x: {0:.6f}".format(intrinsics.principal_point_x)) @@ -283,6 +506,17 @@ def print_intrinsic_parameters(self, intrinsics): # Example showing how to retrieve the intrinsic parameters of the Color and Depth sensors # def example_routed_vision_get_intrinsics(self, vision_config, vision_device_id): + """ + Retrieves intrinsic parameters of Color and Depth sensors using the Vision + Config Service, and prints the retrieved values. + + Args: + vision_config (VisionConfig_pb2.VisionConfig): Used to retrieve intrinsic + parameters for different sensors and resolutions. + vision_device_id (int): Used to identify the specific device to retrieve + intrinsic parameters for. + + """ sensor_id = VisionConfig_pb2.SensorIdentifier() profile_id = VisionConfig_pb2.IntrinsicProfileIdentifier() @@ -311,6 +545,11 @@ def example_routed_vision_get_intrinsics(self, vision_config, vision_device_id): self.print_intrinsic_parameters(intrinsics) def get_camera_info(self): + """ + Retrieves device and intrinsic information for a vision camera connected + to the router. + + """ device_manager = DeviceManagerClient(self.router) vision_config = VisionConfigClient(self.router) @@ -322,6 +561,31 @@ def get_camera_info(self): def cartesian_move_relative(self, x, y, z, theta_x, theta_y, theta_z): + """ + Performs a Cartesian movement action relative to a base tool, taking into + account the specified x, y, z coordinates and theta angles. It executes + the action, waits for it to finish, and returns whether the movement + completed successfully or timed out. + + Args: + x (float): Used to set the x-position of the tool relative to its + initial position. + y (int): Representing the relative movement along the y-axis of the + robot's end effector. + z (float): Representing the relative movement of the tool along the z-axis. 
+ theta_x (float): Representing the angular movement of the tool around + the x-axis, measured in radians. + theta_y (float): Representing the angle of rotation around the Y-axis + of the tool coordinate system, which determines the orientation + of the tool end effector during movement. + theta_z (float): Representing the z-axis rotation angle of the tool + relative to its current position, which is used by the action + movement to achieve the desired orientation in the z-axis direction. + + Returns: + bool: 1 if the cartesian movement was successful and 0 if it timed out. + + """ print("Starting Cartesian action movement ...") action = Base_pb2.Action() action.name = "Example Cartesian action movement" @@ -358,6 +622,16 @@ def cartesian_move_relative(self, x, y, z, theta_x, theta_y, theta_z): def move_gripper_speed_dest(self, dest_pos): + """ + Calculates the gripper movement speed to reach a destination position, + sends a speed command to the gripper, and measures the gripper's movement + to ensure it reaches the desired position. + + Args: + dest_pos (float): Representing the desired position of the gripper to + move it to with the speed command. + + """ gr_pos = self.get_gripper_state() direction = 1 if (gr_pos - dest_pos) > 0 else -1 @@ -393,6 +667,10 @@ def move_gripper_speed_dest(self, dest_pos): def close_gripper_speed(self): + """ + Controls the speed of the gripper using a speed command. + + """ gripper_command = Base_pb2.GripperCommand() finger = gripper_command.gripper.finger.add() # Set speed to close gripper @@ -418,6 +696,11 @@ def close_gripper_speed(self): def open_gripper_speed(self): + """ + Controls the gripper's speed using a command, and then measures its position + to determine when it has reached the desired open state. + + """ gripper_command = Base_pb2.GripperCommand() finger = gripper_command.gripper.finger.add() diff --git a/pybullet_controller/src/specificworker.py b/pybullet_controller/src/specificworker.py index a49004d..d32f382 100644 --- a/pybullet_controller/src/specificworker.py +++ b/pybullet_controller/src/specificworker.py @@ -54,6 +54,8 @@ import collections from pybullet_utils import urdfEditor as ed from pybullet_utils import bullet_client as bc +import matplotlib.pyplot as plt +# from matplotlib.animation import FuncAnimation # Set the LC_NUMERIC environment variable os.environ['LC_NUMERIC'] = 'en_US.UTF-8' @@ -61,11 +63,189 @@ console = Console(highlight=False) + +class JointData: + """ + Stores angles for both Kinova and Bullet physics engines, allowing them to be + used interchangeably for robotic simulations. + + Attributes: + kinovaAngles (List[float]): Used to store a list of angles for Kinova joints. + pybulletAngles (List[float]): Used to store the angles associated with + PyBullet joints. + + """ + def __init__(self): + """ + Initializes two lists: `kinovaAngles` for storing kinematic angles and + `pybulletAngles` for storing bullet angles. + + """ + self.kinovaAngles = [] + self.pybulletAngles = [] + class SpecificWorker(GenericWorker): + """ + Manages communication with a robot arm and a camera, processing user inputs + and sending commands to the robot and the camera to perform specific tasks, + such as moving the robot arm or capturing images. + + Attributes: + Period (float|int): 0 by default, which means that the worker will execute + the specified block of code every time it receives a message from the + Robot Operating System (ROS) for the current task. 
+ rgb (Tuple[float,float,float]): Used to store the RGB values of a point + in 3D space. + timestamp (float|int): Used to store the timestamp of the worker's last execution. + startup_check (bool|str): Used to check if the worker has started successfully + or not. It can be accessed and set through the `startup_check()` method, + which returns a str representing the status of the startup check (either + "success" or "failure"). + physicsClient (RoboCompPhysicsClient|None): Used to handle physics-related + tasks, such as getting robot positions and orientations, and controlling + the movement of the robot. + plane (Tuple[float,float,float]): Used to store the orientation of the + robot's end effector relative to its base. + table_id (int|str): Used to store a unique identifier for each worker, + which can be used to identify the worker in different contexts. + robot_urdf (Tuple[str,str,str]): Used to store the robot's URDF (Universal + Robot Description Format) file path. It can be used to load the robot's + kinematic information and create a robot arm model. + robot_launch_pos (Tuple[float,float,float]): Used to store the initial + position of the robot's end effector. + robot_launch_orien (Tuple[float,float,float]): Used to store the current + orientation of the robot's end effector in 3D space. It is updated + based on the `data` attribute received from the RoboComp server. + end_effector_link_index (int): Used to specify the index of the end effector + link in the robot's skeleton, which determines the position and + orientation of the end effector in space. + robot_id (int|str): Used to identify a specific robot worker within a + worker pool. + home_angles (Tuple[float,float,float]): Used to store the default positions + of the end effector's joints when the worker is in home position. + observation_angles (Tuple[float,float,float]): Used to store the current + angles of the robot's end effector with respect to its base. + table_center (Tuple[float,float,float]): Used to store the center position + of the worker's end effector in the robot's coordinate system. + cylinderId (int|str): A unique identifier for each worker, used to + differentiate between workers in robotics simulations. + threadKinovaAngles (TData): A reference to the current joystick angles of + the Kinova arm, which are updated in real-time through a separate thread. + readDataFromProxy (Dict[str,Any]): Used to read data from a Proxy object, + which represents a robotic arm, and updates the attributes of the + worker based on the received data. + colorKinova (TImage|TRGBD): Used to store the color image captured by the + Kinova gripper. + depthKinova (TImage|TData): Used to store the depth image of the Kinova + arm, which can be obtained through the `kinovaarm_proxy` attribute. + target_angles (Tuple[float]): Used to store the target angles for each + joint of the robot arm in a specific worker mode. + target_position (List[float]): Used to store the desired position of the + robot's end effector for a specific worker. + target_orientation (Tuple[float,float,float]): Used to store the target + orientation of the robot after the user has made an input through the + GUI. It is updated based on the user's input and the current mode of + the robot. + target_velocities (Tuple[float,float,float]): Used to store the target + velocities for the robot's joints when moving to a specific pose. It + contains the target velocities for each joint in the robot's arm. 
+ joy_selected_joint (TData): A parameter that allows to select which joint + of the robot will move according to the joy input, in case of a Kinova + arm. + move_mode (int|str): Used to store the current move mode of the robot, + which can be one of five options: "home", "moving", "pausing", "resuming", + or "stopping". + ext_joints (Tuple[RoboCompCameraRGBDSimpleTImage,RoboCompKinovaArmTPose,]): + Used to store the joint angles of the robot arm and the camera poses. + kinovaarm_proxy (RoboCompKinovaArmTPose|RoboCompKinovaArmTJointAngles): + Used to interact with the Kinova arm. It provides access to the arm's + joint angles and movement. + ext_gripper (Union[float,str]): Used to store the gripper extension value. + timer (float|int): Used to manage the execution time of the worker, allowing + for flexible and efficient handling of tasks. + compute (callable|func): Used to define a function that performs specific + computations based on the values of the worker's attributes. + joint_speeds (List[float]): A list of joint speed values for each joint + of the robot in the range of -1 to 1, representing the desired joint + angles for the robot to move to. + speeds (Tuple[float,float,float]): Represented as a list of 3 float values + representing the speed values for the x, y, and z axes of the robot. + angles (Tuple[float,float,float]): 3D angles representation for the robot's + joints. + gains (float|int): Used to control the speed of movements of the robotic + arm. It sets the gain values for each axis, which determine how quickly + the arm moves in response to input from the user. + posesTimes (Tuple[float,float,float]): Stored as a list of tuples where + each tuple contains the position, orientation, and time of a specific + pose. + poses (Tuple[RoboCompCameraRGBDSimpleTPose]): Used to store the current + pose of the camera in the world coordinate system. + calibrator (Tuple[str,str,str]): Used to store the calibration parameters + for a specific robotic arm, including the gripper position, home + position, and joint angles. + cameraKinovaTimer (float|int): Used to track the time taken by the worker's + camera and kinova arm to perform their tasks. + readKinovaCamera (Callable[[Tuple[str,str,str],Tuple[float,float,float]],Tuple[str,str,str]]): + Used to read data from a Kinova camera. It takes in the camera's API + endpoint and returns a tuple containing the camera's image, depth map, + and RGBD data. + showKinovaStateTimer (float): 0 by default, which means that the worker + will update the kinova state every 1 second. + showKinovaAngles (bool): Used to control whether the angles of the kinova + arm should be shown or not during the execution of the worker. + gainsTimer (float|int): Used to store the time it takes for the worker to + perform its tasks, which allows for more precise control over the + timing of the worker's actions. + updateGains (Dict[str,float]): Used to update the gains of the robot's + actuators based on user inputs from a joystick. + aamed (Tuple[str,float,float,float]): Used to store the current state of + the robot's axes (X, Y, Z, and gripper) in the form of a 3D point. + drawGraphTimer (float|int): Used to control the interval at which the + worker updates its graphical representation during simulation. + drawGraph (Callable[Tuple[str,],None]): Defined as a function that takes + two arguments: the name of the worker and the graph to be drawn. The + function returns None. 
+ pybullet_offset (Tuple[float,float,float]): Used to specify the offset for + the robot's end effector (gripper) when it is in "home" position. + fig (Union[float,int]): Used to store the current position of the robot's + gripper. + ax (Union[float,int]): Used to store the value of a specific axis of a + robot's end effector, such as the X, Y, or Z axis. + jointsErrorMap (Dict[str,float]): Used to store the error maps for each + joint. It maps joint names to their corresponding error values, which + are used to calculate the actual position of the robot's end effector + based on the desired position and the current state of the robot. + left_force_series (List[float]): A series of left forces applied to the + robot's end effector during a specific task or operation. + right_force_series (List[float]): A series of right-hand forces applied + to the robot's end effector, each force being applied over a specific + time interval. + graphTimes (List[float]): Used to store the time points at which the + worker's state is graphed, allowing for visualization of the worker's + behavior over time. + hide (bool): Used to hide or show the worker's actions and variables for + debugging purposes. + + """ def __init__(self, proxy_map, startup_check=False): + """ + Initializes variables and timers for controlling the simulation, camera + calibration, and graph drawing. It also sets up connections to the PyBullet + environment and reads joint angles from the Kinova arm. + + Args: + proxy_map (Dict[str, int]): Used to map joint indices from the PyBullet + API to the corresponding joint indices of the Kinect sensor. It + helps convert data between different frameworks and devices. + startup_check (bool): Used to check if the robot's kinematic tree has + been loaded successfully, before continuing with the initialization + process. 
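+
+        Example (illustration only; in a running component the RoboComp-generated
+        main builds ``proxy_map`` and instantiates the worker itself):
+
+            worker = SpecificWorker(proxy_map, startup_check=False)
+            # compute() is then driven by self.timer every self.Period ms (50 by default)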
+ + """ super(SpecificWorker, self).__init__(proxy_map) self.Period = 50 self.rgb = [] + self.timestamp = int(time.time()*1000) if startup_check: self.startup_check() else: @@ -76,94 +256,74 @@ def __init__(self, proxy_map, startup_check=False): p.setAdditionalSearchPath(pybullet_data.getDataPath()) p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0) p.setGravity(0, 0, -9.81) - #p.setGravity(0, 0, 0) + # Set the real time simulation p.setRealTimeSimulation(1) - # p.setTimeStep(1 / 240) flags = p.URDF_USE_INERTIA_FROM_FILE p.resetDebugVisualizerCamera(cameraDistance=1.5, cameraYaw=50, cameraPitch=-35, cameraTargetPosition=[0, 0, 0.5]) - # load floor - self.plane = p.loadURDF("plane.urdf") + # Load floor in the simulation + self.plane = p.loadURDF("./URDFs/plane/plane.urdf") # Load a table to place the arm on - self.table_id = p.loadURDF("/home/robolab/software/bullet3/data/table/table.urdf", basePosition=[0, 0, 0], + self.table_id = p.loadURDF("./URDFs/table/table.urdf", basePosition=[0, 0, 0], baseOrientation=p.getQuaternionFromEuler([0, 0, 1.57]), flags=flags) - # Load Kinova arm - #self.robot = KinovaGen3() - - #//////////////////////////////////////////////////// - - # Carga manual del brazo kinova - - # self.robot_urdf = "/home/robolab/robocomp_ws/src/robocomp/components/manipulation_kinova_gen3/pybullet_controller/kinova/kinova_with_pybullet/gen3_robotiq_2f_85.urdf" - self.robot_urdf = "/home/robolab/robocomp_ws/src/robocomp/components/manipulation_kinova_gen3/pybullet_controller/kinova/kinova_with_pybullet/gen3_robotiq_2f_85-mod.urdf" + # Load the arm in the simulation + self.robot_urdf = "./URDFs/kinova_with_pybullet/gen3_robotiq_2f_85-mod.urdf" self.robot_launch_pos = [-0.3, 0.0, 0.64] self.robot_launch_orien = p.getQuaternionFromEuler([0, 0, 0]) self.end_effector_link_index = 12 - self.home_angles = [0, -0.34, np.pi, -2.54, -6.28, -0.87, np.pi/2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - - self.observation_angles = [0, 0, np.pi, -0.96, -6.28, -2.1, np.pi/2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + self.robot_id = p.loadURDF(self.robot_urdf, self.robot_launch_pos, self.robot_launch_orien, + flags=p.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT) - self.observation_angles_2 = [0.87, 0.17, 4.01, -1.74, -6.80, -1.92, 3.22, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + # Angles for the home position of the robot + self.home_angles = [0, -0.34, np.pi, -2.54, 0, -0.87, np.pi/2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - self.observation_angles_3 = [0.26, 0.17, 3.52, -1.13, -6.406, -1.78, 2.18, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + # Angles for the observation position of the robot + self.observation_angles = [0, 0, np.pi, -0.96, 0, -2.1, np.pi/2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - self.observation_angles_cube = [2.1753, 0.6980, 2.6924, -1.3950, -6.6862, -1.8915, 3.9945, 0.0, 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - - self.robot_id = p.loadURDF(self.robot_urdf, self.robot_launch_pos, self.robot_launch_orien, - flags=p.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT) + # Coordenates for the center of the table + self.table_center = [0.374, 0.0, 0.27] + # Set the initial joint angles of the pybullet arm for i in range(7): p.resetJointState(bodyUniqueId=self.robot_id, jointIndex=i+1, targetValue=self.home_angles[i], targetVelocity=0) + # Load a square to place on the table for calibration + # 
self.square = p.loadURDF("./URDFs/cube_and_square/cube_small_square.urdf", basePosition=[0.074, 0.0, 0.64], baseOrientation=p.getQuaternionFromEuler([0, 0, 0]), flags=flags) + # texture_path = "./URDFs/cube_and_square/square_texture.png" + # textureIdSquare = p.loadTexture(texture_path) + # p.changeVisualShape(self.square, -1, textureUniqueId=textureIdSquare) - for i in range(7): - p.setJointMotorControl2(self.robot_id, i+1, p.POSITION_CONTROL, targetPosition=self.home_angles[i]) - - #//////////////////////////////////////////////////// - - # Load a cup to place on the table - self.pybullet_cup = p.loadURDF("/home/robolab/software/bullet3/data/dinnerware/cup/cup_small.urdf", basePosition=[0.074, 0.20, 0.64], baseOrientation=p.getQuaternionFromEuler([0, 0, 0]), flags=flags) - - self.square = p.loadURDF("/home/robolab/software/bullet3/data/cube_small_square.urdf", basePosition=[0.074, 0.0, 0.64], baseOrientation=p.getQuaternionFromEuler([0, 0, 0]), flags=flags) - texture_path = "/home/robolab/Escritorio/textura_cubo.png" - textureIdSquare = p.loadTexture(texture_path) - p.changeVisualShape(self.square, -1, textureUniqueId=textureIdSquare) - - # # Load a cube to place on the table - # self.cube = p.loadURDF("/home/robolab/software/bullet3/data/cube_small.urdf", basePosition=[0.074, 0.0, 0.64], baseOrientation=p.getQuaternionFromEuler([0, 0, 0]), flags=flags) - # - # # Cargar la textura - # texture_path = "/home/robolab/Escritorio/textura_cubo_v2.png" + # # Load a cube to place on the table for calibration + # self.cube = p.loadURDF("./URDFs/cube_and_square/cube_small.urdf", basePosition=[0.074, 0.0, 0.64], baseOrientation=p.getQuaternionFromEuler([0, 0, 0]), flags=flags) + # texture_path = "./URDFs/cube_and_texture/cube_texture.png" # textureId = p.loadTexture(texture_path) - # - # # Aplicar la textura al cubo - # # Cambiar el visual shape del cubo # p.changeVisualShape(self.cube, -1, textureUniqueId=textureId) - # Crear una restricción fija entre los dos modelos - fixed_constraint = p.createConstraint(self.table_id, -1, self.robot_id, -1, p.JOINT_FIXED, [0, 0, 0], [0, 0.3, 0.64], [0, 0, 0], childFrameOrientation=p.getQuaternionFromEuler([0, 0, 1.57])) + # Load a cylinder to place on the table + self.cylinderId = p.loadURDF("./URDFs/cylinder/cylinder.urdf", [0.074, 0.2, 0.70], p.getQuaternionFromEuler([0, 0, 0])) - self.hilo_lectura = threading.Thread(target=self.readDataFromProxy) - self.hilo_lectura.start() + # Thread to read the real arm angles from kinova_controller + self.threadKinovaAngles = threading.Thread(target=self.readDataFromProxy) + self.threadKinovaAngles.start() - # wait for half a second + # Queues to store the images from the real arm camera + self.colorKinova = collections.deque(maxlen=5) + self.depthKinova = collections.deque(maxlen=5) + + # Wait for half a second time.sleep(0.5) + # Change the mass of the links of the robot to do the simulation more stable for i in range(7): if i != 0: val = p.getDynamicsInfo(self.robot_id, i)[2] @@ -176,42 +336,32 @@ def __init__(self, proxy_map, startup_check=False): print("Kinova", i, p.getDynamicsInfo(self.robot_id, i)) + + # This variables is to store the position of the end effector of the robot self.target_angles = self.home_angles self.target_position = p.getLinkState(self.robot_id, self.end_effector_link_index)[0] self.target_orientation = p.getLinkState(self.robot_id, self.end_effector_link_index)[1] self.target_velocities = [0.0] * 7 - self.joy_selected_joint = 0 - self.move_mode = 5 - self.n_rotations = np.zeros(7).tolist() - 
self.n_rotations = [0, -1, 0, -1, -1, -1, 0] - self.ext_joints = self.kinovaarm_proxy.getJointsState() - self.ext_gripper = self.kinovaarm_proxy.getGripperState() - self.posesTimes = np.array([int(time.time()*1000)]) - self.poses = [] - joints = [] - for i in range(7): - actual_angle = (i, p.getJointState(self.robot_id, i + 1)[0]) - joints.append(actual_angle) - self.poses.append(joints) + # This variable is to store the joint selected in the joystick mode + self.joy_selected_joint = 0 - self.timestamp = int(time.time()*1000) + # This variable is to store the mode of the robot + self.move_mode = 4 - # self.initialize_toolbox() + # This variable is to store the state of the real arm + self.ext_joints = self.kinovaarm_proxy.getJointsState() + self.ext_gripper = self.kinovaarm_proxy.getGripperState() + # Timer to do the control loop self.timer.timeout.connect(self.compute) self.timer.start(self.Period) - self.timer2 = QtCore.QTimer(self) - self.timer2.timeout.connect(self.movePybulletWithExternalVel) - #self.timer2.start(50) - + # This variables are needed for move real arm self.joint_speeds = [] for i in range(7): self.joint_speeds.append(0) - self.gains = np.ones(7).tolist() - self.speeds = ifaces.RoboCompKinovaArm.TJointSpeeds() self.speeds.jointSpeeds = self.joint_speeds @@ -219,32 +369,84 @@ def __init__(self, proxy_map, startup_check=False): self.angles.jointAngles = [] - self.timer3 = QtCore.QTimer(self) - self.timer3.timeout.connect(self.moveKinovaWithSpeeds) - # self.timer3.start(self.Period) + # This variables are needed for the gains of real arm speeds + self.gains = np.ones(7).tolist() - self.timer4 = QtCore.QTimer(self) - self.timer4.timeout.connect(self.movePybulletWithToolbox) - # self.timer4.start(50) + self.posesTimes = np.array([int(time.time()*1000)]) + self.poses = [] + joints = [] + for i in range(7): + actual_angle = (i, p.getJointState(self.robot_id, i + 1)[0]) + joints.append(actual_angle) + self.poses.append(joints) - self.colorKinova = collections.deque(maxlen=5) - self.depthKinova = collections.deque(maxlen=5) + # This variable was to test the calibration of the camera self.calibrator = calibrator.Calibrator() - self.timer5 = QtCore.QTimer(self) - self.timer5.timeout.connect(self.readKinovaCamera) - self.timer5.start(self.Period) + # Timer to read the camera of the real arm + self.cameraKinovaTimer = QtCore.QTimer(self) + self.cameraKinovaTimer.timeout.connect(self.readKinovaCamera) + self.cameraKinovaTimer.start(self.Period) + + # Timers to update the gains and show the real arm angles + self.showKinovaStateTimer = QtCore.QTimer(self) + self.showKinovaStateTimer.timeout.connect(self.showKinovaAngles) + self.showKinovaStateTimer.start(1000) - self.timer6 = QtCore.QTimer(self) - self.timer6.timeout.connect(self.correctCupPosition) - # self.timer6.start(500) + self.gainsTimer = QtCore.QTimer(self) + self.gainsTimer.timeout.connect(self.updateGains) # Initialize the AAMED algorithm for the cup position correction - self.aamed = pyAAMED(1080, 1940) + self.aamed = pyAAMED(722//2, 1282//2) self.aamed.setParameters(3.1415926 / 3, 3.4, 0.77) - print("SpecificWorker started") + # self.contactPointTimer = QtCore.QTimer(self) + # self.contactPointTimer.timeout.connect(self.detectContactPoints) + # self.contactPointTimer.start(100) + + self.drawGraphTimer = QtCore.QTimer(self) + self.drawGraphTimer.timeout.connect(self.drawGraph) + self.drawGraphTimer.start(1000) + + self.pybullet_offset = [-0.3, 0.0, 0.6] + + num_joints = p.getNumJoints(self.robot_id) + + # Iterar sobre todos 
los enlaces y obtener su información + for joint_index in range(num_joints): + joint_info = p.getJointInfo(self.robot_id, joint_index) + link_name = joint_info[12].decode("utf-8") # Nombre del enlace + parent_link_index = joint_info[16] # Índice del enlace padre + link_state = p.getLinkState(self.robot_id, joint_index) + + print(f"Link {joint_index}:") + print(f" Name: {link_name}") + print(f" Parent Link Index: {parent_link_index}") + print(f" Link State: {link_state}") + + plt.ion() + self.fig, self.ax = plt.subplots(1, 1) + plt.show() + + self.jointsErrorMap = {} + for i in range(7): + jointError = [ + self.ext_joints.joints[i].angle - math.degrees(p.getJointState(self.robot_id, i + 1)[0]) % 360] + item = {i: jointError} + self.jointsErrorMap.update(item) + + self.left_force_series = [0] + self.right_force_series = [0] + self.graphTimes = [0] + + self.ax.set_ylim(0, 5) + + plt.tight_layout() + plt.draw() + + self.hide() + print("SpecificWorker started", time.time()*1000 - self.timestamp) def __del__(self): """Destructor""" @@ -255,12 +457,28 @@ def setParams(self, params): # except: # traceback.print_exc() # print("Error reading config params") + """ + Sets parameters passed as an argument to the method. + + Args: + params (Union[str, dict]): Used to set parameters for the function. + + Returns: + bool: 1 when the parameters are set successfully, and 0 otherwise. + + """ return True @QtCore.Slot() def compute(self): + """ + Computes the joint angles and velocities based on the observation angles + and moves the robot using PyBullet and Kinova's arm. It also updates the + error map and graph times. + + """ match self.move_mode: #Move joints case 0: @@ -356,9 +574,6 @@ def compute(self): self.target_velocities = joints_velocity - #print("Joints: ", self.target_angles) - #print(self.n_rotations) - for i in range(8, len(self.target_angles)): p.setJointMotorControl2(self.robot_id, i+1, p.POSITION_CONTROL, targetPosition=self.target_angles[i]) #Move the arm with phisics @@ -366,38 +581,15 @@ def compute(self): except Ice.Exception as e: print(e) - case 4: - try: - print("Toolbox compute init", time.time()*1000 - self.timestamp) - self.toolbox_compute() - print("Toolbox compute end", time.time()*1000 - self.timestamp) - # if self.arrived == True: - # print("Arrived") - # self.timer4.stop() - # self.timer3.stop() - # self.target_velocities = [0.0] * 7 - # self.move_mode = 8 - # self.move_mode = -1 - except Ice.Exception as e: - print(e) - - case 5: #Move to observation angles + case 4: #Move to observation angles print("Moving to observation angles", int(time.time()*1000) - self.timestamp) + self.changePybulletGripper(0.0) + self.kinovaarm_proxy.openGripper() + self.moveKinovaWithAngles(self.observation_angles[:7]) for i in range(7): - p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, targetPosition=self.observation_angles[i], maxVelocity=np.deg2rad(25)) - - self.target_angles[13] = self.ext_gripper.distance - self.target_angles[15] = - self.ext_gripper.distance - self.target_angles[17] = self.ext_gripper.distance - 0.1 - - self.target_angles[18] = self.ext_gripper.distance - self.target_angles[20] = - self.ext_gripper.distance - self.target_angles[22] = self.ext_gripper.distance - 0.1 - - for i in range(8, len(self.target_angles)): p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, - targetPosition=self.target_angles[i]) # Move the arm with phisics + targetPosition=self.observation_angles[i], maxVelocity=np.deg2rad(25)) angles = [] for i in range(7): @@ -405,10 
+597,12 @@ def compute(self): error = np.sum(np.abs(np.array(angles) - np.array(self.observation_angles[:7]))) + pybulletImage, imageTime = self.read_camera_fixed() + if error < 0.05: print("Observation angles reached", int(time.time()*1000) - self.timestamp) - self.move_mode = 6 - case 6: + self.move_mode = 5 + case 5: #Detect real cup and correct pybullet cup position # self.calibrator.calibrate3(self.robot_id, self.colorKinova) # self.calibrator.cube_test(self.robot_id, self.colorKinova.copy()) # self.calibrator.square_test(self.robot_id, self.colorKinova.copy()) @@ -416,53 +610,283 @@ def compute(self): # yolodetector = YoloDetector.YoloDetector() # results = yolodetector.detect(self.colorKinova) # yolodetector.plot(results) + # print(self.threadPybulletImage.is_alive()) + + error, kinovaTarget = self.correctTargetPosition() - if self.correctCupPosition() > 5: - print("Correcting cup position") + if error > 5 or kinovaTarget is False: + print("Correcting cup position", int(time.time()*1000) - self.timestamp) else: - print("Calibration finished") - self.move_mode = 7 + print("Calibration finished", int(time.time()*1000 - self.timestamp)) + self.move_mode = 6 - case 7: + case 6: #Initialize toolbox # self.showKinovaAngles() self.timer.stop() # self.calibrator.get_kinova_images(self.robot_id, self.kinovaarm_proxy, self.camerargbdsimple_proxy) # self.calibrator.calibrate4(self.robot_id) # self.move_mode = -1 - # self.moveKinovaWithAngles(self.home_angles[:7]) - # - # for i in range(7): - # p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, targetPosition=self.home_angles[i]) + target_position = list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + target_position[0] = target_position[0] - self.pybullet_offset[0] + target_position[2] = target_position[2] - self.pybullet_offset[2] + 0.17 - self.initialize_toolbox() - self.timer4.start(self.Period) - # self.timer3.start(self.Period) - self.timer6.start(500) + print("initilizing toolbox", time.time()*1000 - self.timestamp) + self.initialize_toolbox(target_position) + print("toolbox initialized", time.time()*1000 - self.timestamp) + # self.showKinovaStateTimer.start(1000) + # self.gainsTimer.start(1000) print("Moving to fixed cup") - self.move_mode = 4 + self.move_mode = 7 self.timer.start(self.Period) + self.loopCounter = 0 + + case 7: #Move to cup with camera feedback + try: + error, kinovaTarget = self.correctTargetPosition() + if error != -1: + self.last_error = error + else: + self.loopCounter += 1 + + # print("Correct cup position end", time.time()*1000 - self.timestamp) + + target_position = list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + target_position[0] = target_position[0] - self.pybullet_offset[0] + target_position[2] = target_position[2] - self.pybullet_offset[2] + 0.17 + + self.toolbox_compute(target_position) + # print("Toolbox compute end", time.time()*1000 - self.timestamp) + self.movePybulletWithToolbox() + # print("Move pybullet with toolbox end", time.time()*1000 - self.timestamp) + self.moveKinovaWithSpeeds() + # print("Move kinova with speeds end", time.time()*1000 - self.timestamp) + + self.posesTimes = np.append(self.posesTimes, int(time.time() * 1000)) + + jointsState = [] + + print("tiempo", self.ext_joints.timestamp, time.time() * 1000) + + for i in range(7): + state = p.getJointState(self.robot_id, i + 1) + angle_speed = (state[0], state[1]) + jointsState.append(angle_speed) + + self.jointsErrorMap[i].append(abs(self.ext_joints.joints[i].angle - math.degrees(state[0]) % 360)) - case 
8: - pybulletImage, _ = self.read_camera_fixed() - cv2.imshow("Pybullet", pybulletImage) - cv2.waitKey(1) - print("/////////////////////////////////////////////////////////////////////7") - for i in range(len(self.colorKinova)): - print("Kinovas timestamps:", self.colorKinova[i][1]) + self.graphTimes.append(int(time.time()*1000 - self.timestamp)) + + self.poses.append(jointsState) + + # print("Pybullet poses saved to update gains", time.time()*1000 - self.timestamp) + # print("//====================================//") + + gripper_position = p.getLinkState(self.robot_id, self.end_effector_link_index)[0] + target_position = list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + + # if self.last_error > 50 and (self.loopCounter > 20 or gripper_position[2] - target_position[2] < 0.25): + if self.last_error > 50 and kinovaTarget is False: + print("Adjusting pose for target adjustment") + self.target_position = list(p.getLinkState(self.robot_id, self.end_effector_link_index)[0]) + self.target_position[0] = self.target_position[0] - self.pybullet_offset[0] + self.target_position[2] = 0.55 + self.move_mode = 8 + + if gripper_position[2] - target_position[2] < 0.1 or self.loopCounter > 20: + print("Working without visual feedback", " Last error: ", self.last_error) + self.move_mode = 9 + except Ice.Exception as e: + print(e) + + case 8: #Move to a top position to adjust the cup position + try: + self.arrived = False + + self.correctTargetPosition() + self.toolbox_compute(self.target_position) + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + + self.posesTimes = np.append(self.posesTimes, int(time.time() * 1000)) + + jointsState = [] + for i in range(7): + state = p.getJointState(self.robot_id, i + 1) + angle_speed = (state[0], state[1]) + jointsState.append(angle_speed) + self.jointsErrorMap[i].append(abs(self.ext_joints.joints[i].angle - math.degrees(state[0]) % 360)) + + self.graphTimes.append(int(time.time() * 1000 - self.timestamp)) + + self.poses.append(jointsState) + + print("Arrived: ", self.arrived) + + if self.arrived == True: + self.loopCounter = 0 + self.target_velocities = [0.0] * 7 + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + self.last_error = 0 + self.move_mode = 7 + + except Ice.Exception as e: + print(e) + + case 9: #Move to cup without camera feedback and close the gripper + try: + if not self.arrived: + target_position = list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + target_position[0] = target_position[0] - self.pybullet_offset[0] + target_position[2] = target_position[2] - self.pybullet_offset[2] + 0.17 + self.toolbox_compute(target_position) + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + + imgPybullet, imageTime = self.read_camera_fixed() + + self.posesTimes = np.append(self.posesTimes, int(time.time() * 1000)) + + jointsState = [] + for i in range(7): + state = p.getJointState(self.robot_id, i + 1) + angle_speed = (state[0], state[1]) + jointsState.append(angle_speed) + self.jointsErrorMap[i].append(abs(self.ext_joints.joints[i].angle - math.degrees(state[0]) % 360)) + + self.graphTimes.append(int(time.time() * 1000 - self.timestamp)) + + self.poses.append(jointsState) + + if self.arrived == True: + # print("Arrived") + self.target_velocities = [0.0] * 7 + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + self.kinovaarm_proxy.closeGripper() + self.changePybulletGripper(self.ext_gripper.distance) + self.move_mode = 10 + self.target_position = 
list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + self.target_position[0] = self.target_position[0] - self.pybullet_offset[0] + self.target_position[2] = self.target_position[2] - self.pybullet_offset[2] + 0.25 + + except Ice.Exception as e: + print(e) + + case 10: #Move to a top position before moving to the center of the table + try: + self.arrived = False + + self.toolbox_compute(self.target_position) + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + + imgPybullet, imageTime = self.read_camera_fixed() + + self.posesTimes = np.append(self.posesTimes, int(time.time() * 1000)) + + jointsState = [] + for i in range(7): + state = p.getJointState(self.robot_id, i + 1) + angle_speed = (state[0], state[1]) + jointsState.append(angle_speed) + self.jointsErrorMap[i].append(abs(self.ext_joints.joints[i].angle - math.degrees(state[0]) % 360)) + + self.graphTimes.append(int(time.time() * 1000 - self.timestamp)) + + self.poses.append(jointsState) + + if self.arrived == True: + # print("Arrived") + self.target_velocities = [0.0] * 7 + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + self.move_mode = 11 + + except Ice.Exception as e: + print(e) + + case 11: #Move to the center of the table and open the gripper + try: + self.arrived = False + + self.toolbox_compute(self.table_center) + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + + imgPybullet, imageTime = self.read_camera_fixed() + + self.posesTimes = np.append(self.posesTimes, int(time.time() * 1000)) + + jointsState = [] + for i in range(7): + state = p.getJointState(self.robot_id, i + 1) + angle_speed = (state[0], state[1]) + jointsState.append(angle_speed) + self.jointsErrorMap[i].append(abs(self.ext_joints.joints[i].angle - math.degrees(state[0]) % 360)) + + self.graphTimes.append(int(time.time() * 1000 - self.timestamp)) + + self.poses.append(jointsState) + + if self.arrived == True: + self.target_velocities = [0.0] * 7 + self.movePybulletWithToolbox() + self.moveKinovaWithSpeeds() + self.kinovaarm_proxy.openGripper() + self.changePybulletGripper(self.ext_gripper.distance) + self.move_mode = 12 + + except Ice.Exception as e: + print(e) + + case 12: #Move to the observation angles and repeat the process(modes 7-12) + print("Reseting pose") + self.timer.stop() + self.moveKinovaWithAngles(self.observation_angles[:7]) + for i in range(7): + p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, + targetPosition=self.observation_angles[i], + maxVelocity=np.deg2rad(25)) + + angles = [] + for i in range(7): + angles.append(p.getJointState(self.robot_id, i + 1)[0]) + + error = np.sum(np.abs(np.array(angles) - np.array(self.observation_angles[:7]))) + + time.sleep(0.1) + self.timer.start(self.Period) + + if error < 0.05: + self.move_mode = 7 + self.loopCounter = 0 - # self.moveKinovaWithAngles(self.home_angles[:7]) - # for i in range(7): - # p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, targetPosition=self.home_angles[i]) - #p.stepSimulation() - # pass # =============== Methods ================== def startup_check(self): + """ + Tests various objects and functions from different interfaces, including + ifaces.RoboCompCameraRGBDSimple, ifaces.RoboCompKinovaArm, and + ifaces.RoboCompJoystickAdapter. It covers testing points, images, depth, + RGBD data, poses, grippers, joints, joint speeds, and button and axis parameters. 
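+
+        Example (sketch; assumes the component is launched with the startup
+        check enabled so that ``__init__`` calls this method):
+
+            worker = SpecificWorker(proxy_map, startup_check=True)
+            # each interface type is instantiated once and the Qt application
+            # quits roughly 200 ms later via QTimer.singleShot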
+ + """ + print(f"Testing RoboCompCameraRGBDSimple.Point3D from ifaces.RoboCompCameraRGBDSimple") + test = ifaces.RoboCompCameraRGBDSimple.Point3D() + print(f"Testing RoboCompCameraRGBDSimple.TPoints from ifaces.RoboCompCameraRGBDSimple") + test = ifaces.RoboCompCameraRGBDSimple.TPoints() + print(f"Testing RoboCompCameraRGBDSimple.TImage from ifaces.RoboCompCameraRGBDSimple") + test = ifaces.RoboCompCameraRGBDSimple.TImage() + print(f"Testing RoboCompCameraRGBDSimple.TDepth from ifaces.RoboCompCameraRGBDSimple") + test = ifaces.RoboCompCameraRGBDSimple.TDepth() + print(f"Testing RoboCompCameraRGBDSimple.TRGBD from ifaces.RoboCompCameraRGBDSimple") + test = ifaces.RoboCompCameraRGBDSimple.TRGBD() print(f"Testing RoboCompKinovaArm.TPose from ifaces.RoboCompKinovaArm") test = ifaces.RoboCompKinovaArm.TPose() print(f"Testing RoboCompKinovaArm.TGripper from ifaces.RoboCompKinovaArm") @@ -471,6 +895,10 @@ def startup_check(self): test = ifaces.RoboCompKinovaArm.TJoint() print(f"Testing RoboCompKinovaArm.TJoints from ifaces.RoboCompKinovaArm") test = ifaces.RoboCompKinovaArm.TJoints() + print(f"Testing RoboCompKinovaArm.TJointSpeeds from ifaces.RoboCompKinovaArm") + test = ifaces.RoboCompKinovaArm.TJointSpeeds() + print(f"Testing RoboCompKinovaArm.TJointAngles from ifaces.RoboCompKinovaArm") + test = ifaces.RoboCompKinovaArm.TJointAngles() print(f"Testing RoboCompJoystickAdapter.AxisParams from ifaces.RoboCompJoystickAdapter") test = ifaces.RoboCompJoystickAdapter.AxisParams() print(f"Testing RoboCompJoystickAdapter.ButtonParams from ifaces.RoboCompJoystickAdapter") @@ -479,77 +907,181 @@ def startup_check(self): test = ifaces.RoboCompJoystickAdapter.TData() QTimer.singleShot(200, QApplication.instance().quit) - def correctCupPosition(self): - print("Init time", time.time()*1000 - self.timestamp) - # aamed = pyAAMED(1080, 1940) - # aamed.setParameters(3.1415926 / 3, 3.4, 0.77) + def changePybulletGripper(self, distance): + """ + Modifies the target angles of a robot's gripper to move it to a desired + position, using PyBullet library's `setJointMotorControl2` function. + + Args: + distance (float): Used to set the desired angle for the gripper's joints. + + """ + self.target_angles[13] = distance + self.target_angles[15] = - distance + self.target_angles[17] = distance - 0.1 + + self.target_angles[18] = distance + self.target_angles[20] = - distance + self.target_angles[22] = distance - 0.1 + + for i in range(8, len(self.target_angles)): + p.setJointMotorControl2(self.robot_id, i + 1, p.POSITION_CONTROL, + targetPosition=self.target_angles[i]) + + def detectContactPoints(self): + """ + Detects contact points between two links and calculates the force acting + on each link based on the contact forces. 
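+
+        Example (minimal sketch of the underlying PyBullet call; link indices
+        17 and 22 are the fingertip links used in this method):
+
+            points = p.getContactPoints(bodyA=self.robot_id, linkIndexA=17)
+            total_force = sum(c[9] for c in points)  # element 9 is the contact normal force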
+ + """ + print("adding contact points", int(time.time()*1000) - self.timestamp) + self.graphTimes.append(int(time.time()*1000)-self.timestamp) + # Get contact points for the left fingertip + contact_points_left = [] + contact_points_left.extend(p.getContactPoints(bodyA=self.robot_id, linkIndexA=17)) + # self.left_force_series.append(len(contact_points_left)) + + force_acum = 0 + for contact in contact_points_left: # Contact force is at index 9 of the contact point tuple + contact_force_left = contact[9] + force_acum += contact_force_left + # print(f"Contact force on left fingertip: {contact_force_left}") + self.left_force_series.append(force_acum) + + # Get contact points for the right fingertip + contact_points_right = [] + contact_points_right.extend(p.getContactPoints(bodyA=self.robot_id, linkIndexA=22)) + # self.right_force_series.append(len(contact_points_right)) + + force_acum = 0 + for contact in contact_points_right: + # Contact force is at index 9 of the contact point tuple + contact_force_right = contact[9] + force_acum += contact_force_right + # print(f"Contact force on right fingertip: {contact_force_right}") + self.right_force_series.append(force_acum) + + # plt.cla() + # plt.plot(self.graphTimes, self.left_force_series, label="Left fingertip force") + # plt.plot(self.graphTimes, self.right_force_series, label="Right fingertip force") + # + # plt.tight_layout() + # plt.draw() + # plt.pause(0.001) + + def drawGraph(self): + + """ + Generates a graph in the current axis using Matplotlib library. It plots + joint error values against time and labels them individually. + + """ + plt.cla() + print("Drawing graph", int(time.time()*1000) - self.timestamp) + + self.ax.set_ylim(0, 5) + + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(0), label="joint 1 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(1), label="joint 2 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(2), label="joint 3 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(3), label="joint 4 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(4), label="joint 5 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(5), label="joint 6 error") + self.ax.plot(self.graphTimes, self.jointsErrorMap.get(6), label="joint 7 error") + + # self.axs[0, 0].plot(self.graphTimes, self.jointsErrorMap.get(0), label="joint 1 error") + # self.axs[1, 0].plot(self.graphTimes, self.jointsErrorMap.get(1), label="joint 2 error") + # self.axs[2, 0].plot(self.graphTimes, self.jointsErrorMap.get(2), label="joint 3 error") + # self.axs[3, 0].plot(self.graphTimes, self.jointsErrorMap.get(3), label="joint 4 error") + # self.axs[0, 1].plot(self.graphTimes, self.jointsErrorMap.get(4), label="joint 5 error") + # self.axs[1, 1].plot(self.graphTimes, self.jointsErrorMap.get(5), label="joint 6 error") + # self.axs[2, 1].plot(self.graphTimes, self.jointsErrorMap.get(6), label="joint 7 error") + + # self.axs[3, 1].plot(self.graphTimes, self.right_force_series, label="Right fingertip force") + # self.axs[3, 1].plot(self.graphTimes, self.left_force_series, label="Left fingertip force") + + plt.tight_layout() + plt.draw() + + print("Graph drawn", int(time.time()*1000) - self.timestamp) + # plt.pause(0.001) + + def correctTargetPosition(self): + """ + Calculates the difference between the kinova target and the detected + position of the robot's end effector using AAMED, and adjusts the position + of the base to match the target position. 
+ + Returns: + float: The error between the predicted position and the ground truth + position, and a boolean value indicating whether a target was detected + in the kinova camera stream. - print("processing first image", time.time()*1000 - self.timestamp) + """ pybulletImage, imageTime = self.read_camera_fixed() + pybulletImage = cv2.resize(pybulletImage, (1280//2, 720//2)) imgGPybullet = cv2.cvtColor(pybulletImage, cv2.COLOR_BGR2GRAY) resPybullet = self.aamed.run_AAMED(imgGPybullet) - # aamed.drawAAMED(imgGPybullet) - # if len(resPybullet) > 0: - # cv2.circle(imgGPybullet, (round(resPybullet[0][1]), round(resPybullet[0][0])), 8, (0, 0, 255), -1) - # cv2.imshow("test pybullet", imgGPybullet) - - print("select second image", time.time()*1000 - self.timestamp) + if isinstance(resPybullet, list): + resPybullet = np.array(resPybullet) diff = 5000 index = 0 for i in range(len(self.colorKinova)): - print("Index: ", i) - print("Kinova timestamps :", self.colorKinova[i][1]) - print("Pybullet timestamp:", imageTime) if abs(imageTime - self.colorKinova[i][1]) < diff: diff = abs(self.colorKinova[i][1] - imageTime) index = i - print("Diff: ", diff) - - print("processing second image", time.time()*1000 - self.timestamp) - imgGKinova = cv2.cvtColor(self.colorKinova[index][0], cv2.COLOR_BGR2GRAY) - resKinova = self.aamed.run_AAMED(imgGKinova) - # aamed.drawAAMED(imgGKinova) - # if len(resKinova) > 0: - # cv2.circle(imgGKinova, (round(resKinova[0][1]), round(resKinova[0][0])), 8, (0, 0, 255), -1) - # cv2.imshow("test kinova", imgGKinova) + imgGKinova = cv2.resize(imgGKinova, (1280//2, 720//2)) + resKinova = np.array(self.aamed.run_AAMED(imgGKinova)) + if isinstance(resKinova, list): + resKinova = np.array(resKinova) - print("second image processed", time.time()*1000 - self.timestamp) + if resKinova.size == 0 or resPybullet.size == 0: + print("No keypoints detected") + return -1, False error = np.abs(resKinova[0][1] - resPybullet[0][1] + resKinova[0][0] - resPybullet[0][0]) - print("x: ", resKinova[0][1] - resPybullet[0][1], " y: ", resKinova[0][0] - resPybullet[0][0]) + if resKinova[0][5] > 0.85 and resPybullet[0][5] > 0.90: + position = list(p.getBasePositionAndOrientation(self.cylinderId)[0]) + position[0] = position[0] - 0.0002 * (resKinova[0][0] - resPybullet[0][0]) + position[1] = position[1] - 0.0002 * (resKinova[0][1] - resPybullet[0][1]) + p.resetBasePositionAndOrientation(self.cylinderId, tuple(position), p.getQuaternionFromEuler([0, 0, 0])) - position = list(p.getBasePositionAndOrientation(self.pybullet_cup)[0]) - position[0] = position[0] - 0.0005 * (resKinova[0][0] - resPybullet[0][0]) - position[1] = position[1] - 0.0005 * (resKinova[0][1] - resPybullet[0][1]) - p.resetBasePositionAndOrientation(self.pybullet_cup, tuple(position), p.getQuaternionFromEuler([0, 0, 0])) + if len(resKinova) > 0: + kinovaTargetDetected = True + else: + kinovaTargetDetected = False - print("Finish time", time.time()*1000 - self.timestamp) - return error + return error, kinovaTargetDetected - def initialize_toolbox(self): + def initialize_toolbox(self, target_position): ## Launch the simulator Swift + """ + Sets up the toolbox environment, including adding kinematic objects, + defining goal and end-effector axes, and initializing the worker's state + variables. + + Args: + target_position (Tuple[float, float, float]): Used to specify the + target position of the end effector (gripper) in the world coordinate + system. 
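+
+        Example (sketch mirroring how mode 6 of ``compute`` builds the target;
+        ``cylinderId`` and ``pybullet_offset`` are set in ``__init__``):
+
+            target = list(p.getBasePositionAndOrientation(self.cylinderId)[0])
+            target[0] = target[0] - self.pybullet_offset[0]
+            target[2] = target[2] - self.pybullet_offset[2] + 0.17
+            self.initialize_toolbox(target)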
+ + """ self.env = swift.Swift() self.env.launch(realtime=True) - # env = rtb.backends.PyPlot.PyPlot() - # env.launch(realtime=True) # Create a KionovaGen3 robot object self.kinova = rtb.models.KinovaGen3() print(self.kinova.grippers) + # Set joint angles to ready configuration - # observation_angles = self.observation_angles[:7] - # observation_angles[5] = observation_angles[5] + 2*np.pi - # print(observation_angles) - # self.kinova.q = observation_angles self.kinova.q = self.kinova.qr # Add the robot to the simulator self.env.add(self.kinova) - # kinova = rtb.models.Panda() # axes self.goal_axes = sg.Axes(0.1) @@ -563,36 +1095,45 @@ def initialize_toolbox(self): # Number of joint in the Kinova which we are controlling self.n = 7 - cup_position = list(p.getBasePositionAndOrientation(self.pybullet_cup)[0]) - cup_position[0] = cup_position[0] + 0.3 # Added the robot base offset in pybullet - # objects - self.cup = sg.Cylinder(0.05, 0.1, pose=sm.SE3.Trans(cup_position[0], cup_position[1], 0), color=(0, 0, 1)) + # self.cup = sg.Cylinder(0.05, 0.1, pose=sm.SE3.Trans(cup_position[0], cup_position[1], 0), color=(0, 0, 1)) + # self.cup = sg.Cylinder(0.036, 0.168, pose=sm.SE3.Trans(target_position[0], target_position[1], 0), color=(0, 1, 0)) + # self.cup = sg.Cylinder(0.05, 0.1, pose=sm.SE3.Trans(0.4, 0.4, 0), color=(0, 0, 1)) - self.env.add(self.cup) + # self.env.add(self.cup) # Set the desired end-effector pose self.rot = self.kinova.fkine(self.kinova.q).R self.rot = sm.SO3.OA([1, 0, 0], [0, 0, -1]) - # self.Tep = sm.SE3.Rt(self.rot, [cup_position[0], cup_position[1], 0.23]) - self.Tep = sm.SE3.Rt(self.rot, [cup_position[0], cup_position[1], 0.60]) - # self.Tep = sm.SE3.Rt(self.rot, [0.4, 0.4, 0.13]) # green = x-axis, red = y-axis, blue = z-axis + # self.Tep = sm.SE3.Rt(self.rot, [target_position[0], target_position[1], 0.60]) + self.Tep = sm.SE3.Rt(self.rot, [target_position[0], target_position[1], target_position[2]]) # green = x-axis, red = y-axis, blue = z-axis self.goal_axes.T = self.Tep self.arrived = False self.dt = 0.05 self.env.step(0) - time.sleep(5) + time.sleep(1) - def toolbox_compute(self): + def toolbox_compute(self, target_position): # The current pose of the kinova's end-effector - self.Te = self.kinova.fkine(self.kinova.q) + """ + Computes the joint angles and velocities for a robot arm based on its + current state and the desired position of the end effector. It uses the + kinematic chain of the robot to compute the Jacobian matrix, and then + solves a quadratic program (QP) to find the optimal joint angles that will + reach the target position while satisfying constraints on the joint + velocities and accelerations. - cup_position = list(p.getBasePositionAndOrientation(self.pybullet_cup)[0]) - cup_position[0] = cup_position[0] + 0.3 # Added the robot base offset in pybullet + Args: + target_position (Tuple[float, float, float]): A desired position for + the end effector (EE) to reach. 
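+
+        Example (sketch of one control cycle as used in mode 7 of ``compute``;
+        the exact attributes updated by this method are inferred from that code):
+
+            self.toolbox_compute(target_position)  # solve the QP, update the joint velocity targets and self.arrived
+            self.movePybulletWithToolbox()         # apply the velocities in the simulation
+            self.moveKinovaWithSpeeds()            # mirror them on the real arm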
- self.Tep = sm.SE3.Rt(self.rot, [cup_position[0], cup_position[1], 0.60]) # green = x-axis, red = y-axis, blue = z-axis + """ + self.Te = self.kinova.fkine(self.kinova.q) + + # self.Tep = sm.SE3.Rt(self.rot, [cup_position[0], cup_position[1], 0.60]) # green = x-axis, red = y-axis, blue = z-axis + self.Tep = sm.SE3.Rt(self.rot, [target_position[0], target_position[1], target_position[2]]) # green = x-axis, red = y-axis, blue = z-axis # Transform from the end-effector to desired pose self.eTep = self.Te.inv() * self.Tep @@ -601,7 +1142,7 @@ def toolbox_compute(self): # Calulate the required end-effector spatial velocity for the robot # to approach the goal. Gain is set to 1.0 - self.v, self.arrived = rtb.p_servo(self.Te, self.Tep, 1.0, threshold=0.01) + self.v, self.arrived = rtb.p_servo(self.Te, self.Tep, 1.0, threshold=0.015) # Gain term (lambda) for control minimisation self.Y = 0.01 @@ -638,7 +1179,7 @@ def toolbox_compute(self): c = np.r_[-self.kinova.jacobm().reshape((self.n,)), np.zeros(6)] # The lower and upper bounds on the joint velocity and slack variable - lim = np.deg2rad(20) + lim = np.deg2rad(10) self.qdlim = [lim, lim, lim, lim, lim, lim, lim] # inventadas lb = -np.r_[self.qdlim, 10 * np.ones(6)] ub = np.r_[self.qdlim, 10 * np.ones(6)] @@ -653,13 +1194,13 @@ def toolbox_compute(self): joints_angle = [] for i in range(7): joints_angle.append(p.getJointState(self.robot_id, i + 1)[0]) - if i == 4: - joints_angle[i] = joints_angle[i] - if i == 5: - joints_angle[i] = joints_angle[i] - print("Error joints", self.kinova.q-joints_angle) - print("/////////////////////////////////////////////////////////////////////////////////////////////////////") + error = np.sum(np.rad2deg(np.abs(np.array(joints_angle) - np.array(self.kinova.q)))) + # print("Error joints", np.rad2deg(self.kinova.q-joints_angle), "Error: ", error) + if error > 1: + self.kinova.q = joints_angle + + # print("/////////////////////////////////////////////////////////////////////////////////////////////////////") # Update the ee axes self.ee_axes.T = self.Te @@ -723,98 +1264,134 @@ def cvPose2BulletView(self, q, t): viewMatrix = T.T.reshape(16) return viewMatrix def read_camera_fixed(self): - # print("Getting the pose", time.time()*1000-self.timestamp) - com_p, com_o, _, _, _, _ = p.getLinkState(self.robot_id, 9) - # print("Pose obtained", time.time()*1000-self.timestamp) - # Define camera intrinsic parameters - width = 1280 # image width - height = 720 # image height - f_in_pixels = 1298 #1298 - near = 0.01 # near clipping plane - far = 100 # far clipping plane + """ + Reads camera data from a PyBullet simulation and processes it to generate + an RGB image that is displayed in a window. + + Returns: + float: The camera image as a numpy array. 
+ + """ + while True: + # print("Getting the pose", time.time()*1000-self.timestamp) + com_p, com_o, _, _, _, _ = p.getLinkState(self.robot_id, 9) + # print("Pose obtained", time.time()*1000-self.timestamp) + # Define camera intrinsic parameters + width = 1280 # image width + height = 720 # image height + f_in_pixels = 1298 #1298 + near = 0.01 # near clipping plane + far = 100 # far clipping plane + + # Optical center in pixel coordinates + optical_center_x_pixels = 646.23 #620 # example x-coordinate in pixels + optical_center_y_pixels = 267.62 #238 # example y-coordinate in pixels + + fov = 2 * np.degrees(np.arctan(width / (2 * f_in_pixels))) + + k = np.array([[f_in_pixels, 0, optical_center_x_pixels], + [0, f_in_pixels, optical_center_y_pixels], + [0, 0, 1]]) + + projection_matrix = self.cvK2BulletP(k, width, height, near, far) - # Optical center in pixel coordinates - optical_center_x_pixels = 646.23 #620 # example x-coordinate in pixels - optical_center_y_pixels = 267.62 #238 # example y-coordinate in pixels + # print("Projection matrix obtained", time.time() * 1000 - self.timestamp) + # print("fixed proyection matrix", projection_matrix) - fov = 2 * np.degrees(np.arctan(width / (2 * f_in_pixels))) + # Define camera extrinsic parameters + camera_translation = np.array([0.0, 0.0, 0.0]) - k = np.array([[f_in_pixels, 0, optical_center_x_pixels], - [0, f_in_pixels, optical_center_y_pixels], - [0, 0, 1]]) + camera_rotation_matrix = np.array([ + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0] + ]) - projection_matrix = self.cvK2BulletP(k, width, height, near, far) + camera_translation += com_p + com_o_matrix = p.getMatrixFromQuaternion(com_o) + camera_rotation_matrix += np.array(com_o_matrix).reshape(3, 3) - # print("Projection matrix obtained", time.time() * 1000 - self.timestamp) - # print("fixed proyection matrix", projection_matrix) + view_matrix = self.cvPose2BulletView(com_o, camera_translation) - # Define camera extrinsic parameters - camera_translation = np.array([0.0, 0.0, 0.0]) + # print("View matrix obtained", time.time() * 1000 - self.timestamp) + # print("fixed view matrix", np.matrix(view_matrix)) + # print("//////////////////////////////////////////////////////////////////") - camera_rotation_matrix = np.array([ - [1.0, 0.0, 0.0], - [0.0, 1.0, 0.0], - [0.0, 0.0, 1.0] - ]) + # Get the camera image + img = p.getCameraImage(width, height, view_matrix, projection_matrix, renderer=p.ER_BULLET_HARDWARE_OPENGL, flags=p.ER_NO_SEGMENTATION_MASK) - camera_translation += com_p - com_o_matrix = p.getMatrixFromQuaternion(com_o) - camera_rotation_matrix += np.array(com_o_matrix).reshape(3, 3) + # print("Camera image obtained", time.time() * 1000 - self.timestamp) + rgb = img[2] + # rgb = cv2.resize(rgb, (1280, 720)) + rgb = cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR) - view_matrix = self.cvPose2BulletView(com_o, camera_translation) + ## sum in one frame rgb and self.colorKinova[0][0] + sum = cv2.addWeighted(rgb, 0.5, self.colorKinova[0][0], 0.5, 0) + cv2.imshow("Pybullet", sum) + # print("Showing Pybullet image", time.time() * 1000 - self.timestamp) + cv2.waitKey(3) - # print("View matrix obtained", time.time() * 1000 - self.timestamp) - # print("fixed view matrix", np.matrix(view_matrix)) - # print("//////////////////////////////////////////////////////////////////") + # print("Returning the image", time.time() * 1000 - self.timestamp) - # Get the camera image - img = p.getCameraImage(width, height, view_matrix, projection_matrix, renderer=p.ER_BULLET_HARDWARE_OPENGL) + # print("Triying to 
put in queue", time.time()*1000 - self.timestamp) - # print("Camera image obtained", time.time() * 1000 - self.timestamp) - rgb = img[2] - rgb = cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR) + # self.pybulletImageQueue.put_nowait([rgb, time.time()*1000]) - # print("Returning the image", time.time() * 1000 - self.timestamp) + # print("Pybullet image put in queue", time.time()*1000 - self.timestamp) - return rgb, time.time()*1000 + # time.sleep(0.05) + return rgb, time.time()*1000 def readKinovaCamera(self): + """ + Reads camera data from Kinova, including depth and color images, and appends + them to instance variables `depthKinova` and `colorKinova`, respectively. + + Returns: + bool: True if the operation was successful, and False otherwise. + + """ try: both = self.camerargbdsimple_proxy.getAll("CameraRGBDViewer") - # self.colorKinova.append(both.image) - # self.depthKinova.append(both.depth) - # print(both.image.alivetime) - depthImage = (np.frombuffer(both.depth.depth, dtype=np.int16) .reshape(both.depth.height, both.depth.width)) depthImage = cv2.normalize(src=depthImage, dst=None, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U) self.depthKinova.append([depthImage, both.depth.alivetime]) - kinovaImage = (np.frombuffer(both.image.image, np.uint8) .reshape(both.image.height, both.image.width, both.image.depth)) - self.colorKinova.append([kinovaImage, both.image.alivetime]) - cv2.imshow("ColorKinova", self.colorKinova[0][0]) - # cv2.imshow("DepthKinova", self.depthKinova) - cv2.waitKey(1) except Ice.Exception as e: print(e) return True def showKinovaAngles(self): + """ + Prints information about the kinematics of the robot, including joint + angles, torques, and distance of the gripper. + + """ + print("//--------------------------------------------------------------------------------------------------//") ext_angles = [] + ext_torques = [] + diff_from_pybullet = [] for i in range(7): ext_angles.append(self.ext_joints.joints[i].angle) - print(np.deg2rad(ext_angles)) - - # angles = [] - # for i in range(7): - # angles.append(p.getJointState(self.robot_id, i+1)[0]) - # print(np.rad2deg(angles)%360) + ext_torques.append(self.ext_joints.joints[i].force) + diff_from_pybullet.append((math.degrees(p.getJointState(self.robot_id, i + 1)[0]) % 360) - self.ext_joints.joints[i].angle) + print("Kinova angles", ext_angles) + print("Kinova torqe", ext_torques) + print("Diff from pybullet", diff_from_pybullet) + print("Gripper distance", self.ext_gripper.distance) def movePybulletWithExternalVel(self): + """ + Sets the target velocities of joints external to the robot and then converts + them from degrees to radians before setting them as the motor control + targets for the robot's joints using PyBullet's `setJointMotorControl2` function. + + """ for i in range(len(self.ext_joints.joints)): self.target_velocities[i] = self.ext_joints.joints[i].velocity @@ -825,11 +1402,24 @@ def movePybulletWithExternalVel(self): targetVelocity=self.target_velocities[i]) def movePybulletWithToolbox(self): + # print("Pybullet move with toolbox init", time.time()*1000 - self.timestamp) + """ + Controls the movement of a robot using PyBullet, setting joint motor control + values for each joint based on target velocities provided in a list. 
+ + """ for i in range(len(self.target_velocities)): p.setJointMotorControl2(self.robot_id, i+1, p.VELOCITY_CONTROL, targetVelocity=self.target_velocities[i]) + # print("Pybullet move with toolbox end", time.time()*1000 - self.timestamp) def readDataFromProxy(self): + """ + Continuously retrieves joint and gripper state data from a Kinova arm + proxy, scales down the gripper distance by 80%, and sleeps for 0.05 seconds + before repeating the process. + + """ while True: self.ext_joints = self.kinovaarm_proxy.getJointsState() self.ext_gripper = self.kinovaarm_proxy.getGripperState() @@ -839,26 +1429,58 @@ def readDataFromProxy(self): time.sleep(0.05) def moveKinovaWithAngles(self, angles): + """ + Updates the joint angles of a Kinova arm based on provided angles, and + then calls the `moveJointsWithAngle` method of the kinova arm proxy to + execute the movements. + + Args: + angles (np.array): 360-degree radian values representing the + angles for each joint to move. + + """ array = np.round(np.rad2deg(angles) % 360) self.angles.jointAngles = array.tolist() self.kinovaarm_proxy.moveJointsWithAngle(self.angles) def moveKinovaWithSpeeds(self): + # print("Kinova move with speeds init", time.time()*1000 - self.timestamp) + """ + Calculates and applies joint speeds for a Kinova arm based on joint angles + and gains, and then moves the joints with those speeds using the + `kinovaarm_proxy` object. + + """ self.joint_speeds = [] + for i in range(7): - speed = np.rad2deg(p.getJointState(self.robot_id, i + 1)[1]) * self.gains[i] + angle = p.getJointState(self.robot_id, i + 1)[0] + error = (np.deg2rad(self.ext_joints.joints[i].angle) + - angle + math.pi) % (2 * math.pi) - math.pi + + speed = np.rad2deg(p.getJointState(self.robot_id, i + 1)[1]) * self.gains[i] - np.rad2deg(error) * 0.3 + + # print("e", error) self.joint_speeds.append(speed) self.speeds.jointSpeeds = self.joint_speeds - #print(self.gains) + + # print("Kinova move with speeds proxy action start", time.time()*1000 - self.timestamp) self.kinovaarm_proxy.moveJointsWithSpeed(self.speeds) + # print("Kinova move with speeds end", time.time()*1000 - self.timestamp) def updateGains(self): + """ + Updates the gains of its joints based on the difference between the current + pose and the best recorded pose, with a limit on the speed of the joint's + movement. 
+ + """ self.posesTimes = self.posesTimes - self.ext_joints.timestamp best_timestamp = np.abs(self.posesTimes).argmin() - print("Best timestamp: ", best_timestamp, self.posesTimes[best_timestamp], - self.ext_joints.timestamp) + # print("Best timestamp: ", best_timestamp, self.posesTimes[best_timestamp], + # self.ext_joints.timestamp) joints_state = self.poses[best_timestamp] for i in range(7): @@ -867,15 +1489,14 @@ def updateGains(self): error = (np.deg2rad(self.ext_joints.joints[i].angle) - angle + math.pi) % (2 * math.pi) - math.pi if abs(speed) > 0.01: - self.gains[i] += error * 0.1 - print("Gains: joint ", i, self.gains[i], "Kinova angle: ", - np.deg2rad(self.ext_joints.joints[i].angle), - "Pybullet angle: ", self.target_angles[i], "Error:", error) - # now draw the gains as timeseries in matplotlib + self.gains[i] += error * 0.2 + # print("Gains: joint ", i, self.gains[i], "Kinova angle: ", + # np.deg2rad(self.ext_joints.joints[i].angle), + # "Pybullet angle: ", self.target_angles[i], "Error:", error) + self.posesTimes = np.array([int(time.time() * 1000)]) self.poses = joints_state - print("/////////////////////") # =============== Methods for SubscribesTo ================ # ========================================================= @@ -883,6 +1504,16 @@ def updateGains(self): # SUBSCRIPTION to sendData method from JoystickAdapter interface # def JoystickAdapter_sendData(self, data): + """ + Receives data from a joystick and sends it to the robot's actuators, + adjusting the robot's position and orientation based on the data received. + + Args: + data (dict): Passed data from the joystick interface, which contains + information about the current state of the + joystick axes and buttons. + + """ match self.move_mode: case 0: for axis in data.axes: @@ -994,3 +1625,45 @@ def JoystickAdapter_sendData(self, data): self.move_mode = (self.move_mode + button.step) % 5 case "home": pass + + ###################### + # From the RoboCompCameraRGBDSimple you can call this methods: + # self.camerargbdsimple_proxy.getAll(...) + # self.camerargbdsimple_proxy.getDepth(...) + # self.camerargbdsimple_proxy.getImage(...) + # self.camerargbdsimple_proxy.getPoints(...) + + ###################### + # From the RoboCompCameraRGBDSimple you can use this types: + # RoboCompCameraRGBDSimple.Point3D + # RoboCompCameraRGBDSimple.TPoints + # RoboCompCameraRGBDSimple.TImage + # RoboCompCameraRGBDSimple.TDepth + # RoboCompCameraRGBDSimple.TRGBD + + ###################### + # From the RoboCompKinovaArm you can call this methods: + # self.kinovaarm_proxy.closeGripper(...) + # self.kinovaarm_proxy.getCenterOfTool(...) + # self.kinovaarm_proxy.getGripperState(...) + # self.kinovaarm_proxy.getJointsState(...) + # self.kinovaarm_proxy.moveJointsWithAngle(...) + # self.kinovaarm_proxy.moveJointsWithSpeed(...) + # self.kinovaarm_proxy.openGripper(...) + # self.kinovaarm_proxy.setCenterOfTool(...) + + ###################### + # From the RoboCompKinovaArm you can use this types: + # RoboCompKinovaArm.TPose + # RoboCompKinovaArm.TGripper + # RoboCompKinovaArm.TJoint + # RoboCompKinovaArm.TJoints + # RoboCompKinovaArm.TJointSpeeds + # RoboCompKinovaArm.TJointAngles + + ###################### + # From the RoboCompJoystickAdapter you can use this types: + # RoboCompJoystickAdapter.AxisParams + # RoboCompJoystickAdapter.ButtonParams + # RoboCompJoystickAdapter.TData +