@inproceedings{huang:ssci14,
  author    = {Huang, Pei-Chi and Lehman, Joel and Mok, Aloysius K. and Miikkulainen, Risto and Sentis, Luis},
  title     = {Grasping Novel Objects with a Dexterous Robotic Hand through Neuroevolution},
  booktitle = {{IEEE} Symposium Series on Computational Intelligence},
  publisher = {IEEE},
  site      = {http://nn.cs.utexas.edu/?huang:ssci14},
  url       = {http://nn.cs.utexas.edu/?huang:ssci14},
  year      = {2014},
  abstract  = {Robotic grasping of a target object without advance knowledge of its three-dimensional model is a challenging problem. Many studies indicate that robot learning from demonstration (LfD) is a promising way to improve grasping performance, but complete automation of the grasping task in unforeseen circumstances remains difficult. As an alternative to LfD, this paper leverages limited human supervision to achieve robotic grasping of unknown objects in unforeseen circumstances. The technical question is what form of human supervision best minimizes the effort of the human supervisor. The approach here applies a human-supplied bounding box to focus the robot's visual processing on the target object, thereby lessening the dimensionality of the robot's computer vision processing. After the human supervisor defines the bounding box through the man-machine interface, the rest of the grasping task is automated through a vision-based feature-extraction approach where the dexterous hand learns to grasp objects without relying on pre-computed object models through the NEAT neuroevolution algorithm. Given only low-level sensing data from a commercial depth sensor Kinect, our approach evolves neural networks to identify appropriate hand positions and orientations for grasping novel objects. Further, the machine learning results from simulation have been validated by transferring the training results to a physical robot called Dreamer made by the Meka Robotics company. The results demonstrate that grasping novel objects through exploiting neuroevolution from simulation to reality is possible.},
}