DR. CHIDENTREE TREESATAYAPUN
Profesor Investigador — PUBLICACIONES
Para ver las publicaciones de todo Robótica y Manufactura Avanzada, ver: Publicaciones RYMA
Navarro-Gonzalez, Jose Luis; Lopez-Juarez, Ismael; Rios-Cabrera, Reyes; Ordaz-Hernandez, Keny On-line knowledge acquisition and enhancement in robotic assembly tasks Artículo de revista En: Robotics and Computer-Integrated Manufacturing, vol. 33, pp. 78 - 89, 2015, ISSN: 0736-5845, (Special Issue on Knowledge Driven Robotics and Manufacturing). Gutierrez-Rosas, P. A.; Vazquez-Lopez, J. A.; Yañez-Mendiola, J.; Lopez-Juarez, Ismael Color's measurement and discrimination, of a cosmetic product by an artificial vision system Proceedings Article En: 2015, ISSN: 0719-6806. Lopez-Juarez, Ismael; Castelan, Mario; Castro-Martînez, Francisco Javier; Peña-Cabrera, Mario; Osorio-Comparan, Roman Using Object’s Contour, Form and Depth to Embed Recognition Capability into Industrial Robots Artículo de revista En: Journal of Applied Research and Technology, vol. 11, no 1, pp. 5 - 17, 2013, ISSN: 1665-6423.2015
Artículos de revista
% Journal article: Navarro-Gonzalez et al., Robotics and Computer-Integrated
% Manufacturing, vol. 33 (2015). Citation key kept unchanged for existing \cite's.
% Review fixes: bare DOI (no http://dx.doi.org/ resolver prefix), en-dash page
% range, "Abstract" prefix / mid-field line break / "panting"->"painting" typo
% removed from the abstract, redundant `date` dropped (year is authoritative),
% empty `keywords = {}` removed (empty fields trigger BibTeX warnings).
@article{NavarroGonzalez201578b,
  title     = {On-line knowledge acquisition and enhancement in robotic assembly tasks},
  author    = {Navarro-Gonzalez, Jose Luis and Lopez-Juarez, Ismael and Rios-Cabrera, Reyes and Ordaz-Hernandez, Keny},
  journal   = {Robotics and Computer-Integrated Manufacturing},
  volume    = {33},
  pages     = {78--89},
  year      = {2015},
  issn      = {0736-5845},
  doi       = {10.1016/j.rcim.2014.08.013},
  url       = {http://www.sciencedirect.com/science/article/pii/S073658451400074X},
  abstract  = {Industrial robots are reliable machines for manufacturing tasks such as welding, painting, assembly, palletizing or kitting operations. They are traditionally programmed by an operator using a teach pendant in a point-to-point scheme with limited sensing capabilities such as industrial vision systems and force/torque sensing. The use of these sensing capabilities is associated to the particular robot controller, operative systems and programming language. Today, robots can react to environment changes specific to their task domain but are still unable to learn skills to effectively use their current knowledge. The need for such a skill in unstructured environments where knowledge can be acquired and enhanced is desirable so that robots can effectively interact in multimodal real-world scenarios. In this article we present a multimodal assembly controller (MAC) approach to embed and effectively enhance knowledge into industrial robots working in multimodal manufacturing scenarios such as assembly during kitting operations with varying shapes and tolerances. During learning, the robot uses its vision and force capabilities resembling a human operator carrying out the same operation. The approach consists of using a MAC based on the Fuzzy ARTMAP artificial neural network in conjunction with a knowledge base. The robot starts the operation having limited initial knowledge about what task it has to accomplish. During the operation, the robot learns the skill for recognising assembly parts and how to assemble them. The skill acquisition is evaluated by counting the steps to complete the assembly, length of the followed assembly path and compliant behaviour. The performance improves with time so that the robot becomes an expert demonstrated by the assembly of a kit with different part geometries. The kit is unknown by the robot at the beginning of the operation; therefore, the kit type, location and orientation are unknown as well as the parts to be assembled since they are randomly fed by a conveyor belt.},
  note      = {Special Issue on Knowledge Driven Robotics and Manufacturing},
  pubstate  = {published},
  tppubtype = {article}
}
Proceedings Articles
% Conference paper: Gutierrez-Rosas et al., IEEE CHILECON 2015.
% Review fixes: citation key made plain ASCII (the original key contained a
% LaTeX escape with braces, `Guti\'{e}rrez-Rosas2015`, which is not a valid
% BibTeX key -- any \cite of it was already broken); DOI trimmed of stray
% whitespace and resolver-free; invalid `date = {2015-00-00}` dropped in
% favour of `year`; accent written as a BibTeX special character; required
% `booktitle` added, inferred from the DOI/IEEE record (CHILECON 2015) --
% NOTE(review): verify the exact proceedings title against IEEE Xplore.
@inproceedings{Gutierrez-Rosas2015,
  title     = {Color's measurement and discrimination, of a cosmetic product by an artificial vision system},
  author    = {Gutierrez-Rosas, P. A. and Vazquez-Lopez, J. A. and Ya{\~n}ez-Mendiola, J. and Lopez-Juarez, Ismael},
  booktitle = {2015 CHILEAN Conference on Electrical, Electronics Engineering, Information and Communication Technologies ({CHILECON})},
  year      = {2015},
  issn      = {0719-6806},
  doi       = {10.1109/Chilecon.2015.7404664},
  url       = {http://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=7404664},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2013
Artículos de revista
% Journal article: Lopez-Juarez et al., Journal of Applied Research and
% Technology 11(1), 2013. Citation key kept unchanged for existing \cite's.
% Review fixes: bare DOI; en-dash page range; Unicode right quote (U+2019)
% replaced by an ASCII apostrophe (classic BibTeX is 8-bit only); accents
% written as BibTeX special characters, with dotless \i under the accent
% (NOTE(review): source shows Mart\^{i}nez with a circumflex -- possibly a
% mis-encoding of Martinez with an acute accent; confirm with the authors);
% "Abstract" prefix stripped; redundant `date` and empty `keywords` removed.
@article{LopezJuarez20135,
  title     = {Using Object's Contour, Form and Depth to Embed Recognition Capability into Industrial Robots},
  author    = {Lopez-Juarez, Ismael and Castelan, Mario and Castro-Mart{\^\i}nez, Francisco Javier and Pe{\~n}a-Cabrera, Mario and Osorio-Comparan, Roman},
  journal   = {Journal of Applied Research and Technology},
  volume    = {11},
  number    = {1},
  pages     = {5--17},
  year      = {2013},
  issn      = {1665-6423},
  doi       = {10.1016/S1665-6423(13)71511-6},
  url       = {http://www.sciencedirect.com/science/article/pii/S1665642313715116},
  abstract  = {Robot vision systems can differentiate parts by pattern matching irrespective of part orientation and location. Some manufacturers offer 3D guidance systems using robust vision and laser systems so that a 3D programmed point can be repeated even if the part is moved varying its location, rotation and orientation within the working space. Despite these developments, current industrial robots are still unable to recognize objects in a robust manner; that is, to distinguish an object among equally shaped objects taking into account not only the object's contour but also its form and depth information, which is precisely the major contribution of this research. Our hypothesis establishes that it is possible to integrate a robust invariant object recognition capability into industrial robots by using image features from the object's contour (boundary object information), its form (i.e., type of curvature or topographical surface information) and depth information (from stereo disparity maps). These features can be concatenated in order to form an invariant vector descriptor which is the input to an artificial neural network (ANN) for learning and recognition purposes. In this paper we present the recognition results under different working conditions using a KUKA KR16 industrial robot, which validated our approach.},
  pubstate  = {published},
  tppubtype = {article}
}