{"id":76,"date":"2017-09-10T05:46:36","date_gmt":"2017-09-10T05:46:36","guid":{"rendered":"https:\/\/ryma.cinvestav.mx\/rrios\/?page_id=76"},"modified":"2026-01-09T21:07:36","modified_gmt":"2026-01-09T21:07:36","slug":"publicaciones","status":"publish","type":"page","link":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/","title":{"rendered":"Publicaciones"},"content":{"rendered":"<p>[et_pb_section bb_built=\u00bb1&#8243; fullwidth=\u00bbon\u00bb specialty=\u00bboff\u00bb background_image=\u00bbhttps:\/\/ryma.cinvestav.mx\/rrios\/wp-content\/uploads\/sites\/4\/2017\/09\/IMAGEN-CAMPA\u00d1A1.jpg\u00bb _builder_version=\u00bb3.0.72&#8243; background_color_gradient_start=\u00bbrgba(0,0,0,0.56)\u00bb background_color_gradient_end=\u00bb#ffffff\u00bb background_color_gradient_direction=\u00bb122deg\u00bb background_color_gradient_direction_radial=\u00bbleft\u00bb background_color_gradient_start_position=\u00bb9%\u00bb locked=\u00bbon\u00bb inner_width=\u00bbauto\u00bb inner_max_width=\u00bb1080px\u00bb background_color_gradient_stops=\u00bbrgba(0,0,0,0.56) 9%|#ffffff 100%\u00bb next_background_color=\u00bb#ffffff\u00bb][et_pb_fullwidth_header admin_label=\u00bbNombre del Investigador\u00bb title=\u00bbDR. 
REYES RIOS-CABRERA\u00bb subhead=\u00bbProfesor Investigador\u00bb background_layout=\u00bbdark\u00bb text_orientation=\u00bbleft\u00bb header_fullscreen=\u00bboff\u00bb header_scroll_down=\u00bboff\u00bb content_orientation=\u00bbcenter\u00bb image_orientation=\u00bbcenter\u00bb title_font=\u00bb|on|||\u00bb subhead_font=\u00bb|on|||\u00bb custom_button_one=\u00bboff\u00bb button_one_letter_spacing=\u00bb0&#8243; button_one_icon_placement=\u00bbright\u00bb button_one_letter_spacing_hover=\u00bb0&#8243; custom_button_two=\u00bboff\u00bb button_two_letter_spacing=\u00bb0&#8243; button_two_icon_placement=\u00bbright\u00bb button_two_letter_spacing_hover=\u00bb0&#8243; subhead_font_size=\u00bb20px\u00bb _builder_version=\u00bb3.0.72&#8243; background_overlay_color=\u00bbrgba(0,0,0,0.3)\u00bb parent_locked=\u00bbon\u00bb content_max_width=\u00bbnone\u00bb background_color=\u00bbrgba(255, 255, 255, 0)\u00bb button_one_text_size__hover_enabled=\u00bboff\u00bb button_two_text_size__hover_enabled=\u00bboff\u00bb button_one_text_color__hover_enabled=\u00bboff\u00bb button_two_text_color__hover_enabled=\u00bboff\u00bb button_one_border_width__hover_enabled=\u00bboff\u00bb button_two_border_width__hover_enabled=\u00bboff\u00bb button_one_border_color__hover_enabled=\u00bboff\u00bb button_two_border_color__hover_enabled=\u00bboff\u00bb button_one_border_radius__hover_enabled=\u00bboff\u00bb button_two_border_radius__hover_enabled=\u00bboff\u00bb button_one_letter_spacing__hover_enabled=\u00bbon\u00bb button_one_letter_spacing__hover=\u00bb0&#8243; button_two_letter_spacing__hover_enabled=\u00bbon\u00bb button_two_letter_spacing__hover=\u00bb0&#8243; button_one_bg_color__hover_enabled=\u00bboff\u00bb button_two_bg_color__hover_enabled=\u00bboff\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb button_one_bg_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb button_two_bg_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb 
\/][et_pb_fullwidth_menu menu_id=\u00bb6&#8243; background_layout=\u00bblight\u00bb text_orientation=\u00bbleft\u00bb submenu_direction=\u00bbdownwards\u00bb fullwidth_menu=\u00bboff\u00bb active_link_color=\u00bbrgba(131,0,233,0.81)\u00bb menu_font=\u00bb|on|||\u00bb _builder_version=\u00bb3.0.72&#8243; background_color_gradient_start=\u00bb#009f93&#8243; background_color_gradient_end=\u00bbrgba(0,0,0,0.27)\u00bb background_color_gradient_start_position=\u00bb20%\u00bb background_color=\u00bb#d3d3d3&#8243; parent_locked=\u00bbon\u00bb background_color_gradient_stops=\u00bb#009f93 20%|rgba(0,0,0,0.27) 100%\u00bb \/][\/et_pb_section][et_pb_section bb_built=\u00bb1&#8243; _builder_version=\u00bb3.0.72&#8243; custom_css_main_element=\u00bbbox-shadow: inset 0px 3px 2px rgba(50, 50, 50, 0.75);\u00bb locked=\u00bboff\u00bb inner_width=\u00bbauto\u00bb inner_max_width=\u00bb1080px\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb prev_background_color=\u00bb#000000&#8243; next_background_color=\u00bbrgba(0,0,0,0.32)\u00bb][et_pb_row parent_locked=\u00bboff\u00bb background_position=\u00bbtop_left\u00bb background_repeat=\u00bbrepeat\u00bb background_size=\u00bbinitial\u00bb width=\u00bb80%\u00bb max_width=\u00bb1080px\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb][et_pb_column type=\u00bb4_4&#8243; custom_padding__hover=\u00bb|||\u00bb custom_padding=\u00bb|||\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb][et_pb_text _builder_version=\u00bb4.20.0&#8243; parent_locked=\u00bboff\u00bb module_alignment=\u00bbleft\u00bb background_pattern_color=\u00bbrgba(0,0,0,0.2)\u00bb background_mask_color=\u00bb#ffffff\u00bb text_text_shadow_horizontal_length=\u00bbtext_text_shadow_style,%91object Object%93&#8243; text_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb text_text_shadow_vertical_length=\u00bbtext_text_shadow_style,%91object Object%93&#8243; 
text_text_shadow_vertical_length_tablet=\u00bb0px\u00bb text_text_shadow_blur_strength=\u00bbtext_text_shadow_style,%91object Object%93&#8243; text_text_shadow_blur_strength_tablet=\u00bb1px\u00bb link_text_shadow_horizontal_length=\u00bblink_text_shadow_style,%91object Object%93&#8243; link_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb link_text_shadow_vertical_length=\u00bblink_text_shadow_style,%91object Object%93&#8243; link_text_shadow_vertical_length_tablet=\u00bb0px\u00bb link_text_shadow_blur_strength=\u00bblink_text_shadow_style,%91object Object%93&#8243; link_text_shadow_blur_strength_tablet=\u00bb1px\u00bb ul_text_shadow_horizontal_length=\u00bbul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb ul_text_shadow_vertical_length=\u00bbul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_vertical_length_tablet=\u00bb0px\u00bb ul_text_shadow_blur_strength=\u00bbul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_blur_strength_tablet=\u00bb1px\u00bb ol_text_shadow_horizontal_length=\u00bbol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb ol_text_shadow_vertical_length=\u00bbol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_vertical_length_tablet=\u00bb0px\u00bb ol_text_shadow_blur_strength=\u00bbol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_blur_strength_tablet=\u00bb1px\u00bb quote_text_shadow_horizontal_length=\u00bbquote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb quote_text_shadow_vertical_length=\u00bbquote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_vertical_length_tablet=\u00bb0px\u00bb quote_text_shadow_blur_strength=\u00bbquote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_text_shadow_horizontal_length=\u00bbheader_text_shadow_style,%91object 
Object%93&#8243; header_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb header_text_shadow_vertical_length=\u00bbheader_text_shadow_style,%91object Object%93&#8243; header_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_text_shadow_blur_strength=\u00bbheader_text_shadow_style,%91object Object%93&#8243; header_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_2_text_shadow_horizontal_length=\u00bbheader_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb header_2_text_shadow_vertical_length=\u00bbheader_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_2_text_shadow_blur_strength=\u00bbheader_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_3_text_shadow_horizontal_length=\u00bbheader_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb header_3_text_shadow_vertical_length=\u00bbheader_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_3_text_shadow_blur_strength=\u00bbheader_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_4_text_shadow_horizontal_length=\u00bbheader_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb header_4_text_shadow_vertical_length=\u00bbheader_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_4_text_shadow_blur_strength=\u00bbheader_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_5_text_shadow_horizontal_length=\u00bbheader_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb 
header_5_text_shadow_vertical_length=\u00bbheader_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_5_text_shadow_blur_strength=\u00bbheader_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_blur_strength_tablet=\u00bb1px\u00bb header_6_text_shadow_horizontal_length=\u00bbheader_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_horizontal_length_tablet=\u00bb0px\u00bb header_6_text_shadow_vertical_length=\u00bbheader_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_vertical_length_tablet=\u00bb0px\u00bb header_6_text_shadow_blur_strength=\u00bbheader_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_blur_strength_tablet=\u00bb1px\u00bb box_shadow_horizontal_tablet=\u00bb0px\u00bb box_shadow_vertical_tablet=\u00bb0px\u00bb box_shadow_blur_tablet=\u00bb40px\u00bb box_shadow_spread_tablet=\u00bb0px\u00bb vertical_offset_tablet=\u00bb0&#8243; horizontal_offset_tablet=\u00bb0&#8243; z_index_tablet=\u00bb0&#8243;]<\/p>\n<hr \/>\n<div class='et-box et-shadow'>\n\t\t\t\t\t<div class='et-box-content'><h2><strong>PUBLICACIONES<\/strong><\/h2><\/div><\/div>\n<p>Para ver las publicaciones de todo Rob\u00f3tica y Manufactura Avanzada, ver:\u00a0 <strong><a href=\"https:\/\/ryma.cinvestav.mx\/investigacion\/publicaciones\/\">Publicaciones RYMA<\/a><\/strong><\/p>\n<p>Para una lista actualizada de publicaciones vea: <strong><a href=\"https:\/\/scholar.google.com\/citations?user=zFaPTk8AAAAJ&amp;hl=es\">GOOGLE SCHOLAR<\/a><\/strong><\/p>\n<p><strong><div class=\"teachpress_pub_list\"><form name=\"tppublistform\" method=\"get\"><a name=\"tppubs\" id=\"tppubs\"><\/a><div class=\"teachpress_filter\"><select class=\"default\" name=\"yr\" id=\"yr\" tabindex=\"2\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option 
value=\"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=#tppubs\">Todos los a\u00f1os<\/option>\r\n                   <option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2020#tppubs\" >2020<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2019#tppubs\" >2019<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2018#tppubs\" >2018<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2017#tppubs\" >2017<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2016#tppubs\" >2016<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2015#tppubs\" >2015<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2014#tppubs\" >2014<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2013#tppubs\" >2013<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2012#tppubs\" >2012<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2011#tppubs\" >2011<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2010#tppubs\" >2010<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2009#tppubs\" >2009<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2008#tppubs\" >2008<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2007#tppubs\" >2007<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2006#tppubs\" >2006<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2005#tppubs\" >2005<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2004#tppubs\" >2004<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2003#tppubs\" >2003<\/option>\r\n                <\/select><select class=\"default\" name=\"type\" id=\"type\" tabindex=\"3\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option 
value=\"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=#tppubs\">Todas las tipolog\u00edas<\/option>\r\n                   <option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=article#tppubs\" >Art\u00edculos de revista<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=conference#tppubs\" >Conferencias<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=inbook#tppubs\" >Cap\u00edtulos de libros<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=inproceedings#tppubs\" >Proceedings Articles<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=proceedings#tppubs\" >Actas de congresos<\/option>\r\n                <\/select><select class=\"default\" name=\"usr\" id=\"usr\" tabindex=\"6\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option value=\"tgid=&amp;yr=&amp;type=&amp;auth=&amp;usr=#tppubs\">Todos los usuarios<\/option>\r\n                   <option value = \"tgid=&amp;yr=&amp;type=&amp;auth=&amp;usr=12#tppubs\" >mcastelan<\/option>\r\n                <\/select><\/div><\/form><div class=\"teachpress_publication_list\"><h3 class=\"tp_h3\" id=\"tp_h3_2019\">2019<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rico-Fernandez, Maria;  Rios-Cabrera, Reyes;  Castelan, Mario;  Guerrero-Reyes, Hector;  Juarez-Maldonado, Antonio<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('224','tp_links')\" style=\"cursor:pointer;\">A contextualized approach for segmentation of foliage in different crop species<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Computers and Electronics in Agriculture, 
<\/span><span class=\"tp_pub_additional_volume\">vol. 156, <\/span><span class=\"tp_pub_additional_pages\">pp. 378-386, <\/span><span class=\"tp_pub_additional_year\">2019<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 0168-1699<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_224\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('224','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_224\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('224','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_224\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rico-Fernandez2019,<br \/>\r\ntitle = {A contextualized approach for segmentation of foliage in different crop species},<br \/>\r\nauthor = {Rico-Fernandez, Maria and Rios-Cabrera, Reyes and Castelan, Mario and Guerrero-Reyes, Hector and Juarez-Maldonado, Antonio},<br \/>\r\neditor = {Elsevier},<br \/>\r\nurl = {https:\/\/doi.org\/10.1016\/j.compag.2018.11.033},<br \/>\r\nissn = {0168-1699},<br \/>\r\nyear  = {2019},<br \/>\r\ndate = {2019-01-01},<br \/>\r\njournal = {Computers and Electronics in Agriculture},<br \/>\r\nvolume = {156},<br \/>\r\npages = {378-386},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('224','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_224\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/doi.org\/10.1016\/j.compag.2018.11.033\" title=\"https:\/\/doi.org\/10.1016\/j.compag.2018.11.033\" 
target=\"_blank\">https:\/\/doi.org\/10.1016\/j.compag.2018.11.033<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('224','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2017\">2017<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes;  Hsieh, S J;  Howarth, M.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('205','tp_links')\" style=\"cursor:pointer;\">A hybrid non-invasive method for internal\/external quality assessment of potatoes<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">European Food Research and Technology, <\/span><span class=\"tp_pub_additional_year\">2017<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1438-2385<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_205\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Lopez-Juarez2017,<br \/>\r\ntitle = {A hybrid non-invasive method for internal\/external 
quality assessment of potatoes},<br \/>\r\nauthor = {Lopez-Juarez, Ismael and Rios-Cabrera, Reyes and Hsieh,S J and Howarth, M .},<br \/>\r\nurl = {https:\/\/doi.org\/10.1007\/s00217-017-2936-9},<br \/>\r\ndoi = {10.1007\/s00217-017-2936-9},<br \/>\r\nissn = {1438-2385},<br \/>\r\nyear  = {2017},<br \/>\r\ndate = {2017-07-11},<br \/>\r\njournal = {European Food Research and Technology},<br \/>\r\nabstract = {Consumers purchase fruits and vegetables based on its quality, which can be defined as a degree of excellence which is the result of a combination of characteristics, attributes and properties that have significance for market acceptability. In this paper, a novel hybrid active imaging methodology for potato quality inspection that uses an optical colour camera and an infrared thermal camera is presented. The methodology employs an artificial neural network (ANN) that uses quality data composed by two descriptors as input. The ANN works as a feature classifier so that its output is the potato quality grade. The input vector contains information related to external characteristics, such as shape, weight, length and width. Internal characteristics are also accounted for in the input vector in the form of excessive sugar content. The extra sugar content of the potato is an important problem for potato growers and potato chip manufacturers. Extra sugar content could result in diseases or wounds in the potato tuber. In general, potato tubers with low sugar content are considered as having a higher quality. The validation of the methodology was made through experimentation which consisted in fusing both, external and internal characteristics in the input vector to the ANN for an overall quality classification. 
Results using internal data as obtained from an infrared camera and fused with optical external parameters demonstrated the feasibility of the method since the prediction accuracy increased during potato grading.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_205\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Consumers purchase fruits and vegetables based on its quality, which can be defined as a degree of excellence which is the result of a combination of characteristics, attributes and properties that have significance for market acceptability. In this paper, a novel hybrid active imaging methodology for potato quality inspection that uses an optical colour camera and an infrared thermal camera is presented. The methodology employs an artificial neural network (ANN) that uses quality data composed by two descriptors as input. The ANN works as a feature classifier so that its output is the potato quality grade. The input vector contains information related to external characteristics, such as shape, weight, length and width. Internal characteristics are also accounted for in the input vector in the form of excessive sugar content. The extra sugar content of the potato is an important problem for potato growers and potato chip manufacturers. Extra sugar content could result in diseases or wounds in the potato tuber. In general, potato tubers with low sugar content are considered as having a higher quality. The validation of the methodology was made through experimentation which consisted in fusing both, external and internal characteristics in the input vector to the ANN for an overall quality classification. 
Results using internal data as obtained from an infrared camera and fused with optical external parameters demonstrated the feasibility of the method since the prediction accuracy increased during potato grading.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_205\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/doi.org\/10.1007\/s00217-017-2936-9\" title=\"https:\/\/doi.org\/10.1007\/s00217-017-2936-9\" target=\"_blank\">https:\/\/doi.org\/10.1007\/s00217-017-2936-9<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00217-017-2936-9\" title=\"DOI de seguimiento:10.1007\/s00217-017-2936-9\" target=\"_blank\">doi:10.1007\/s00217-017-2936-9<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2016\">2016<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Aviles-Vi\u00f1as, Jaime F;  Rios-Cabrera, Reyes;  Lopez-Juarez, Ismael<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('7','tp_links')\" style=\"cursor:pointer;\">On-line learning of welding bead geometry in industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">The International Journal of Advanced Manufacturing Technology, <\/span><span class=\"tp_pub_additional_volume\">vol. 
83, <\/span><span class=\"tp_pub_additional_number\">no 1, <\/span><span class=\"tp_pub_additional_pages\">pp. 217\u2013231, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1433-3015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_7\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Aviles-Vi\\~{n}as2016b,<br \/>\r\ntitle = {On-line learning of welding bead geometry in industrial robots},<br \/>\r\nauthor = {Aviles-Vi\\~{n}as, Jaime F and Rios-Cabrera, Reyes and Lopez-Juarez, Ismael },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6},<br \/>\r\ndoi = {10.1007\/s00170-015-7422-6},<br \/>\r\nissn = {1433-3015},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {The International Journal of Advanced Manufacturing Technology},<br \/>\r\nvolume = {83},<br \/>\r\nnumber = {1},<br \/>\r\npages = {217--231},<br \/>\r\nabstract = {In this paper, we propose an architecture based on an artificial neural network (ANN), to learn welding skills automatically in industrial robots. With the aid of an optic camera and a laser-based sensor, the bead geometry (width and height) is measured. 
We propose a real-time computer vision algorithm to extract training patterns in order to acquire knowledge to later predict specific geometries. The proposal is implemented and tested in an industrial KUKA KR16 robot and a GMAW type machine within a manufacturing cell. Several data analysis are described as well as off-line and on-line training, learning strategies, and testing experimentation. It is demonstrated during our experiments that, after learning the skill, the robot is able to produce the requested bead geometry even without any knowledge about the welding parameters such as arc voltage and current. We implemented an on-line learning test, where the whole experiments and learning process take only about 4 min. Using this knowledge later, we obtained up to 95 % accuracy in prediction.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_7\" style=\"display:none;\"><div class=\"tp_abstract_entry\">In this paper, we propose an architecture based on an artificial neural network (ANN), to learn welding skills automatically in industrial robots. With the aid of an optic camera and a laser-based sensor, the bead geometry (width and height) is measured. We propose a real-time computer vision algorithm to extract training patterns in order to acquire knowledge to later predict specific geometries. The proposal is implemented and tested in an industrial KUKA KR16 robot and a GMAW type machine within a manufacturing cell. Several data analysis are described as well as off-line and on-line training, learning strategies, and testing experimentation. 
It is demonstrated during our experiments that, after learning the skill, the robot is able to produce the requested bead geometry even without any knowledge about the welding parameters such as arc voltage and current. We implemented an on-line learning test, where the whole experiments and learning process take only about 4 min. Using this knowledge later, we obtained up to 95 % accuracy in prediction.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_7\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" title=\"http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" title=\"DOI de seguimiento:10.1007\/s00170-015-7422-6\" target=\"_blank\">doi:10.1007\/s00170-015-7422-6<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Morales-Diaz, America B.;  Aviles-Vi\u00f1as, Jaime F;  Lopez-Juarez, Ismael<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('6','tp_links')\" style=\"cursor:pointer;\">Robotic GMAW online learning: issues and experiments<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">The International Journal of Advanced Manufacturing Technology, <\/span><span 
class=\"tp_pub_additional_volume\">vol. 87, <\/span><span class=\"tp_pub_additional_number\">no 5, <\/span><span class=\"tp_pub_additional_pages\">pp. 2113\u20132134, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1433-3015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_6\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rios-Cabrera2016,<br \/>\r\ntitle = {Robotic GMAW online learning: issues and experiments},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Morales-Diaz, America B. and Aviles-Vi\\~{n}as, Jaime F and Lopez-Juarez, Ismael },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0},<br \/>\r\ndoi = {10.1007\/s00170-016-8618-0},<br \/>\r\nissn = {1433-3015},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {The International Journal of Advanced Manufacturing Technology},<br \/>\r\nvolume = {87},<br \/>\r\nnumber = {5},<br \/>\r\npages = {2113--2134},<br \/>\r\nabstract = {This paper presents three main contributions: (i) an experimental analysis of variables, using well-defined statistical patterns applied to the main parameters of the welding process. 
(ii) An on-line\/off-line learning and testing method, showing that robots can acquire a useful knowledge base without human intervention to learn and reproduce bead geometries. And finally, (iii) an on-line testing analysis including penetration of the bead, that is used to train an artificial neural network (ANN). For the experiments, an optic camera was used in order to measure bead geometry (width and height). Also real-time computer vision algorithms were implemented to extract training patterns. The proposal was carried out using an industrial KUKA robot and a GMAW type machine inside a manufacturing cell. We present expermental analysis that show different issues and solutions to build an industrial adaptive system for the robotics welding process.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_6\" style=\"display:none;\"><div class=\"tp_abstract_entry\">This paper presents three main contributions: (i) an experimental analysis of variables, using well-defined statistical patterns applied to the main parameters of the welding process. (ii) An on-line\/off-line learning and testing method, showing that robots can acquire a useful knowledge base without human intervention to learn and reproduce bead geometries. And finally, (iii) an on-line testing analysis including penetration of the bead, that is used to train an artificial neural network (ANN). For the experiments, an optic camera was used in order to measure bead geometry (width and height). Also real-time computer vision algorithms were implemented to extract training patterns. The proposal was carried out using an industrial KUKA robot and a GMAW type machine inside a manufacturing cell. 
We present expermental analysis that show different issues and solutions to build an industrial adaptive system for the robotics welding process.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_6\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" title=\"http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" title=\"DOI de seguimiento:10.1007\/s00170-016-8618-0\" target=\"_blank\">doi:10.1007\/s00170-016-8618-0<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Benitez Perez, H.;  Lopez-Juarez, Ismael;  Garza-Alanis, P. C.;  Rios-Cabrera, Reyes;  Duran Chavesti, A.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('163','tp_links')\" style=\"cursor:pointer;\">Reconfiguration Distributed Objects in an Intelligent Manufacturing Cell<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">IEEE Latin America Transactions, <\/span><span class=\"tp_pub_additional_volume\">vol. 14, <\/span><span class=\"tp_pub_additional_number\">no 1, <\/span><span class=\"tp_pub_additional_pages\">pp. 
136-146, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1548-0992<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_163\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{7430073,<br \/>\r\ntitle = {Reconfiguration Distributed Objects in an Intelligent Manufacturing Cell},<br \/>\r\nauthor = {Benitez Perez, H. and Lopez-Juarez, Ismael and Garza-Alanis, P. C. and Rios-Cabrera, Reyes and Duran Chavesti, A.},<br \/>\r\ndoi = {10.1109\/TLA.2016.7430073},<br \/>\r\nissn = {1548-0992},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {IEEE Latin America Transactions},<br \/>\r\nvolume = {14},<br \/>\r\nnumber = {1},<br \/>\r\npages = {136-146},<br \/>\r\nabstract = {A manufacture system with the abilities of easy reconfiguration and highly scalability becomes flexible, dynamic and open to the use of software technologies. To give these abilities to a manufacture cell formed of three industrial robots and two conveyors, a middleware based on the programming standard Common Object Request Broker Architecture (CORA) was developed, thus creating a distributed manufacture cell, allowing us to have a real production with different final products. 
In order to optimize the production times of the different products to be manufactured, a product scheduler was developed using the algorithm Earlies Deadline First (EDF) and the support algorithm Deferrable Server (DS). Given that failures may occur on any of the specialized modules of the manufacture system, the self reconfiguration of the manufacture system is something very desirable. This article propose an algorithm to solve this problem, the algorithm identifies the failures in relation to the time it takes the system to make a product, then makes a modification on the working speed of the plant elements of the specialized modules.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_163\" style=\"display:none;\"><div class=\"tp_abstract_entry\">A manufacture system with the abilities of easy reconfiguration and highly scalability becomes flexible, dynamic and open to the use of software technologies. To give these abilities to a manufacture cell formed of three industrial robots and two conveyors, a middleware based on the programming standard Common Object Request Broker Architecture (CORA) was developed, thus creating a distributed manufacture cell, allowing us to have a real production with different final products. In order to optimize the production times of the different products to be manufactured, a product scheduler was developed using the algorithm Earlies Deadline First (EDF) and the support algorithm Deferrable Server (DS). Given that failures may occur on any of the specialized modules of the manufacture system, the self reconfiguration of the manufacture system is something very desirable. 
This article propose an algorithm to solve this problem, the algorithm identifies the failures in relation to the time it takes the system to make a product, then makes a modification on the working speed of the plant elements of the specialized modules.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_163\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1109\/TLA.2016.7430073\" title=\"DOI de seguimiento:10.1109\/TLA.2016.7430073\" target=\"_blank\">doi:10.1109\/TLA.2016.7430073<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_proceedings\">Actas de congresos<\/h3><div class=\"tp_publication tp_publication_proceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes;  Gutierrez-Flores, Hugo<\/p><p class=\"tp_pub_title\">Towards intelligent robotic agents for cooperative tasks <span class=\"tp_pub_type tp_  proceedings\">Actas de congresos<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_year\">2016<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_139\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('139','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_139\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@proceedings{Mirelez-Delgado2016,<br \/>\r\ntitle = {Towards intelligent robotic agents for cooperative tasks},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. 
and Rios-Cabrera, Reyes and Gutierrez-Flores, Hugo},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-06-06},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {proceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('139','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_proceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes<\/p><p class=\"tp_pub_title\">Kinematic control for an omnidirectional mobile manipulator <span class=\"tp_pub_type tp_  proceedings\">Actas de congresos<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_year\">2016<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_157\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('157','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_157\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@proceedings{Mirelez-Delgado2016,<br \/>\r\ntitle = {Kinematic control for an omnidirectional mobile manipulator},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. 
and Rios-Cabrera, Reyes},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-06-06},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {proceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('157','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2015\">2015<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Aviles-Vi\u00f1as, Jaime F;  Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('13','tp_links')\" style=\"cursor:pointer;\">Acquisition of welding skills in industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Industrial Robot: An International Journal, <\/span><span class=\"tp_pub_additional_volume\">vol. 42, <\/span><span class=\"tp_pub_additional_number\">no 2, <\/span><span class=\"tp_pub_additional_pages\">pp. 
156-166, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_13\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{doi:10.1108\/IR-09-2014-0395,<br \/>\r\ntitle = {Acquisition of welding skills in industrial robots},<br \/>\r\nauthor = {Aviles-Vi\\~{n}as, Jaime F and Lopez-Juarez, Ismael and Rios-Cabrera, Reyes },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395},<br \/>\r\ndoi = {10.1108\/IR-09-2014-0395},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\njournal = {Industrial Robot: An International Journal},<br \/>\r\nvolume = {42},<br \/>\r\nnumber = {2},<br \/>\r\npages = {156-166},<br \/>\r\nabstract = {Purpose \\textendash The purpose of this paper was to propose a method based on an Artificial Neural Network and a real-time vision algorithm, to learn welding skills in industrial robotics. Design\/methodology\/approach \\textendash By using an optic camera to measure the bead geometry (width and height), the authors propose a real-time computer vision algorithm to extract training patterns and to enable an industrial robot to acquire and learn autonomously the welding skill. To test the approach, an industrial KUKA robot and a welding gas metal arc welding machine were used in a manufacturing cell. 
Findings \\textendash Several data analyses are described, showing empirically that industrial robots can acquire the skill even if the specific welding parameters are unknown. Research limitations\/implications \\textendash The approach considers only stringer beads. Weave bead and bead penetration are not considered. Practical implications \\textendash With the proposed approach, it is possible to learn specific welding parameters despite of the material, type of robot or welding machine. This is due to the fact that the feedback system produces automatic measurements that are labelled prior to the learning process. Originality\/value \\textendash The main contribution is that the complex learning process is reduced into an input-process-output system, where the process part is learnt automatically without human supervision, by registering the patterns with an automatically calibrated vision system.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_13\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Purpose \u2013 The purpose of this paper was to propose a method based on an Artificial Neural Network and a real-time vision algorithm, to learn welding skills in industrial robotics. Design\/methodology\/approach \u2013 By using an optic camera to measure the bead geometry (width and height), the authors propose a real-time computer vision algorithm to extract training patterns and to enable an industrial robot to acquire and learn autonomously the welding skill. To test the approach, an industrial KUKA robot and a welding gas metal arc welding machine were used in a manufacturing cell. 
Findings \u2013 Several data analyses are described, showing empirically that industrial robots can acquire the skill even if the specific welding parameters are unknown. Research limitations\/implications \u2013 The approach considers only stringer beads. Weave bead and bead penetration are not considered. Practical implications \u2013 With the proposed approach, it is possible to learn specific welding parameters despite of the material, type of robot or welding machine. This is due to the fact that the feedback system produces automatic measurements that are labelled prior to the learning process. Originality\/value \u2013 The main contribution is that the complex learning process is reduced into an input-process-output system, where the process part is learnt automatically without human supervision, by registering the patterns with an automatically calibrated vision system.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_13\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" title=\"http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" target=\"_blank\">http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" title=\"DOI de seguimiento:10.1108\/IR-09-2014-0395\" target=\"_blank\">doi:10.1108\/IR-09-2014-0395<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Navarro-Gonzalez, Jose Luis;  Lopez-Juarez, Ismael;  Ordaz-Hernandez, Keny;  Rios-Cabrera, Reyes<\/p><p 
class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('14','tp_links')\" style=\"cursor:pointer;\">On-line incremental learning for unknown conditions during assembly operations with industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Evolving Systems, <\/span><span class=\"tp_pub_additional_volume\">vol. 6, <\/span><span class=\"tp_pub_additional_number\">no 2, <\/span><span class=\"tp_pub_additional_pages\">pp. 101\u2013114, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1868-6486<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_14\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Navarro-Gonzalez2015,<br \/>\r\ntitle = {On-line incremental learning for unknown conditions during assembly operations with industrial robots},<br \/>\r\nauthor = {Navarro-Gonzalez, Jose Luis and Lopez-Juarez, Ismael and Ordaz-Hernandez, Keny and Rios-Cabrera, Reyes },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x},<br \/>\r\ndoi = {10.1007\/s12530-014-9125-x},<br \/>\r\nissn = {1868-6486},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = 
{2015-01-01},<br \/>\r\njournal = {Evolving Systems},<br \/>\r\nvolume = {6},<br \/>\r\nnumber = {2},<br \/>\r\npages = {101--114},<br \/>\r\nabstract = {The assembly operation using industrial robots can be accomplished successfully in well-structured environments where the mating pair location is known in advance. However, in real-world scenarios there are uncertainties associated to sensing, control and modelling errors that make the assembly task very complex. In addition, there are also unmodeled uncertainties that have to be taken into account for an effective control algorithm to succeed. Among these uncertainties, it can be mentioned disturbances, backlash and aging of mechanisms. In this paper, a method to overcome the effect of those uncertainties based on the Fuzzy ARTMAP artificial neural network (ANN) to successfully accomplish the assembly task is proposed. Experimental work is reported using an industrial 6 DOF robot arm in conjunction with a vision system for part location and wrist force\/torque sensing data for assembly. Force data is fed into an ANN evolving controller during a typical peg in hole (PIH) assembly operation. The controller uses an incremental learning mechanism that is solely guided by the sensed forces. In this article, two approaches are presented in order to compare the incremental learning capability of the manipulator. The first approach uses a primitive knowledge base (PKB) containing 16 primitive movements to learn online the first insertion. During assembly, the manipulator learns new patterns according to the learning criteria which turn the PKB into an enhanced knowledge base (EKB). During a second insertion the controller uses effectively the EKB and operation improves. The second approach employs minimum information (it contains only the assembly direction) and the process starts from scratch. After several operations, that knowledge base increases by including only the needed patterns to perform the insertion. 
Experimental results showed that the evolving controller is able to assemble the matting pairs enhancing its knowledge whenever it is needed depending on the part geometry and level of expertise. Our approach is demonstrated through several PIH operations with different tolerances and part geometry. As the robot's expertise evolves, the PIH operation is carried out faster with shorter assembly trajectories.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_14\" style=\"display:none;\"><div class=\"tp_abstract_entry\">The assembly operation using industrial robots can be accomplished successfully in well-structured environments where the mating pair location is known in advance. However, in real-world scenarios there are uncertainties associated to sensing, control and modelling errors that make the assembly task very complex. In addition, there are also unmodeled uncertainties that have to be taken into account for an effective control algorithm to succeed. Among these uncertainties, it can be mentioned disturbances, backlash and aging of mechanisms. In this paper, a method to overcome the effect of those uncertainties based on the Fuzzy ARTMAP artificial neural network (ANN) to successfully accomplish the assembly task is proposed. Experimental work is reported using an industrial 6 DOF robot arm in conjunction with a vision system for part location and wrist force\/torque sensing data for assembly. Force data is fed into an ANN evolving controller during a typical peg in hole (PIH) assembly operation. The controller uses an incremental learning mechanism that is solely guided by the sensed forces. In this article, two approaches are presented in order to compare the incremental learning capability of the manipulator. 
The first approach uses a primitive knowledge base (PKB) containing 16 primitive movements to learn online the first insertion. During assembly, the manipulator learns new patterns according to the learning criteria which turn the PKB into an enhanced knowledge base (EKB). During a second insertion the controller uses effectively the EKB and operation improves. The second approach employs minimum information (it contains only the assembly direction) and the process starts from scratch. After several operations, that knowledge base increases by including only the needed patterns to perform the insertion. Experimental results showed that the evolving controller is able to assemble the matting pairs enhancing its knowledge whenever it is needed depending on the part geometry and level of expertise. Our approach is demonstrated through several PIH operations with different tolerances and part geometry. As the robot's expertise evolves, the PIH operation is carried out faster with shorter assembly trajectories.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_14\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" title=\"http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" title=\"DOI de seguimiento:10.1007\/s12530-014-9125-x\" target=\"_blank\">doi:10.1007\/s12530-014-9125-x<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p 
class=\"tp_pub_author\"> Navarro-Gonzalez, Jose Luis;  Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes;  Ordaz-Hernandez, Keny<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('15','tp_links')\" style=\"cursor:pointer;\">On-line knowledge acquisition and enhancement in robotic assembly tasks<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Robotics and Computer-Integrated Manufacturing, <\/span><span class=\"tp_pub_additional_volume\">vol. 33, <\/span><span class=\"tp_pub_additional_pages\">pp. 78 - 89, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 0736-5845<\/span><span class=\"tp_pub_additional_note\">, (Special Issue on Knowledge Driven Robotics and Manufacturing)<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_15\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{NavarroGonzalez201578b,<br \/>\r\ntitle = {On-line knowledge acquisition and enhancement in robotic assembly tasks},<br \/>\r\nauthor = {Navarro-Gonzalez, Jose Luis and Lopez-Juarez, Ismael and Rios-Cabrera, Reyes and Ordaz-Hernandez, Keny},<br \/>\r\nurl = 
{http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X},<br \/>\r\ndoi = {http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013},<br \/>\r\nissn = {0736-5845},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\njournal = {Robotics and Computer-Integrated Manufacturing},<br \/>\r\nvolume = {33},<br \/>\r\npages = {78 - 89},<br \/>\r\nabstract = {Abstract Industrial robots are reliable machines for manufacturing tasks such as welding, panting, assembly, palletizing or kitting operations. They are traditionally programmed by an operator using a teach pendant in a point-to-point scheme with limited sensing capabilities such as industrial vision systems and force\/torque sensing. The use of these sensing capabilities is associated to the particular robot controller, operative systems and programming language. Today, robots can react to environment changes specific to their task domain but are still unable to learn skills to effectively use their current knowledge. The need for such a skill in unstructured environments where knowledge can be acquired and enhanced is desirable so that robots can effectively interact in multimodal real-world scenarios. In this article we present a multimodal assembly controller (MAC) approach to embed and effectively enhance knowledge into industrial robots working in multimodal manufacturing scenarios such as assembly during kitting operations with varying shapes and tolerances. During learning, the robot uses its vision and force capabilities resembling a human operator carrying out the same operation. The approach consists of using a MAC based on the Fuzzy ARTMAP artificial neural network in conjunction with a knowledge base. The robot starts the operation having limited initial knowledge about what task it has to accomplish. During the operation, the robot learns the skill for recognising assembly parts and how to assemble them. 
The skill acquisition is evaluated by counting the steps to complete the assembly, length of the followed assembly path and compliant behaviour. The performance improves with time so that the robot becomes an expert demonstrated by the assembly of a kit with different part geometries. The kit is unknown by the robot at the beginning of the operation; therefore, the kit type, location and orientation are unknown as well as the parts to be assembled since they are randomly fed by a conveyor belt.},<br \/>\r\nnote = {Special Issue on Knowledge Driven Robotics and Manufacturing},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_15\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Abstract Industrial robots are reliable machines for manufacturing tasks such as welding, panting, assembly, palletizing or kitting operations. They are traditionally programmed by an operator using a teach pendant in a point-to-point scheme with limited sensing capabilities such as industrial vision systems and force\/torque sensing. The use of these sensing capabilities is associated to the particular robot controller, operative systems and programming language. Today, robots can react to environment changes specific to their task domain but are still unable to learn skills to effectively use their current knowledge. The need for such a skill in unstructured environments where knowledge can be acquired and enhanced is desirable so that robots can effectively interact in multimodal real-world scenarios. 
In this article we present a multimodal assembly controller (MAC) approach to embed and effectively enhance knowledge into industrial robots working in multimodal manufacturing scenarios such as assembly during kitting operations with varying shapes and tolerances. During learning, the robot uses its vision and force capabilities resembling a human operator carrying out the same operation. The approach consists of using a MAC based on the Fuzzy ARTMAP artificial neural network in conjunction with a knowledge base. The robot starts the operation having limited initial knowledge about what task it has to accomplish. During the operation, the robot learns the skill for recognising assembly parts and how to assemble them. The skill acquisition is evaluated by counting the steps to complete the assembly, length of the followed assembly path and compliant behaviour. The performance improves with time so that the robot becomes an expert demonstrated by the assembly of a kit with different part geometries. 
The kit is unknown by the robot at the beginning of the operation; therefore, the kit type, location and orientation are unknown as well as the parts to be assembled since they are randomly fed by a conveyor belt.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_15\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X\" title=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X\" target=\"_blank\">http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013\" title=\"DOI de seguimiento:http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013\" target=\"_blank\">doi:http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes;  Perez-Villeda, Hector Manuel<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('105','tp_links')\" style=\"cursor:pointer;\">Control Servovisual de un Kuka youBot para la manipulacion y traslado de objetos<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_year\">2015<\/span>.<\/p><p 
class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_105\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{Mireles-Delgado2015,<br \/>\r\ntitle = {Control Servovisual de un Kuka youBot para la manipulacion y traslado de objetos},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. and Rios-Cabrera, Reyes and Perez-Villeda, Hector Manuel},<br \/>\r\nurl = {http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\nabstract = {Este trabajo presenta la implementaci\u00b4on de un Control Servovisual Basado en<br \/>\r\nImagen en un robot manipulador m\u00b4ovil omnidireccional Kuka youBot. El sistema de visi\u00b4on<br \/>\r\nest\u00b4a compuesto por un sensor RGB-D Asus Xtion Pror. La ley de control implementada tiene<br \/>\r\nla estructura de un PD cl\u00b4asico para la plataforma m\u00b4ovil. El manipulador m\u00b4ovil se desplaza a<br \/>\r\npuntos 3D conocidos mediante el c\u00b4alculo de cinem\u00b4atica inversa. 
En este art\u00b4\u0131culo se demuestra<br \/>\r\nla efectividad del algoritmo en la localizaci\u00b4on del objeto de inter\u00b4es as\u00b4\u0131 como en la manipulaci\u00b4on<br \/>\r\ndel mismo para llevarlo de su lugar original a otro espacio deseado.<br \/>\r\n},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_105\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Este trabajo presenta la implementaci\u00b4on de un Control Servovisual Basado en<br \/>\r\nImagen en un robot manipulador m\u00b4ovil omnidireccional Kuka youBot. El sistema de visi\u00b4on<br \/>\r\nest\u00b4a compuesto por un sensor RGB-D Asus Xtion Pror. La ley de control implementada tiene<br \/>\r\nla estructura de un PD cl\u00b4asico para la plataforma m\u00b4ovil. El manipulador m\u00b4ovil se desplaza a<br \/>\r\npuntos 3D conocidos mediante el c\u00b4alculo de cinem\u00b4atica inversa. 
En este art\u00b4\u0131culo se demuestra<br \/>\r\nla efectividad del algoritmo en la localizaci\u00b4on del objeto de inter\u00b4es as\u00b4\u0131 como en la manipulaci\u00b4on<br \/>\r\ndel mismo para llevarlo de su lugar original a otro espacio deseado.<br \/>\r\n<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_105\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-file-pdf\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf\" title=\"http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf\" target=\"_blank\">http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2014\">2014<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('64','tp_links')\" style=\"cursor:pointer;\">Boosting Masked Dominant Orientation Templates for Efficient Object Detection<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Comput. Vis. Image Underst., <\/span><span class=\"tp_pub_additional_volume\">vol. 120, <\/span><span class=\"tp_pub_additional_pages\">pp. 
103\u2013116, <\/span><span class=\"tp_pub_additional_year\">2014<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1077-3142<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_64\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('64','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_64\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('64','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_64\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rios-Cabrera:2014:BMD:2583127.2583285,<br \/>\r\ntitle = {Boosting Masked Dominant Orientation Templates for Efficient Object Detection},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008},<br \/>\r\ndoi = {10.1016\/j.cviu.2013.12.008},<br \/>\r\nissn = {1077-3142},<br \/>\r\nyear  = {2014},<br \/>\r\ndate = {2014-01-01},<br \/>\r\njournal = {Comput. Vis. 
Image Underst.},<br \/>\r\nvolume = {120},<br \/>\r\npages = {103--116},<br \/>\r\npublisher = {Elsevier Science Inc.},<br \/>\r\naddress = {New York, NY, USA},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('64','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_64\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" title=\"http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" target=\"_blank\">http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" title=\"DOI de seguimiento:10.1016\/j.cviu.2013.12.008\" target=\"_blank\">doi:10.1016\/j.cviu.2013.12.008<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('64','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2013\">2013<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\">Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">The IEEE International Conference on Computer Vision (ICCV), <\/span><span class=\"tp_pub_additional_year\">2013<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_215\" 
class=\"tp_show\" onclick=\"teachpress_pub_showhide('215','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_215\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{Rios-Cabrera_2013_ICCV__B,<br \/>\r\ntitle = {Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nyear  = {2013},<br \/>\r\ndate = {2013-12-01},<br \/>\r\nbooktitle = {The IEEE International Conference on Computer Vision (ICCV)},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('215','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('203','tp_links')\" style=\"cursor:pointer;\">Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach.<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">ICCV, <\/span><span class=\"tp_pub_additional_pages\">pp. 
2048-2055, <\/span><span class=\"tp_pub_additional_publisher\">IEEE Computer Society, <\/span><span class=\"tp_pub_additional_year\">2013<\/span>, <span class=\"tp_pub_additional_isbn\">ISBN: 978-1-4799-2839-2<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_203\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('203','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_203\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('203','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_203\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{conf\/iccv\/Rios-CabreraT13,<br \/>\r\ntitle = {Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach.},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nurl = {http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13},<br \/>\r\nisbn = {978-1-4799-2839-2},<br \/>\r\nyear  = {2013},<br \/>\r\ndate = {2013-01-01},<br \/>\r\nbooktitle = {ICCV},<br \/>\r\npages = {2048-2055},<br \/>\r\npublisher = {IEEE Computer Society},<br \/>\r\ncrossref = {conf\/iccv\/2013},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('203','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_203\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13\" title=\"http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13\" 
target=\"_blank\">http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('203','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2012\">2012<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne;  Van Gool, Luc J.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('204','tp_links')\" style=\"cursor:pointer;\">Efficient multi-camera vehicle detection, tracking, and identification in a tunnel surveillance application<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Computer Vision and Image Understanding, <\/span><span class=\"tp_pub_additional_volume\">vol. 116, <\/span><span class=\"tp_pub_additional_number\">no 6, <\/span><span class=\"tp_pub_additional_pages\">pp. 
742 - 753, <\/span><span class=\"tp_pub_additional_year\">2012<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1077-3142<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_204\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('204','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_204\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('204','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_204\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{RIOSCABRERA2012742,<br \/>\r\ntitle = {Efficient multi-camera vehicle detection, tracking, and identification in a tunnel surveillance application},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne and Van Gool, Luc J.},<br \/>\r\nurl = {http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380},<br \/>\r\ndoi = {http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006},<br \/>\r\nissn = {1077-3142},<br \/>\r\nyear  = {2012},<br \/>\r\ndate = {2012-01-01},<br \/>\r\njournal = {Computer Vision and Image Understanding},<br \/>\r\nvolume = {116},<br \/>\r\nnumber = {6},<br \/>\r\npages = {742 - 753},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('204','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_204\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380\" title=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380\" 
target=\"_blank\">http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006\" title=\"DOI de seguimiento:http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006\" target=\"_blank\">doi:http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('204','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2011\">2011<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne;  Van Gool, Luc J.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('202','tp_links')\" style=\"cursor:pointer;\">Efficient multi-camera detection, tracking, and identification using a shared set of haar-features.<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">CVPR, <\/span><span class=\"tp_pub_additional_pages\">pp. 
65-71, <\/span><span class=\"tp_pub_additional_publisher\">IEEE Computer Society, <\/span><span class=\"tp_pub_additional_year\">2011<\/span>, <span class=\"tp_pub_additional_isbn\">ISBN: 978-1-4577-0394-2<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_202\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('202','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_202\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('202','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_202\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{conf\/cvpr\/Rios-CabreraTG11,<br \/>\r\ntitle = {Efficient multi-camera detection, tracking, and identification using a shared set of haar-features.},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne and Van Gool, Luc J.},<br \/>\r\nurl = {http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11},<br \/>\r\nisbn = {978-1-4577-0394-2},<br \/>\r\nyear  = {2011},<br \/>\r\ndate = {2011-01-01},<br \/>\r\nbooktitle = {CVPR},<br \/>\r\npages = {65-71},<br \/>\r\npublisher = {IEEE Computer Society},<br \/>\r\ncrossref = {conf\/cvpr\/2011},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('202','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_202\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11\" title=\"http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11\" 
target=\"_blank\">http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('202','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><\/div><\/div><\/strong><\/p>\n<p>[\/et_pb_text][\/et_pb_column][\/et_pb_row][\/et_pb_section][et_pb_section bb_built=\u00bb1&#8243; fullwidth=\u00bboff\u00bb specialty=\u00bboff\u00bb background_color=\u00bbrgba(0,0,0,0.32)\u00bb inner_shadow=\u00bbon\u00bb custom_css_main_element=\u00bbbox-shadow: inset 0px 3px 2px rgba(150, 150, 150, 0.85);\u00bb _builder_version=\u00bb3.0.76&#8243; locked=\u00bboff\u00bb prev_background_color=\u00bb#ffffff\u00bb inner_width=\u00bbauto\u00bb inner_max_width=\u00bb1080px\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb global_module=\u00bb430&#8243;][et_pb_row global_parent=\u00bb430&#8243; make_fullwidth=\u00bboff\u00bb use_custom_width=\u00bboff\u00bb width_unit=\u00bbon\u00bb use_custom_gutter=\u00bboff\u00bb allow_player_pause=\u00bboff\u00bb parallax=\u00bboff\u00bb parallax_method=\u00bbon\u00bb make_equal=\u00bboff\u00bb custom_margin=\u00bb-40px|||\u00bb background_position=\u00bbtop_left\u00bb background_repeat=\u00bbrepeat\u00bb background_size=\u00bbinitial\u00bb parent_locked=\u00bboff\u00bb width=\u00bb80%\u00bb max_width=\u00bb1080px\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb][et_pb_column type=\u00bb4_4&#8243; global_parent=\u00bb430&#8243; custom_padding__hover=\u00bb|||\u00bb custom_padding=\u00bb|||\u00bb parallax_method=\u00bbon\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb][et_pb_image admin_label=\u00bbLogoCINVESTAV del Pie de p\u00e1gina\u00bb global_parent=\u00bb430&#8243; src=\u00bbhttps:\/\/ryma.cinvestav.mx\/wp-content\/uploads\/2014\/08\/roboticaCinvestavOK_transparencia_white.png\u00bb alt=\u00bbRob\u00f3tica y Manufactura Avanzada, Cinvestav\u00bb 
show_in_lightbox=\u00bboff\u00bb url_new_window=\u00bboff\u00bb use_overlay=\u00bboff\u00bb animation=\u00bboff\u00bb sticky=\u00bbon\u00bb align=\u00bbcenter\u00bb max_width=\u00bb95px\u00bb max_width_last_edited=\u00bbon|desktop\u00bb force_fullwidth=\u00bboff\u00bb always_center_on_mobile=\u00bbon\u00bb border_style=\u00bbsolid\u00bb custom_margin=\u00bb||15px|\u00bb _builder_version=\u00bb3.0.76&#8243; parent_locked=\u00bboff\u00bb url=\u00bbhttps:\/\/ryma.cinvestav.mx\/\u00bb show_bottom_space=\u00bbon\u00bb align_last_edited=\u00bbon|desktop\u00bb align_tablet=\u00bbcenter\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb \/][et_pb_text global_parent=\u00bb430&#8243; _builder_version=\u00bb3.0.72&#8243; background_layout=\u00bbdark\u00bb text_orientation=\u00bbcenter\u00bb border_style=\u00bbsolid\u00bb custom_margin=\u00bb||-50px|\u00bb parent_locked=\u00bboff\u00bb module_alignment=\u00bbcenter\u00bb background_color_gradient_stops=\u00bb#2b87da 0%|#29c4a9 100%\u00bb]<\/p>\n<hr \/>\n<p style=\"text-align: center\">Av. Industrial\u00a0Metalurgia\u00a0#1062,\u00a0Parque Ind. Ramos Arizpe,\u00a0Ramos Arizpe, Coah.\u00a0C.P. 25900, M\u00e9xico. \u00a0Tel. 
+52 (844) 438-9600<\/p>\n<p>[\/et_pb_text][\/et_pb_column][\/et_pb_row][\/et_pb_section]<\/p>\n","protected":false},"excerpt":{"rendered":"<p><div class='et-box et-shadow'>\n\t\t\t\t\t<div class='et-box-content'>PUBLICACIONES<\/div><\/div>Para ver las publicaciones de todo Rob\u00f3tica y Manufactura Avanzada, ver:\u00a0 Publicaciones RYMAPara una lista actualizada de publicaciones vea: GOOGLE SCHOLAR<div class=\"teachpress_pub_list\"><form name=\"tppublistform\" method=\"get\"><a name=\"tppubs\" id=\"tppubs\"><\/a><div class=\"teachpress_filter\"><select class=\"default\" name=\"yr\" id=\"yr\" tabindex=\"2\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option value=\"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=#tppubs\">Todos los a\u00f1os<\/option>\r\n                   <option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2020#tppubs\" >2020<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2019#tppubs\" >2019<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2018#tppubs\" >2018<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2017#tppubs\" >2017<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2016#tppubs\" >2016<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2015#tppubs\" >2015<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2014#tppubs\" >2014<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2013#tppubs\" >2013<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2012#tppubs\" >2012<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2011#tppubs\" >2011<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2010#tppubs\" >2010<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2009#tppubs\" >2009<\/option><option value = 
\"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2008#tppubs\" >2008<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2007#tppubs\" >2007<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2006#tppubs\" >2006<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2005#tppubs\" >2005<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2004#tppubs\" >2004<\/option><option value = \"tgid=&amp;type=&amp;auth=&amp;usr=&amp;yr=2003#tppubs\" >2003<\/option>\r\n                <\/select><select class=\"default\" name=\"type\" id=\"type\" tabindex=\"3\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option value=\"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=#tppubs\">Todas las tipolog\u00edas<\/option>\r\n                   <option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=article#tppubs\" >Art\u00edculos de revista<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=conference#tppubs\" >Conferencias<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=inbook#tppubs\" >Cap\u00edtulos de libros<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=inproceedings#tppubs\" >Proceedings Articles<\/option><option value = \"tgid=&amp;yr=&amp;auth=&amp;usr=&amp;type=proceedings#tppubs\" >Actas de congresos<\/option>\r\n                <\/select><select class=\"default\" name=\"usr\" id=\"usr\" tabindex=\"6\" onchange=\"teachpress_jumpMenu('parent',this, 'https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/?')\">\r\n                   <option value=\"tgid=&amp;yr=&amp;type=&amp;auth=&amp;usr=#tppubs\">Todos los usuarios<\/option>\r\n                   <option value = \"tgid=&amp;yr=&amp;type=&amp;auth=&amp;usr=12#tppubs\" >mcastelan<\/option>\r\n                <\/select><\/div><\/form><div class=\"teachpress_publication_list\"><h3 class=\"tp_h3\" id=\"tp_h3_2019\">2019<\/h3><h3 
class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rico-Fernandez, Maria;  Rios-Cabrera, Reyes;  Castelan, Mario;  Guerrero-Reyes, Hector;  Juarez-Maldonado, Antonio<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('224','tp_links')\" style=\"cursor:pointer;\">A contextualized approach for segmentation of foliage in different crop species<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Computers and Electronics in Agriculture, <\/span><span class=\"tp_pub_additional_volume\">vol. 156, <\/span><span class=\"tp_pub_additional_pages\">pp. 378-386, <\/span><span class=\"tp_pub_additional_year\">2019<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 0168-1699<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_224\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('224','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_224\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('224','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_224\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rico-Fernandez2019,<br \/>\r\ntitle = {A contextualized approach for segmentation of foliage in different crop species},<br \/>\r\nauthor = {Rico-Fernandez, Maria and Rios-Cabrera, Reyes and Castelan, Mario and Guerrero-Reyes, Hector and Juarez-Maldonado, Antonio},<br \/>\r\neditor = {Elsevier},<br \/>\r\nurl = {https:\/\/doi.org\/10.1016\/j.compag.2018.11.033},<br \/>\r\nissn = {0168-1699},<br \/>\r\nyear  = 
{2019},<br \/>\r\ndate = {2019-01-01},<br \/>\r\njournal = {Computers and Electronics in Agriculture},<br \/>\r\nvolume = {156},<br \/>\r\npages = {378-386},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('224','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_224\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/doi.org\/10.1016\/j.compag.2018.11.033\" title=\"https:\/\/doi.org\/10.1016\/j.compag.2018.11.033\" target=\"_blank\">https:\/\/doi.org\/10.1016\/j.compag.2018.11.033<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('224','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2017\">2017<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes;  Hsieh, S J;  Howarth, M.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('205','tp_links')\" style=\"cursor:pointer;\">A hybrid non-invasive method for internal\/external quality assessment of potatoes<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">European Food Research and Technology, <\/span><span class=\"tp_pub_additional_year\">2017<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1438-2385<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_abstract')\" 
title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_205\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('205','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_205\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Lopez-Juarez2017,<br \/>\r\ntitle = {A hybrid non-invasive method for internal\/external quality assessment of potatoes},<br \/>\r\nauthor = {Lopez-Juarez, Ismael and Rios-Cabrera, Reyes and Hsieh,S J and Howarth, M .},<br \/>\r\nurl = {https:\/\/doi.org\/10.1007\/s00217-017-2936-9},<br \/>\r\ndoi = {10.1007\/s00217-017-2936-9},<br \/>\r\nissn = {1438-2385},<br \/>\r\nyear  = {2017},<br \/>\r\ndate = {2017-07-11},<br \/>\r\njournal = {European Food Research and Technology},<br \/>\r\nabstract = {Consumers purchase fruits and vegetables based on its quality, which can be defined as a degree of excellence which is the result of a combination of characteristics, attributes and properties that have significance for market acceptability. In this paper, a novel hybrid active imaging methodology for potato quality inspection that uses an optical colour camera and an infrared thermal camera is presented. The methodology employs an artificial neural network (ANN) that uses quality data composed by two descriptors as input. The ANN works as a feature classifier so that its output is the potato quality grade. The input vector contains information related to external characteristics, such as shape, weight, length and width. Internal characteristics are also accounted for in the input vector in the form of excessive sugar content. 
The extra sugar content of the potato is an important problem for potato growers and potato chip manufacturers. Extra sugar content could result in diseases or wounds in the potato tuber. In general, potato tubers with low sugar content are considered as having a higher quality. The validation of the methodology was made through experimentation which consisted in fusing both, external and internal characteristics in the input vector to the ANN for an overall quality classification. Results using internal data as obtained from an infrared camera and fused with optical external parameters demonstrated the feasibility of the method since the prediction accuracy increased during potato grading.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_205\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Consumers purchase fruits and vegetables based on its quality, which can be defined as a degree of excellence which is the result of a combination of characteristics, attributes and properties that have significance for market acceptability. In this paper, a novel hybrid active imaging methodology for potato quality inspection that uses an optical colour camera and an infrared thermal camera is presented. The methodology employs an artificial neural network (ANN) that uses quality data composed by two descriptors as input. The ANN works as a feature classifier so that its output is the potato quality grade. The input vector contains information related to external characteristics, such as shape, weight, length and width. Internal characteristics are also accounted for in the input vector in the form of excessive sugar content. 
The extra sugar content of the potato is an important problem for potato growers and potato chip manufacturers. Extra sugar content could result in diseases or wounds in the potato tuber. In general, potato tubers with low sugar content are considered as having a higher quality. The validation of the methodology was made through experimentation which consisted in fusing both, external and internal characteristics in the input vector to the ANN for an overall quality classification. Results using internal data as obtained from an infrared camera and fused with optical external parameters demonstrated the feasibility of the method since the prediction accuracy increased during potato grading.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_205\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/doi.org\/10.1007\/s00217-017-2936-9\" title=\"https:\/\/doi.org\/10.1007\/s00217-017-2936-9\" target=\"_blank\">https:\/\/doi.org\/10.1007\/s00217-017-2936-9<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00217-017-2936-9\" title=\"DOI de seguimiento:10.1007\/s00217-017-2936-9\" target=\"_blank\">doi:10.1007\/s00217-017-2936-9<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('205','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2016\">2016<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Aviles-Vi\u00f1as, Jaime F;  Rios-Cabrera, Reyes;  Lopez-Juarez, Ismael<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('7','tp_links')\" 
style=\"cursor:pointer;\">On-line learning of welding bead geometry in industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">The International Journal of Advanced Manufacturing Technology, <\/span><span class=\"tp_pub_additional_volume\">vol. 83, <\/span><span class=\"tp_pub_additional_number\">no 1, <\/span><span class=\"tp_pub_additional_pages\">pp. 217\u2013231, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1433-3015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_7\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('7','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_7\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Aviles-Vi\\~{n}as2016b,<br \/>\r\ntitle = {On-line learning of welding bead geometry in industrial robots},<br \/>\r\nauthor = {Aviles-Vi\\~{n}as, Jaime F and Rios-Cabrera, Reyes and Lopez-Juarez, Ismael },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6},<br \/>\r\ndoi = {10.1007\/s00170-015-7422-6},<br \/>\r\nissn = {1433-3015},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {The International Journal of Advanced Manufacturing Technology},<br \/>\r\nvolume = {83},<br \/>\r\nnumber = {1},<br \/>\r\npages = 
{217--231},<br \/>\r\nabstract = {In this paper, we propose an architecture based on an artificial neural network (ANN), to learn welding skills automatically in industrial robots. With the aid of an optic camera and a laser-based sensor, the bead geometry (width and height) is measured. We propose a real-time computer vision algorithm to extract training patterns in order to acquire knowledge to later predict specific geometries. The proposal is implemented and tested in an industrial KUKA KR16 robot and a GMAW type machine within a manufacturing cell. Several data analysis are described as well as off-line and on-line training, learning strategies, and testing experimentation. It is demonstrated during our experiments that, after learning the skill, the robot is able to produce the requested bead geometry even without any knowledge about the welding parameters such as arc voltage and current. We implemented an on-line learning test, where the whole experiments and learning process take only about 4 min. Using this knowledge later, we obtained up to 95 % accuracy in prediction.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_7\" style=\"display:none;\"><div class=\"tp_abstract_entry\">In this paper, we propose an architecture based on an artificial neural network (ANN), to learn welding skills automatically in industrial robots. With the aid of an optic camera and a laser-based sensor, the bead geometry (width and height) is measured. We propose a real-time computer vision algorithm to extract training patterns in order to acquire knowledge to later predict specific geometries. The proposal is implemented and tested in an industrial KUKA KR16 robot and a GMAW type machine within a manufacturing cell. 
Several data analysis are described as well as off-line and on-line training, learning strategies, and testing experimentation. It is demonstrated during our experiments that, after learning the skill, the robot is able to produce the requested bead geometry even without any knowledge about the welding parameters such as arc voltage and current. We implemented an on-line learning test, where the whole experiments and learning process take only about 4 min. Using this knowledge later, we obtained up to 95 % accuracy in prediction.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_7\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" title=\"http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00170-015-7422-6\" title=\"DOI de seguimiento:10.1007\/s00170-015-7422-6\" target=\"_blank\">doi:10.1007\/s00170-015-7422-6<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('7','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Morales-Diaz, America B.;  Aviles-Vi\u00f1as, Jaime F;  Lopez-Juarez, Ismael<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('6','tp_links')\" style=\"cursor:pointer;\">Robotic GMAW online learning: issues and experiments<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span 
class=\"tp_pub_additional_journal\">The International Journal of Advanced Manufacturing Technology, <\/span><span class=\"tp_pub_additional_volume\">vol. 87, <\/span><span class=\"tp_pub_additional_number\">no 5, <\/span><span class=\"tp_pub_additional_pages\">pp. 2113\u20132134, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1433-3015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_6\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('6','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_6\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rios-Cabrera2016,<br \/>\r\ntitle = {Robotic GMAW online learning: issues and experiments},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Morales-Diaz, America B. and Aviles-Vi\\~{n}as, Jaime F and Lopez-Juarez, Ismael },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0},<br \/>\r\ndoi = {10.1007\/s00170-016-8618-0},<br \/>\r\nissn = {1433-3015},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {The International Journal of Advanced Manufacturing Technology},<br \/>\r\nvolume = {87},<br \/>\r\nnumber = {5},<br \/>\r\npages = {2113--2134},<br \/>\r\nabstract = {This paper presents three main contributions: (i) an experimental analysis of variables, using well-defined statistical patterns applied to the main parameters of the welding process. 
(ii) An on-line\/off-line learning and testing method, showing that robots can acquire a useful knowledge base without human intervention to learn and reproduce bead geometries. And finally, (iii) an on-line testing analysis including penetration of the bead, that is used to train an artificial neural network (ANN). For the experiments, an optic camera was used in order to measure bead geometry (width and height). Also real-time computer vision algorithms were implemented to extract training patterns. The proposal was carried out using an industrial KUKA robot and a GMAW type machine inside a manufacturing cell. We present experimental analysis that show different issues and solutions to build an industrial adaptive system for the robotics welding process.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_6\" style=\"display:none;\"><div class=\"tp_abstract_entry\">This paper presents three main contributions: (i) an experimental analysis of variables, using well-defined statistical patterns applied to the main parameters of the welding process. (ii) An on-line\/off-line learning and testing method, showing that robots can acquire a useful knowledge base without human intervention to learn and reproduce bead geometries. And finally, (iii) an on-line testing analysis including penetration of the bead, that is used to train an artificial neural network (ANN). For the experiments, an optic camera was used in order to measure bead geometry (width and height). Also real-time computer vision algorithms were implemented to extract training patterns. The proposal was carried out using an industrial KUKA robot and a GMAW type machine inside a manufacturing cell. 
We present experimental analysis that show different issues and solutions to build an industrial adaptive system for the robotics welding process.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_6\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" title=\"http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s00170-016-8618-0\" title=\"DOI de seguimiento:10.1007\/s00170-016-8618-0\" target=\"_blank\">doi:10.1007\/s00170-016-8618-0<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('6','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Benitez Perez, H.;  Lopez-Juarez, Ismael;  Garza-Alanis, P. C.;  Rios-Cabrera, Reyes;  Duran Chavesti, A.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('163','tp_links')\" style=\"cursor:pointer;\">Reconfiguration Distributed Objects in an Intelligent Manufacturing Cell<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">IEEE Latin America Transactions, <\/span><span class=\"tp_pub_additional_volume\">vol. 14, <\/span><span class=\"tp_pub_additional_number\">no 1, <\/span><span class=\"tp_pub_additional_pages\">pp. 
136-146, <\/span><span class=\"tp_pub_additional_year\">2016<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1548-0992<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_163\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('163','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_163\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{7430073,<br \/>\r\ntitle = {Reconfiguration Distributed Objects in an Intelligent Manufacturing Cell},<br \/>\r\nauthor = {Benitez Perez, H. and Lopez-Juarez, Ismael and Garza-Alanis, P. C. and Rios-Cabrera, Reyes and Duran Chavesti, A.},<br \/>\r\ndoi = {10.1109\/TLA.2016.7430073},<br \/>\r\nissn = {1548-0992},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-01-01},<br \/>\r\njournal = {IEEE Latin America Transactions},<br \/>\r\nvolume = {14},<br \/>\r\nnumber = {1},<br \/>\r\npages = {136-146},<br \/>\r\nabstract = {A manufacture system with the abilities of easy reconfiguration and highly scalability becomes flexible, dynamic and open to the use of software technologies. To give these abilities to a manufacture cell formed of three industrial robots and two conveyors, a middleware based on the programming standard Common Object Request Broker Architecture (CORBA) was developed, thus creating a distributed manufacture cell, allowing us to have a real production with different final products. 
In order to optimize the production times of the different products to be manufactured, a product scheduler was developed using the algorithm Earliest Deadline First (EDF) and the support algorithm Deferrable Server (DS). Given that failures may occur on any of the specialized modules of the manufacture system, the self reconfiguration of the manufacture system is something very desirable. This article proposes an algorithm to solve this problem, the algorithm identifies the failures in relation to the time it takes the system to make a product, then makes a modification on the working speed of the plant elements of the specialized modules.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_163\" style=\"display:none;\"><div class=\"tp_abstract_entry\">A manufacture system with the abilities of easy reconfiguration and highly scalability becomes flexible, dynamic and open to the use of software technologies. To give these abilities to a manufacture cell formed of three industrial robots and two conveyors, a middleware based on the programming standard Common Object Request Broker Architecture (CORBA) was developed, thus creating a distributed manufacture cell, allowing us to have a real production with different final products. In order to optimize the production times of the different products to be manufactured, a product scheduler was developed using the algorithm Earliest Deadline First (EDF) and the support algorithm Deferrable Server (DS). Given that failures may occur on any of the specialized modules of the manufacture system, the self reconfiguration of the manufacture system is something very desirable. 
This article proposes an algorithm to solve this problem, the algorithm identifies the failures in relation to the time it takes the system to make a product, then makes a modification on the working speed of the plant elements of the specialized modules.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_163\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1109\/TLA.2016.7430073\" title=\"DOI de seguimiento:10.1109\/TLA.2016.7430073\" target=\"_blank\">doi:10.1109\/TLA.2016.7430073<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('163','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_proceedings\">Actas de congresos<\/h3><div class=\"tp_publication tp_publication_proceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes;  Gutierrez-Flores, Hugo<\/p><p class=\"tp_pub_title\">Towards intelligent robotic agents for cooperative tasks <span class=\"tp_pub_type tp_  proceedings\">Actas de congresos<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_year\">2016<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_139\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('139','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_139\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@proceedings{Mirelez-Delgado2016,<br \/>\r\ntitle = {Towards intelligent robotic agents for cooperative tasks},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. 
and Rios-Cabrera, Reyes and Gutierrez-Flores, Hugo},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-06-06},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {proceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('139','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_proceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes<\/p><p class=\"tp_pub_title\">Kinematic control for an omnidirectional mobile manipulator <span class=\"tp_pub_type tp_  proceedings\">Actas de congresos<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_year\">2016<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_157\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('157','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_157\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@proceedings{Mirelez-Delgado2016,<br \/>\r\ntitle = {Kinematic control for an omnidirectional mobile manipulator},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. 
and Rios-Cabrera, Reyes},<br \/>\r\nyear  = {2016},<br \/>\r\ndate = {2016-06-06},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {proceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('157','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2015\">2015<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Aviles-Vi\u00f1as, Jaime F;  Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('13','tp_links')\" style=\"cursor:pointer;\">Acquisition of welding skills in industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Industrial Robot: An International Journal, <\/span><span class=\"tp_pub_additional_volume\">vol. 42, <\/span><span class=\"tp_pub_additional_number\">no 2, <\/span><span class=\"tp_pub_additional_pages\">pp. 
156-166, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_13\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('13','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_13\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{doi:10.1108\/IR-09-2014-0395,<br \/>\r\ntitle = {Acquisition of welding skills in industrial robots},<br \/>\r\nauthor = {Aviles-Vi\\~{n}as, Jaime F and Lopez-Juarez, Ismael and Rios-Cabrera, Reyes },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395},<br \/>\r\ndoi = {10.1108\/IR-09-2014-0395},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\njournal = {Industrial Robot: An International Journal},<br \/>\r\nvolume = {42},<br \/>\r\nnumber = {2},<br \/>\r\npages = {156-166},<br \/>\r\nabstract = {Purpose \\textendash The purpose of this paper was to propose a method based on an Artificial Neural Network and a real-time vision algorithm, to learn welding skills in industrial robotics. Design\/methodology\/approach \\textendash By using an optic camera to measure the bead geometry (width and height), the authors propose a real-time computer vision algorithm to extract training patterns and to enable an industrial robot to acquire and learn autonomously the welding skill. To test the approach, an industrial KUKA robot and a welding gas metal arc welding machine were used in a manufacturing cell. 
Findings \\textendash Several data analyses are described, showing empirically that industrial robots can acquire the skill even if the specific welding parameters are unknown. Research limitations\/implications \\textendash The approach considers only stringer beads. Weave bead and bead penetration are not considered. Practical implications \\textendash With the proposed approach, it is possible to learn specific welding parameters despite of the material, type of robot or welding machine. This is due to the fact that the feedback system produces automatic measurements that are labelled prior to the learning process. Originality\/value \\textendash The main contribution is that the complex learning process is reduced into an input-process-output system, where the process part is learnt automatically without human supervision, by registering the patterns with an automatically calibrated vision system.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_13\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Purpose \u2013 The purpose of this paper was to propose a method based on an Artificial Neural Network and a real-time vision algorithm, to learn welding skills in industrial robotics. Design\/methodology\/approach \u2013 By using an optic camera to measure the bead geometry (width and height), the authors propose a real-time computer vision algorithm to extract training patterns and to enable an industrial robot to acquire and learn autonomously the welding skill. To test the approach, an industrial KUKA robot and a welding gas metal arc welding machine were used in a manufacturing cell. 
Findings \u2013 Several data analyses are described, showing empirically that industrial robots can acquire the skill even if the specific welding parameters are unknown. Research limitations\/implications \u2013 The approach considers only stringer beads. Weave bead and bead penetration are not considered. Practical implications \u2013 With the proposed approach, it is possible to learn specific welding parameters despite of the material, type of robot or welding machine. This is due to the fact that the feedback system produces automatic measurements that are labelled prior to the learning process. Originality\/value \u2013 The main contribution is that the complex learning process is reduced into an input-process-output system, where the process part is learnt automatically without human supervision, by registering the patterns with an automatically calibrated vision system.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_13\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" title=\"http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" target=\"_blank\">http:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1108\/IR-09-2014-0395\" title=\"DOI de seguimiento:10.1108\/IR-09-2014-0395\" target=\"_blank\">doi:10.1108\/IR-09-2014-0395<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('13','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Navarro-Gonzalez, Jose Luis;  Lopez-Juarez, Ismael;  Ordaz-Hernandez, Keny;  Rios-Cabrera, Reyes<\/p><p 
class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('14','tp_links')\" style=\"cursor:pointer;\">On-line incremental learning for unknown conditions during assembly operations with industrial robots<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Evolving Systems, <\/span><span class=\"tp_pub_additional_volume\">vol. 6, <\/span><span class=\"tp_pub_additional_number\">no 2, <\/span><span class=\"tp_pub_additional_pages\">pp. 101\u2013114, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1868-6486<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_14\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('14','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_14\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Navarro-Gonzalez2015,<br \/>\r\ntitle = {On-line incremental learning for unknown conditions during assembly operations with industrial robots},<br \/>\r\nauthor = {Navarro-Gonzalez, Jose Luis and Lopez-Juarez, Ismael and Ordaz-Hernandez, Keny and Rios-Cabrera, Reyes },<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x},<br \/>\r\ndoi = {10.1007\/s12530-014-9125-x},<br \/>\r\nissn = {1868-6486},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = 
{2015-01-01},<br \/>\r\njournal = {Evolving Systems},<br \/>\r\nvolume = {6},<br \/>\r\nnumber = {2},<br \/>\r\npages = {101--114},<br \/>\r\nabstract = {The assembly operation using industrial robots can be accomplished successfully in well-structured environments where the mating pair location is known in advance. However, in real-world scenarios there are uncertainties associated to sensing, control and modelling errors that make the assembly task very complex. In addition, there are also unmodeled uncertainties that have to be taken into account for an effective control algorithm to succeed. Among these uncertainties, it can be mentioned disturbances, backlash and aging of mechanisms. In this paper, a method to overcome the effect of those uncertainties based on the Fuzzy ARTMAP artificial neural network (ANN) to successfully accomplish the assembly task is proposed. Experimental work is reported using an industrial 6 DOF robot arm in conjunction with a vision system for part location and wrist force\/torque sensing data for assembly. Force data is fed into an ANN evolving controller during a typical peg in hole (PIH) assembly operation. The controller uses an incremental learning mechanism that is solely guided by the sensed forces. In this article, two approaches are presented in order to compare the incremental learning capability of the manipulator. The first approach uses a primitive knowledge base (PKB) containing 16 primitive movements to learn online the first insertion. During assembly, the manipulator learns new patterns according to the learning criteria which turn the PKB into an enhanced knowledge base (EKB). During a second insertion the controller uses effectively the EKB and operation improves. The second approach employs minimum information (it contains only the assembly direction) and the process starts from scratch. After several operations, that knowledge base increases by including only the needed patterns to perform the insertion. 
Experimental results showed that the evolving controller is able to assemble the matting pairs enhancing its knowledge whenever it is needed depending on the part geometry and level of expertise. Our approach is demonstrated through several PIH operations with different tolerances and part geometry. As the robot's expertise evolves, the PIH operation is carried out faster with shorter assembly trajectories.},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_14\" style=\"display:none;\"><div class=\"tp_abstract_entry\">The assembly operation using industrial robots can be accomplished successfully in well-structured environments where the mating pair location is known in advance. However, in real-world scenarios there are uncertainties associated to sensing, control and modelling errors that make the assembly task very complex. In addition, there are also unmodeled uncertainties that have to be taken into account for an effective control algorithm to succeed. Among these uncertainties, it can be mentioned disturbances, backlash and aging of mechanisms. In this paper, a method to overcome the effect of those uncertainties based on the Fuzzy ARTMAP artificial neural network (ANN) to successfully accomplish the assembly task is proposed. Experimental work is reported using an industrial 6 DOF robot arm in conjunction with a vision system for part location and wrist force\/torque sensing data for assembly. Force data is fed into an ANN evolving controller during a typical peg in hole (PIH) assembly operation. The controller uses an incremental learning mechanism that is solely guided by the sensed forces. In this article, two approaches are presented in order to compare the incremental learning capability of the manipulator. 
The first approach uses a primitive knowledge base (PKB) containing 16 primitive movements to learn online the first insertion. During assembly, the manipulator learns new patterns according to the learning criteria which turn the PKB into an enhanced knowledge base (EKB). During a second insertion the controller uses effectively the EKB and operation improves. The second approach employs minimum information (it contains only the assembly direction) and the process starts from scratch. After several operations, that knowledge base increases by including only the needed patterns to perform the insertion. Experimental results showed that the evolving controller is able to assemble the matting pairs enhancing its knowledge whenever it is needed depending on the part geometry and level of expertise. Our approach is demonstrated through several PIH operations with different tolerances and part geometry. As the robot's expertise evolves, the PIH operation is carried out faster with shorter assembly trajectories.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_14\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" title=\"http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" target=\"_blank\">http:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1007\/s12530-014-9125-x\" title=\"DOI de seguimiento:10.1007\/s12530-014-9125-x\" target=\"_blank\">doi:10.1007\/s12530-014-9125-x<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('14','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p 
class=\"tp_pub_author\"> Navarro-Gonzalez, Jose Luis;  Lopez-Juarez, Ismael;  Rios-Cabrera, Reyes;  Ordaz-Hernandez, Keny<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('15','tp_links')\" style=\"cursor:pointer;\">On-line knowledge acquisition and enhancement in robotic assembly tasks<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Robotics and Computer-Integrated Manufacturing, <\/span><span class=\"tp_pub_additional_volume\">vol. 33, <\/span><span class=\"tp_pub_additional_pages\">pp. 78 - 89, <\/span><span class=\"tp_pub_additional_year\">2015<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 0736-5845<\/span><span class=\"tp_pub_additional_note\">, (Special Issue on Knowledge Driven Robotics and Manufacturing)<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_15\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('15','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_15\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{NavarroGonzalez201578b,<br \/>\r\ntitle = {On-line knowledge acquisition and enhancement in robotic assembly tasks},<br \/>\r\nauthor = {Navarro-Gonzalez, Jose Luis and Lopez-Juarez, Ismael and Rios-Cabrera, Reyes and Ordaz-Hernandez, Keny},<br \/>\r\nurl = 
{http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X},<br \/>\r\ndoi = {http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013},<br \/>\r\nissn = {0736-5845},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\njournal = {Robotics and Computer-Integrated Manufacturing},<br \/>\r\nvolume = {33},<br \/>\r\npages = {78 - 89},<br \/>\r\nabstract = {Abstract Industrial robots are reliable machines for manufacturing tasks such as welding, panting, assembly, palletizing or kitting operations. They are traditionally programmed by an operator using a teach pendant in a point-to-point scheme with limited sensing capabilities such as industrial vision systems and force\/torque sensing. The use of these sensing capabilities is associated to the particular robot controller, operative systems and programming language. Today, robots can react to environment changes specific to their task domain but are still unable to learn skills to effectively use their current knowledge. The need for such a skill in unstructured environments where knowledge can be acquired and enhanced is desirable so that robots can effectively interact in multimodal real-world scenarios. In this article we present a multimodal assembly controller (MAC) approach to embed and effectively enhance knowledge into industrial robots working in multimodal manufacturing scenarios such as assembly during kitting operations with varying shapes and tolerances. During learning, the robot uses its vision and force capabilities resembling a human operator carrying out the same operation. The approach consists of using a MAC based on the Fuzzy ARTMAP artificial neural network in conjunction with a knowledge base. The robot starts the operation having limited initial knowledge about what task it has to accomplish. During the operation, the robot learns the skill for recognising assembly parts and how to assemble them. 
The skill acquisition is evaluated by counting the steps to complete the assembly, length of the followed assembly path and compliant behaviour. The performance improves with time so that the robot becomes an expert demonstrated by the assembly of a kit with different part geometries. The kit is unknown by the robot at the beginning of the operation; therefore, the kit type, location and orientation are unknown as well as the parts to be assembled since they are randomly fed by a conveyor belt.},<br \/>\r\nnote = {Special Issue on Knowledge Driven Robotics and Manufacturing},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_15\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Abstract Industrial robots are reliable machines for manufacturing tasks such as welding, panting, assembly, palletizing or kitting operations. They are traditionally programmed by an operator using a teach pendant in a point-to-point scheme with limited sensing capabilities such as industrial vision systems and force\/torque sensing. The use of these sensing capabilities is associated to the particular robot controller, operative systems and programming language. Today, robots can react to environment changes specific to their task domain but are still unable to learn skills to effectively use their current knowledge. The need for such a skill in unstructured environments where knowledge can be acquired and enhanced is desirable so that robots can effectively interact in multimodal real-world scenarios. 
In this article we present a multimodal assembly controller (MAC) approach to embed and effectively enhance knowledge into industrial robots working in multimodal manufacturing scenarios such as assembly during kitting operations with varying shapes and tolerances. During learning, the robot uses its vision and force capabilities resembling a human operator carrying out the same operation. The approach consists of using a MAC based on the Fuzzy ARTMAP artificial neural network in conjunction with a knowledge base. The robot starts the operation having limited initial knowledge about what task it has to accomplish. During the operation, the robot learns the skill for recognising assembly parts and how to assemble them. The skill acquisition is evaluated by counting the steps to complete the assembly, length of the followed assembly path and compliant behaviour. The performance improves with time so that the robot becomes an expert demonstrated by the assembly of a kit with different part geometries. 
The kit is unknown by the robot at the beginning of the operation; therefore, the kit type, location and orientation are unknown as well as the parts to be assembled since they are randomly fed by a conveyor belt.<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_15\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X\" title=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X\" target=\"_blank\">http:\/\/www.sciencedirect.com\/science\/article\/pii\/S073658451400074X<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013\" title=\"DOI de seguimiento:http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013\" target=\"_blank\">doi:http:\/\/dx.doi.org\/10.1016\/j.rcim.2014.08.013<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('15','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Mirelez-Delgado, Flabio;  Morales-Diaz, America B.;  Rios-Cabrera, Reyes;  Perez-Villeda, Hector Manuel<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('105','tp_links')\" style=\"cursor:pointer;\">Control Servovisual de un Kuka youBot para la manipulacion y traslado de objetos<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_year\">2015<\/span>.<\/p><p 
class=\"tp_pub_menu\"><span class=\"tp_abstract_link\"><a id=\"tp_abstract_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_abstract')\" title=\"Mostrar resumen\" style=\"cursor:pointer;\">Resumen<\/a><\/span> | <span class=\"tp_resource_link\"><a id=\"tp_links_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_105\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('105','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_105\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{Mireles-Delgado2015,<br \/>\r\ntitle = {Control Servovisual de un Kuka youBot para la manipulacion y traslado de objetos},<br \/>\r\nauthor = {Mirelez-Delgado, Flabio and Morales-Diaz, America B. and Rios-Cabrera, Reyes and Perez-Villeda, Hector Manuel},<br \/>\r\nurl = {http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf},<br \/>\r\nyear  = {2015},<br \/>\r\ndate = {2015-01-01},<br \/>\r\nabstract = {Este trabajo presenta la implementaci\u00b4on de un Control Servovisual Basado en<br \/>\r\nImagen en un robot manipulador m\u00b4ovil omnidireccional Kuka youBot. El sistema de visi\u00b4on<br \/>\r\nest\u00b4a compuesto por un sensor RGB-D Asus Xtion Pror. La ley de control implementada tiene<br \/>\r\nla estructura de un PD cl\u00b4asico para la plataforma m\u00b4ovil. El manipulador m\u00b4ovil se desplaza a<br \/>\r\npuntos 3D conocidos mediante el c\u00b4alculo de cinem\u00b4atica inversa. 
En este art\u00b4\u0131culo se demuestra<br \/>\r\nla efectividad del algoritmo en la localizaci\u00b4on del objeto de inter\u00b4es as\u00b4\u0131 como en la manipulaci\u00b4on<br \/>\r\ndel mismo para llevarlo de su lugar original a otro espacio deseado.<br \/>\r\n},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_abstract\" id=\"tp_abstract_105\" style=\"display:none;\"><div class=\"tp_abstract_entry\">Este trabajo presenta la implementaci\u00b4on de un Control Servovisual Basado en<br \/>\r\nImagen en un robot manipulador m\u00b4ovil omnidireccional Kuka youBot. El sistema de visi\u00b4on<br \/>\r\nest\u00b4a compuesto por un sensor RGB-D Asus Xtion Pror. La ley de control implementada tiene<br \/>\r\nla estructura de un PD cl\u00b4asico para la plataforma m\u00b4ovil. El manipulador m\u00b4ovil se desplaza a<br \/>\r\npuntos 3D conocidos mediante el c\u00b4alculo de cinem\u00b4atica inversa. 
En este art\u00b4\u0131culo se demuestra<br \/>\r\nla efectividad del algoritmo en la localizaci\u00b4on del objeto de inter\u00b4es as\u00b4\u0131 como en la manipulaci\u00b4on<br \/>\r\ndel mismo para llevarlo de su lugar original a otro espacio deseado.<br \/>\r\n<\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_abstract')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_105\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-file-pdf\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf\" title=\"http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf\" target=\"_blank\">http:\/\/amca.mx\/memorias\/amca2015\/articulos\/0044_MiCT3-04.pdf<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('105','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2014\">2014<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('64','tp_links')\" style=\"cursor:pointer;\">Boosting Masked Dominant Orientation Templates for Efficient Object Detection<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Comput. Vis. Image Underst., <\/span><span class=\"tp_pub_additional_volume\">vol. 120, <\/span><span class=\"tp_pub_additional_pages\">pp. 
103\u2013116, <\/span><span class=\"tp_pub_additional_year\">2014<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1077-3142<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_64\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('64','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_64\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('64','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_64\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{Rios-Cabrera:2014:BMD:2583127.2583285,<br \/>\r\ntitle = {Boosting Masked Dominant Orientation Templates for Efficient Object Detection},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nurl = {http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008},<br \/>\r\ndoi = {10.1016\/j.cviu.2013.12.008},<br \/>\r\nissn = {1077-3142},<br \/>\r\nyear  = {2014},<br \/>\r\ndate = {2014-01-01},<br \/>\r\njournal = {Comput. Vis. 
Image Underst.},<br \/>\r\nvolume = {120},<br \/>\r\npages = {103--116},<br \/>\r\npublisher = {Elsevier Science Inc.},<br \/>\r\naddress = {New York, NY, USA},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('64','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_64\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" title=\"http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" target=\"_blank\">http:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/10.1016\/j.cviu.2013.12.008\" title=\"DOI de seguimiento:10.1016\/j.cviu.2013.12.008\" target=\"_blank\">doi:10.1016\/j.cviu.2013.12.008<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('64','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2013\">2013<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\">Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">The IEEE International Conference on Computer Vision (ICCV), <\/span><span class=\"tp_pub_additional_year\">2013<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_215\" 
class=\"tp_show\" onclick=\"teachpress_pub_showhide('215','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_215\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{Rios-Cabrera_2013_ICCV__B,<br \/>\r\ntitle = {Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nyear  = {2013},<br \/>\r\ndate = {2013-12-01},<br \/>\r\nbooktitle = {The IEEE International Conference on Computer Vision (ICCV)},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('215','tp_bibtex')\">Cerrar<\/a><\/p><\/div><\/div><\/div><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('203','tp_links')\" style=\"cursor:pointer;\">Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach.<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">ICCV, <\/span><span class=\"tp_pub_additional_pages\">pp. 
2048-2055, <\/span><span class=\"tp_pub_additional_publisher\">IEEE Computer Society, <\/span><span class=\"tp_pub_additional_year\">2013<\/span>, <span class=\"tp_pub_additional_isbn\">ISBN: 978-1-4799-2839-2<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_203\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('203','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_203\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('203','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_203\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{conf\/iccv\/Rios-CabreraT13,<br \/>\r\ntitle = {Discriminatively Trained Templates for 3D Object Detection: A Real Time Scalable Approach.},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne},<br \/>\r\nurl = {http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13},<br \/>\r\nisbn = {978-1-4799-2839-2},<br \/>\r\nyear  = {2013},<br \/>\r\ndate = {2013-01-01},<br \/>\r\nbooktitle = {ICCV},<br \/>\r\npages = {2048-2055},<br \/>\r\npublisher = {IEEE Computer Society},<br \/>\r\ncrossref = {conf\/iccv\/2013},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('203','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_203\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13\" title=\"http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13\" 
target=\"_blank\">http:\/\/dblp.uni-trier.de\/db\/conf\/iccv\/iccv2013.html#Rios-CabreraT13<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('203','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2012\">2012<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_article\">Art\u00edculos de revista<\/h3><div class=\"tp_publication tp_publication_article\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne;  Van Gool, Luc J.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('204','tp_links')\" style=\"cursor:pointer;\">Efficient multi-camera vehicle detection, tracking, and identification in a tunnel surveillance application<\/a> <span class=\"tp_pub_type tp_  article\">Art\u00edculo de revista<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_journal\">Computer Vision and Image Understanding, <\/span><span class=\"tp_pub_additional_volume\">vol. 116, <\/span><span class=\"tp_pub_additional_number\">no 6, <\/span><span class=\"tp_pub_additional_pages\">pp. 
742 - 753, <\/span><span class=\"tp_pub_additional_year\">2012<\/span>, <span class=\"tp_pub_additional_issn\">ISSN: 1077-3142<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_204\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('204','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_204\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('204','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_204\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@article{RIOSCABRERA2012742,<br \/>\r\ntitle = {Efficient multi-camera vehicle detection, tracking, and identification in a tunnel surveillance application},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne and Van Gool, Luc J.},<br \/>\r\nurl = {http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380},<br \/>\r\ndoi = {http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006},<br \/>\r\nissn = {1077-3142},<br \/>\r\nyear  = {2012},<br \/>\r\ndate = {2012-01-01},<br \/>\r\njournal = {Computer Vision and Image Understanding},<br \/>\r\nvolume = {116},<br \/>\r\nnumber = {6},<br \/>\r\npages = {742 - 753},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {article}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('204','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_204\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380\" title=\"http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380\" 
target=\"_blank\">http:\/\/www.sciencedirect.com\/science\/article\/pii\/S1077314212000380<\/a><\/li><li><i class=\"ai ai-doi\"><\/i><a class=\"tp_pub_list\" href=\"https:\/\/dx.doi.org\/http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006\" title=\"DOI de seguimiento:http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006\" target=\"_blank\">doi:http:\/\/dx.doi.org\/10.1016\/j.cviu.2012.02.006<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('204','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><h3 class=\"tp_h3\" id=\"tp_h3_2011\">2011<\/h3><h3 class=\"tp_h3\" id=\"tp_h3_inproceedings\">Proceedings Articles<\/h3><div class=\"tp_publication tp_publication_inproceedings\"><div class=\"tp_pub_info\"><p class=\"tp_pub_author\"> Rios-Cabrera, Reyes;  Tuytelaars, Tinne;  Van Gool, Luc J.<\/p><p class=\"tp_pub_title\"><a class=\"tp_title_link\" onclick=\"teachpress_pub_showhide('202','tp_links')\" style=\"cursor:pointer;\">Efficient multi-camera detection, tracking, and identification using a shared set of haar-features.<\/a> <span class=\"tp_pub_type tp_  inproceedings\">Proceedings Article<\/span> <\/p><p class=\"tp_pub_additional\"><span class=\"tp_pub_additional_in\">En: <\/span><span class=\"tp_pub_additional_booktitle\">CVPR, <\/span><span class=\"tp_pub_additional_pages\">pp. 
65-71, <\/span><span class=\"tp_pub_additional_publisher\">IEEE Computer Society, <\/span><span class=\"tp_pub_additional_year\">2011<\/span>, <span class=\"tp_pub_additional_isbn\">ISBN: 978-1-4577-0394-2<\/span>.<\/p><p class=\"tp_pub_menu\"><span class=\"tp_resource_link\"><a id=\"tp_links_sh_202\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('202','tp_links')\" title=\"Mostrar enlaces y recursos\" style=\"cursor:pointer;\">Enlaces<\/a><\/span> | <span class=\"tp_bibtex_link\"><a id=\"tp_bibtex_sh_202\" class=\"tp_show\" onclick=\"teachpress_pub_showhide('202','tp_bibtex')\" title=\"Mostrar entrada BibTeX \" style=\"cursor:pointer;\">BibTeX<\/a><\/span><\/p><div class=\"tp_bibtex\" id=\"tp_bibtex_202\" style=\"display:none;\"><div class=\"tp_bibtex_entry\"><pre>@inproceedings{conf\/cvpr\/Rios-CabreraTG11,<br \/>\r\ntitle = {Efficient multi-camera detection, tracking, and identification using a shared set of haar-features.},<br \/>\r\nauthor = {Rios-Cabrera, Reyes and Tuytelaars, Tinne and Van Gool, Luc J.},<br \/>\r\nurl = {http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11},<br \/>\r\nisbn = {978-1-4577-0394-2},<br \/>\r\nyear  = {2011},<br \/>\r\ndate = {2011-01-01},<br \/>\r\nbooktitle = {CVPR},<br \/>\r\npages = {65-71},<br \/>\r\npublisher = {IEEE Computer Society},<br \/>\r\ncrossref = {conf\/cvpr\/2011},<br \/>\r\nkeywords = {},<br \/>\r\npubstate = {published},<br \/>\r\ntppubtype = {inproceedings}<br \/>\r\n}<br \/>\r\n<\/pre><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('202','tp_bibtex')\">Cerrar<\/a><\/p><\/div><div class=\"tp_links\" id=\"tp_links_202\" style=\"display:none;\"><div class=\"tp_links_entry\"><ul class=\"tp_pub_list\"><li><i class=\"fas fa-globe\"><\/i><a class=\"tp_pub_list\" href=\"http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11\" title=\"http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11\" 
target=\"_blank\">http:\/\/dblp.uni-trier.de\/db\/conf\/cvpr\/cvpr2011.html#Rios-CabreraTG11<\/a><\/li><\/ul><\/div><p class=\"tp_close_menu\"><a class=\"tp_close\" onclick=\"teachpress_pub_showhide('202','tp_links')\">Cerrar<\/a><\/p><\/div><\/div><\/div><\/div><\/div> Av. Industrial\u00a0Metalurgia\u00a0#1062,\u00a0Parque Ind. Ramos Arizpe,\u00a0Ramos Arizpe, Coah.\u00a0C.P. 25900, M\u00e9xico. \u00a0Tel. +52 (844) 438-9600<\/p>\n","protected":false},"author":11,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_et_pb_use_builder":"on","_et_pb_old_content":"","_et_gb_content_width":"","footnotes":""},"class_list":["post-76","page","type-page","status-publish","hentry"],"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v27.3 - https:\/\/yoast.com\/product\/yoast-seo-wordpress\/ -->\n<title>Publicaciones - Prof. Dr. Reyes Rios-Cabrera<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" href=\"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/\" \/>\n<meta property=\"og:locale\" content=\"es_ES\" \/>\n<meta property=\"og:type\" content=\"article\" \/>\n<meta property=\"og:title\" content=\"Publicaciones - Prof. Dr. Reyes Rios-Cabrera\" \/>\n<meta property=\"og:description\" content=\"Para ver las publicaciones de todo Rob\u00f3tica y Manufactura Avanzada, ver:\u00a0 Publicaciones RYMAPara una lista actualizada de publicaciones vea: GOOGLE SCHOLAR Av. Industrial\u00a0Metalurgia\u00a0#1062,\u00a0Parque Ind. Ramos Arizpe,\u00a0Ramos Arizpe, Coah.\u00a0C.P. 25900, M\u00e9xico. \u00a0Tel. +52 (844) 438-9600\" \/>\n<meta property=\"og:url\" content=\"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/\" \/>\n<meta property=\"og:site_name\" content=\"Prof. Dr. 
Reyes Rios-Cabrera\" \/>\n<meta property=\"article:modified_time\" content=\"2026-01-09T21:07:36+00:00\" \/>\n<meta name=\"twitter:card\" content=\"summary_large_image\" \/>\n<meta name=\"twitter:label1\" content=\"Tiempo de lectura\" \/>\n\t<meta name=\"twitter:data1\" content=\"7 minutos\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\\\/\\\/schema.org\",\"@graph\":[{\"@type\":\"WebPage\",\"@id\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/publicaciones\\\/\",\"url\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/publicaciones\\\/\",\"name\":\"Publicaciones - Prof. Dr. Reyes Rios-Cabrera\",\"isPartOf\":{\"@id\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/#website\"},\"datePublished\":\"2017-09-10T05:46:36+00:00\",\"dateModified\":\"2026-01-09T21:07:36+00:00\",\"breadcrumb\":{\"@id\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/publicaciones\\\/#breadcrumb\"},\"inLanguage\":\"es\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/publicaciones\\\/\"]}]},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/publicaciones\\\/#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"Portada\",\"item\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"Publicaciones\"}]},{\"@type\":\"WebSite\",\"@id\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/#website\",\"url\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/\",\"name\":\"Prof. Dr. 
Reyes Rios-Cabrera\",\"description\":\"Miembro de Rob\u00f3tica y Manufactura Avanzada - Cinvestav\",\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\\\/\\\/ryma.cinvestav.mx\\\/rrios\\\/?s={search_term_string}\"},\"query-input\":{\"@type\":\"PropertyValueSpecification\",\"valueRequired\":true,\"valueName\":\"search_term_string\"}}],\"inLanguage\":\"es\"}]}<\/script>\n<!-- \/ Yoast SEO plugin. -->","yoast_head_json":{"title":"Publicaciones - Prof. Dr. Reyes Rios-Cabrera","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/","og_locale":"es_ES","og_type":"article","og_title":"Publicaciones - Prof. Dr. Reyes Rios-Cabrera","og_description":"Para ver las publicaciones de todo Rob\u00f3tica y Manufactura Avanzada, ver:\u00a0 Publicaciones RYMAPara una lista actualizada de publicaciones vea: GOOGLE SCHOLAR Av. Industrial\u00a0Metalurgia\u00a0#1062,\u00a0Parque Ind. Ramos Arizpe,\u00a0Ramos Arizpe, Coah.\u00a0C.P. 25900, M\u00e9xico. \u00a0Tel. +52 (844) 438-9600","og_url":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/","og_site_name":"Prof. Dr. Reyes Rios-Cabrera","article_modified_time":"2026-01-09T21:07:36+00:00","twitter_card":"summary_large_image","twitter_misc":{"Tiempo de lectura":"7 minutos"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"WebPage","@id":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/","url":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/","name":"Publicaciones - Prof. Dr. 
Reyes Rios-Cabrera","isPartOf":{"@id":"https:\/\/ryma.cinvestav.mx\/rrios\/#website"},"datePublished":"2017-09-10T05:46:36+00:00","dateModified":"2026-01-09T21:07:36+00:00","breadcrumb":{"@id":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/#breadcrumb"},"inLanguage":"es","potentialAction":[{"@type":"ReadAction","target":["https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/"]}]},{"@type":"BreadcrumbList","@id":"https:\/\/ryma.cinvestav.mx\/rrios\/publicaciones\/#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"Portada","item":"https:\/\/ryma.cinvestav.mx\/rrios\/"},{"@type":"ListItem","position":2,"name":"Publicaciones"}]},{"@type":"WebSite","@id":"https:\/\/ryma.cinvestav.mx\/rrios\/#website","url":"https:\/\/ryma.cinvestav.mx\/rrios\/","name":"Prof. Dr. Reyes Rios-Cabrera","description":"Miembro de Rob\u00f3tica y Manufactura Avanzada - Cinvestav","potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/ryma.cinvestav.mx\/rrios\/?s={search_term_string}"},"query-input":{"@type":"PropertyValueSpecification","valueRequired":true,"valueName":"search_term_string"}}],"inLanguage":"es"}]}},"_links":{"self":[{"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/pages\/76","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/users\/11"}],"replies":[{"embeddable":true,"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/comments?post=76"}],"version-history":[{"count":27,"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/pages\/76\/revisions"}],"predecessor-version":[{"id":168,"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/pages\/76\/revisions\/168"}],"wp:attachment":[{"href":"https:\/\/ryma.cinvestav.mx\/rrios\/wp-json\/wp\/v2\/m
edia?parent=76"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}