{"id":2126,"date":"2025-02-25T12:12:56","date_gmt":"2025-02-25T12:12:56","guid":{"rendered":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/?page_id=2126"},"modified":"2025-05-06T15:19:59","modified_gmt":"2025-05-06T15:19:59","slug":"dialogue-systems","status":"publish","type":"page","link":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/embodied-ai\/language-interaction-group\/dialogue-systems\/","title":{"rendered":"Dialogue Systems"},"content":{"rendered":"\t\t<div data-elementor-type=\"wp-page\" data-elementor-id=\"2126\" class=\"elementor elementor-2126\" data-elementor-post-type=\"page\">\n\t\t\t\t<div class=\"elementor-element elementor-element-f6de83c inner-hero mr-0 width-ih e-flex e-con-boxed e-con e-parent\" data-id=\"f6de83c\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-6b4544a font-48 elementor-widget elementor-widget-heading\" data-id=\"6b4544a\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h1 class=\"elementor-heading-title elementor-size-default\">Dialogue Systems<\/h1>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-2f1ef7e bd-row home mr-0 e-flex e-con-boxed e-con e-parent\" data-id=\"2f1ef7e\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-abc6346 elementor-icon-list--layout-inline elementor-align-start bd-nav elementor-list-item-link-full_width elementor-widget elementor-widget-icon-list\" data-id=\"abc6346\" data-element_type=\"widget\" data-e-type=\"widget\" 
data-widget_type=\"icon-list.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t<ul class=\"elementor-icon-list-items elementor-inline-items\">\n\t\t\t\t\t\t\t<li class=\"elementor-icon-list-item elementor-inline-item\">\n\t\t\t\t\t\t\t\t\t\t\t<a href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\">\n\n\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"elementor-icon-list-icon\">\n\t\t\t\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"16\" height=\"15\" viewBox=\"0 0 16 15\" fill=\"none\"><path d=\"M15.1962 7.48458C15.1962 7.99073 14.8211 8.38722 14.396 8.38722H13.5958L13.6133 12.892C13.6133 12.9679 13.6083 13.0438 13.6008 13.1197V13.5753C13.6008 14.1967 13.1532 14.7 12.6005 14.7H12.2004C12.1729 14.7 12.1454 14.7 12.1179 14.6972C12.0829 14.7 12.0479 14.7 12.0129 14.7L11.2002 14.6972H10.6C10.0474 14.6972 9.59976 14.1939 9.59976 13.5725V11.0979C9.59976 10.6002 9.24217 10.1981 8.79956 10.1981H7.19914C6.75653 10.1981 6.39894 10.6002 6.39894 11.0979V13.5725C6.39894 14.1939 5.95132 14.6972 5.39868 14.6972H4.00082C3.96331 14.6972 3.9258 14.6944 3.88829 14.6916C3.85828 14.6944 3.82827 14.6972 3.79826 14.6972H3.39816C2.84552 14.6972 2.3979 14.1939 2.3979 13.5725V10.4231C2.3979 10.3978 2.3979 10.3696 2.4004 10.3443V8.38441H1.60019C1.15008 8.38441 0.799988 7.99073 0.799988 7.48177C0.799988 7.2287 0.875007 7.00374 1.05005 6.8069L7.45921 0.525005C7.63425 0.328168 7.83431 0.300049 8.00935 0.300049C8.1844 0.300049 8.38445 0.356288 8.53449 0.496885L14.9211 6.80971C15.1212 7.00655 15.2212 7.23151 15.1962 7.48458Z\" fill=\"#007BFF\"><\/path><\/svg>\t\t\t\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t\t<span class=\"elementor-icon-list-text\">Home<\/span>\n\t\t\t\t\t\t\t\t\t\t\t<\/a>\n\t\t\t\t\t\t\t\t\t<\/li>\n\t\t\t\t\t\t\t\t<li class=\"elementor-icon-list-item elementor-inline-item\">\n\t\t\t\t\t\t\t\t\t\t\t<a href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\">\n\n\t\t\t\t\t\t\t\t\t\t\t<span 
class=\"elementor-icon-list-text\">CRL<\/span>\n\t\t\t\t\t\t\t\t\t\t\t<\/a>\n\t\t\t\t\t\t\t\t\t<\/li>\n\t\t\t\t\t\t\t\t<li class=\"elementor-icon-list-item elementor-inline-item\">\n\t\t\t\t\t\t\t\t\t\t\t<a href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/embodied-ai\/\">\n\n\t\t\t\t\t\t\t\t\t\t\t<span class=\"elementor-icon-list-text\">Embodied AI<\/span>\n\t\t\t\t\t\t\t\t\t\t\t<\/a>\n\t\t\t\t\t\t\t\t\t<\/li>\n\t\t\t\t\t\t\t\t<li class=\"elementor-icon-list-item elementor-inline-item\">\n\t\t\t\t\t\t\t\t\t\t\t<a href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/embodied-ai\/language-interaction-group\/\">\n\n\t\t\t\t\t\t\t\t\t\t\t<span class=\"elementor-icon-list-text\"> Language &amp; Interaction<\/span>\n\t\t\t\t\t\t\t\t\t\t\t<\/a>\n\t\t\t\t\t\t\t\t\t<\/li>\n\t\t\t\t\t\t\t\t<li class=\"elementor-icon-list-item elementor-inline-item\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"elementor-icon-list-text\">Dialogue Systems<\/span>\n\t\t\t\t\t\t\t\t\t<\/li>\n\t\t\t\t\t\t<\/ul>\n\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-3b834cf ds-intro pt-74 pb-73 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"3b834cf\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-865b88e font-20 elementor-widget elementor-widget-text-editor\" data-id=\"865b88e\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<p>The Dialogue group works on fundamental research related to the modelling of human-machine communication. Our aim is to develop methods that enable users to complete tasks in collaboration with the informational and embodied automatic agents using natural spoken and multimodal interfaces. 
Our current research areas include natural language interpretation for dialogue, statistical dialogue management, emotion detection from multimodal input, domain adaptation, and the use of unstructured data in dialogue. In our research we explore supervised, unsupervised, and reinforcement learning methods, currently focusing on the application of generative adversarial networks for dialogue tasks.<\/p>\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-10b642d ssds pt-64 pb-82 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"10b642d\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-77476bb e-con-full col-left pr-20 e-flex e-con e-child\" data-id=\"77476bb\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-9c8689f font-52 ml-10n elementor-widget elementor-widget-heading\" data-id=\"9c8689f\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">We work on all aspects of statistical spoken dialogue systems<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-3cfb19a font-16 pt-16 elementor-widget elementor-widget-text-editor\" data-id=\"3cfb19a\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<p>For interpretation in dialogue, we developed the Action State Update (ASU) approach, a statistical method that handles references in user utterances without the need for a domain-specific Natural Language Understanding component. 
We use a multi-dimensional approach to dialogue management, aiming to support more natural interactions and to enable more efficient adaptation to new domains. We detect user emotions and consider them in dialogue response generation.<\/p>\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-8fb6245 e-con-full col-right e-flex e-con e-child\" data-id=\"8fb6245\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-528c97c figure-img elementor-widget elementor-widget-image\" data-id=\"528c97c\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"image.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<img fetchpriority=\"high\" decoding=\"async\" width=\"580\" height=\"352\" src=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-intro-img.png\" class=\"attachment-full size-full wp-image-2148\" alt=\"We work on all aspects of statistical spoken dialogue systems.\" srcset=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-intro-img.png 580w, https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-intro-img-300x182.png 300w\" sizes=\"(max-width: 580px) 100vw, 580px\" \/>\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-1eeccb2 nlu pt-63 pb-83 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"1eeccb2\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-191d71a e-con-full col-right e-flex e-con e-child\" data-id=\"191d71a\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div 
class=\"elementor-element elementor-element-850b4f2 figure-img img-multply elementor-widget elementor-widget-image\" data-id=\"850b4f2\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"image.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<img decoding=\"async\" width=\"580\" height=\"473\" src=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-nlu-img.jpg\" class=\"attachment-full size-full wp-image-2167\" alt=\"Interpretation without a domain-specific Natural Language Understanding (NLU)\" srcset=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-nlu-img.jpg 580w, https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-nlu-img-300x245.jpg 300w\" sizes=\"(max-width: 580px) 100vw, 580px\" \/>\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-d198f53 e-con-full col-left pl-45 e-flex e-con e-child\" data-id=\"d198f53\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-c5dcf81 font-52 ml-10n wd-521 elementor-widget elementor-widget-heading\" data-id=\"c5dcf81\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">Interpretation without a domain-specific Natural Language Understanding (NLU)<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-09509c3 font-16 pt-16 elementor-widget elementor-widget-text-editor\" data-id=\"09509c3\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div 
class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<p>Interpretation in a dialogue system processes a user utterance and updates the dialogue state, which is used by the policy to decide on the next system action. Traditionally, interpretation relies on detection of domain-specific semantics, including intents, entities, and relations, requiring a task-specific annotated dataset for training an NLU model. In contrast, the Action State Update (ASU) approach is centred on user actions. We discretize user actions based on the domain structure and train a binary action detection classifier, eliminating the need for costly domain-specific semantic annotations.<\/p>\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-e4f6e19 multi-manage pt-67 pb-74 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"e4f6e19\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-86001b4 e-con-full col-left pr-46 e-flex e-con e-child\" data-id=\"86001b4\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-43dd13a font-52 ml-10n elementor-widget elementor-widget-heading\" data-id=\"43dd13a\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">Multi-dimensional dialogue management<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-a4b662c font-16 pt-14 elementor-widget elementor-widget-text-editor\" data-id=\"a4b662c\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div 
class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\tThe action selection component of a dialogue system decides what the most appropriate response to give to the user is, given the current dialogue state. This decision is driven by a hand-coded or trained dialogue policy. In statistical dialogue systems, the dialogue policy is typically trained using Reinforcement Learning, often in interaction with a simulated user. In our approach to action selection, we distinguish between different aspects, or \u2018dimensions\u2019, of the dialogue process that can be addressed simultaneously. We therefore implement multiple agents that each focus on one of these dimensions, and train their associated policies accordingly. As some of these dimensions can be considered task- and\/or domain-independent, their policies can be re-used and adapted to new tasks and\/or domains.\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-32eb645 e-con-full col-right e-flex e-con e-child\" data-id=\"32eb645\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-1354d38 figure-img elementor-widget elementor-widget-image\" data-id=\"1354d38\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"image.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<img decoding=\"async\" width=\"580\" height=\"422\" src=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-multi-dimenssion-img.png\" class=\"attachment-full size-full wp-image-2198\" alt=\"Multi-dimensional dialogue management\" srcset=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-multi-dimenssion-img.png 580w, https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-multi-dimenssion-img-300x218.png 
300w\" sizes=\"(max-width: 580px) 100vw, 580px\" \/>\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-6966a5c esti-ds pt-65 pb-82 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"6966a5c\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-816b861 e-con-full content-row e-flex e-con e-child\" data-id=\"816b861\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t<div class=\"elementor-element elementor-element-2f45252 e-con-full col-right e-flex e-con e-child\" data-id=\"2f45252\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-ec3753f figure-img img-multply elementor-widget elementor-widget-image\" data-id=\"ec3753f\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"image.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<img loading=\"lazy\" decoding=\"async\" width=\"580\" height=\"317\" src=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-estimation-img.jpg\" class=\"attachment-full size-full wp-image-2202\" alt=\"User state estimation in dialogue systems\" srcset=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-estimation-img.jpg 580w, https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-content\/uploads\/2025\/02\/dialogue-systems-estimation-img-300x164.jpg 300w\" sizes=\"(max-width: 580px) 100vw, 580px\" \/>\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-d1c2060 e-con-full col-left pl-46 e-flex e-con e-child\" data-id=\"d1c2060\" data-element_type=\"container\" 
data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-5ab1556 font-52 ml-17n wd-521 elementor-widget elementor-widget-heading\" data-id=\"5ab1556\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">User state estimation in dialogue systems<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-6dc6dc6 font-16 pt-16 elementor-widget elementor-widget-text-editor\" data-id=\"6dc6dc6\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\tThe role of user state estimation becomes more and more important for modern-day dialogue systems aiming to be more adaptive to the user and the situation. By considering user emotion and ongoing interaction quality for response generation, dialogue systems are able to be more engaging and likely to be more user-friendly. 
To estimate emotion and interaction quality we are using deep learning methods and utilize multiple input modalities such as speech, text and video for improved estimation performance.\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-8d75109 e-con-full button-row pt-39 e-flex e-con e-child\" data-id=\"8d75109\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-52931f3 elementor-align-left btn-bb-red elementor-widget elementor-widget-button\" data-id=\"52931f3\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/embodied-ai\/\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span class=\"elementor-button-text\">back to Embodied AI<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-e1bed4f e-con-full e-flex e-con e-parent\" data-id=\"e1bed4f\" data-element_type=\"container\" 
data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-beb2e10 elementor-widget elementor-widget-template\" data-id=\"beb2e10\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"template.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t<div class=\"elementor-template\">\n\t\t\t\t\t<div data-elementor-type=\"section\" data-elementor-id=\"6952\" class=\"elementor elementor-6952\" data-elementor-post-type=\"elementor_library\">\n\t\t\t<div class=\"elementor-element elementor-element-b11637a latest-pub-qig pt-83 pb-47 container-1746 e-flex e-con-boxed e-con e-parent\" data-id=\"b11637a\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-35ccd75 e-con-full col-left pr-112 e-flex e-con e-child\" data-id=\"35ccd75\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-5cfee9b crl-dual-head pb-26 font-62 elementor-widget elementor-widget-exad-exclusive-dual-heading\" data-id=\"5cfee9b\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"exad-exclusive-dual-heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\r\n        <div class=\"exad-dual-heading\">\r\n            <div class=\"exad-dual-heading-wrapper\">\r\n            \r\n                <h2 class=\"exad-dual-heading-title\">\r\n                                    <span class=\"first-heading\">Language &amp; Interaction Group<\/span>\r\n                    <span class=\"second-heading\">Latest Publications<\/span>\r\n                                    <\/h2>\r\n\r\n                                    <p class=\"exad-dual-heading-description\">Information contained in news and other announcements is current on the date of posting, but subject to change 
without notice.<\/p>\r\n                  \r\n\r\n            <\/div>\r\n        <\/div>\r\n        \t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-ad8e818 elementor-align-left btn-bb-red lp-btn elementor-widget elementor-widget-button\" data-id=\"ad8e818\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"\/cambridge-research-laboratory\/publications_category\/speech-technology-group\/\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span class=\"elementor-button-text\">See What's New<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-90c2aac e-con-full col-right e-flex e-con e-child\" data-id=\"90c2aac\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-b3fe631 elementor-grid-1 pp-equal-height-yes lp-post elementor-widget__width-inherit elementor-grid-tablet-1 elementor-grid-mobile-1 elementor-widget elementor-widget-pp-posts\" data-id=\"b3fe631\" data-element_type=\"widget\" data-e-type=\"widget\" 
data-widget_type=\"pp-posts.template\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t<div class=\"pp-posts-container\">\n\t\t\t\t\t<div class=\"pp-posts-empty\">\n\t\t\t\t\t\t\t<p>It seems we can&#039;t find what you&#039;re looking for.<\/p>\n\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-72bb74d e-con-full e-flex e-con e-parent\" data-id=\"72bb74d\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-0a5db74 elementor-widget elementor-widget-template\" data-id=\"0a5db74\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"template.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t<div class=\"elementor-template\">\n\t\t\t\t\t<div data-elementor-type=\"section\" data-elementor-id=\"549\" class=\"elementor elementor-549\" data-elementor-post-type=\"elementor_library\">\n\t\t\t<div class=\"elementor-element elementor-element-36f62f0 big-card pt-80 pb-38 e-flex e-con-boxed e-con e-parent\" data-id=\"36f62f0\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-97ffccc e-con-full card-content e-flex e-con e-child\" data-id=\"97ffccc\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t<div class=\"elementor-element elementor-element-1ddc4d7 font-52 pb-6 elementor-widget elementor-widget-heading\" data-id=\"1ddc4d7\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div 
class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">Contact Us<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-c2782e9 elementor-align-left btn-txt-white elementor-widget elementor-widget-button\" data-id=\"c2782e9\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/contact-us\/\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span class=\"elementor-button-text\">Learn More<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-ced3797 e-con-full e-flex e-con e-parent\" data-id=\"ced3797\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-bda7061 elementor-widget elementor-widget-template\" data-id=\"bda7061\" data-element_type=\"widget\" data-e-type=\"widget\" 
data-widget_type=\"template.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t<div class=\"elementor-template\">\n\t\t\t\t\t<div data-elementor-type=\"section\" data-elementor-id=\"552\" class=\"elementor elementor-552\" data-elementor-post-type=\"elementor_library\">\n\t\t\t<div class=\"elementor-element elementor-element-b1e3327 big-card pb-38 e-flex e-con-boxed e-con e-parent\" data-id=\"b1e3327\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-3e9aaf4 e-con-full card-content e-flex e-con e-child\" data-id=\"3e9aaf4\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t\t\t<div class=\"elementor-element elementor-element-8959040 font-52 pb-6 elementor-widget elementor-widget-heading\" data-id=\"8959040\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">Vacancies<\/h2>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-4e50eff elementor-align-left btn-txt-white elementor-widget elementor-widget-button\" data-id=\"4e50eff\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"https:\/\/careers.toshiba.eu\/Home\/Job?lstRegion=1&#038;lstdepartment=12&#038;chkCategory=15\" rel=\"noopener\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg 
xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span class=\"elementor-button-text\">Learn More<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-266cac7 e-con-full e-flex e-con e-parent\" data-id=\"266cac7\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-29a5e95 elementor-widget elementor-widget-template\" data-id=\"29a5e95\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"template.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t<div class=\"elementor-template\">\n\t\t\t\t\t<div data-elementor-type=\"section\" data-elementor-id=\"557\" class=\"elementor elementor-557\" data-elementor-post-type=\"elementor_library\">\n\t\t\t<div class=\"elementor-element elementor-element-e4f4860 outro-cards pb-82 e-flex e-con-boxed e-con e-parent\" data-id=\"e4f4860\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t<div class=\"elementor-element elementor-element-903217a e-con-full out-card e-flex e-con e-child\" data-id=\"903217a\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t<div 
class=\"elementor-element elementor-element-78f6ea1 e-con-full card-content e-flex e-con e-child\" data-id=\"78f6ea1\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-3e3121a font-26 elementor-widget elementor-widget-heading\" data-id=\"3e3121a\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h4 class=\"elementor-heading-title elementor-size-default\">Newsroom<\/h4>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-c407530 elementor-align-left btn-txt-white elementor-widget elementor-widget-button\" data-id=\"c407530\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"\/cambridge-research-laboratory\/news\/\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span class=\"elementor-button-text\">Learn More<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-9ef5d6c e-con-full out-card 
e-flex e-con e-child\" data-id=\"9ef5d6c\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;classic&quot;}\">\n\t\t<div class=\"elementor-element elementor-element-692e946 e-con-full card-content e-flex e-con e-child\" data-id=\"692e946\" data-element_type=\"container\" data-e-type=\"container\">\n\t\t\t\t<div class=\"elementor-element elementor-element-ff12981 font-26 elementor-widget elementor-widget-heading\" data-id=\"ff12981\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t<h4 class=\"elementor-heading-title elementor-size-default\">Global R &amp; D<\/h4>\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<div class=\"elementor-element elementor-element-9bd39c1 elementor-align-left btn-txt-white elementor-widget elementor-widget-button\" data-id=\"9bd39c1\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"button.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<div class=\"elementor-button-wrapper\">\n\t\t\t\t\t<a class=\"elementor-button elementor-button-link elementor-size-sm\" href=\"https:\/\/www.global.toshiba\/ww\/technology\/corporate\/rdc.html\" rel=\"noopener\">\n\t\t\t\t\t\t<span class=\"elementor-button-content-wrapper\">\n\t\t\t\t\t\t<span class=\"elementor-button-icon\">\n\t\t\t\t<svg xmlns=\"http:\/\/www.w3.org\/2000\/svg\" width=\"53\" height=\"8\" viewBox=\"0 0 53 8\" fill=\"none\"><path d=\"M52.3536 4.35355C52.5488 4.15829 52.5488 3.84171 52.3536 3.64645L49.1716 0.464466C48.9763 0.269204 48.6597 0.269204 48.4645 0.464466C48.2692 0.659728 48.2692 0.976311 48.4645 1.17157L51.2929 4L48.4645 6.82843C48.2692 7.02369 48.2692 7.34027 48.4645 7.53553C48.6597 7.7308 48.9763 7.7308 49.1716 7.53553L52.3536 4.35355ZM0 4.5H52V3.5H0V4.5Z\" fill=\"transparent\"><\/path><\/svg>\t\t\t<\/span>\n\t\t\t\t\t\t\t\t\t<span 
class=\"elementor-button-text\">Learn More<\/span>\n\t\t\t\t\t<\/span>\n\t\t\t\t\t<\/a>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t","protected":false},"excerpt":{"rendered":"<p>Dialogue Systems Home CRL Embodied AI Language &amp; Interaction Dialogue Systems The Dialogue group works on fundamental research related to [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"parent":1619,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_acf_changed":false,"site-sidebar-layout":"no-sidebar","site-content-layout":"","ast-site-content-layout":"full-width-container","site-content-style":"default","site-sidebar-style":"default","ast-global-header-display":"","ast-banner-title-visibility":"","ast-main-header-display":"","ast-hfb-above-header-display":"","ast-hfb-below-header-display":"","ast-hfb-mobile-header-display":"","site-post-title":"disabled","ast-breadcrumbs-content":"","ast-featured-img":"disabled","footer-sml-layout":"","ast-disable-related-posts":"","theme-transparent-header-meta":"default","adv-header-id-meta":"","stick-header-meta":"","header-above-stick-meta":"","header-main-stick-meta":"","header-below-stick-meta":"","astra-migrate-meta-layouts":"set","ast-page-background-enabled":"default","ast-page-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"footnotes":""},"class_list":["post-2126","page","type-page","status-publish","hentry"],"acf":[],"_links":{"self":[{"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/pages\/2126","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/comments?post=2126"}],"version-history":[{"count":135,"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/pages\/2126\/revisions"}],"predecessor-version":[{"id":7462,"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/pages\/2126\/revisions\/7462"}],"up":[{"embeddable":true,"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/pages\/1619"}],"wp:attachment":[{"href":"https:\/\/www.toshiba.eu\/cambridge-research-laboratory\/wp-json\/wp\/v2\/media?parent=2126"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}