data
dict
{ "proceeding": { "id": "12OmNzVXNIo", "title": "Image and Signal Processing, Congress on", "acronym": "cisp", "groupId": "1001793", "volume": "4", "displayVolume": "4", "year": "2008", "__typename": "ProceedingType" }, "article": { "id": "12OmNwoPtr7", "doi": "10.1109/CISP.2008.77", "title": "Direct Reckoning Reverberation Time from the Scene Images Based on Rough Fuzzy Neural Network", "normalizedTitle": "Direct Reckoning Reverberation Time from the Scene Images Based on Rough Fuzzy Neural Network", "abstract": "As the most important objective parameter, reverberation time plays a significant role in acoustic field characteristics evaluation of the hall. We can get it by measuring an actual room. In this paper, a new method is proposed based on rough fuzzy neural network to reckon the reverberation time from a scene image, which is obviously distinguished from the conventional methods. Finally the validity of the network is proved through the experiment results of network training on the test data. It provides a brand-new idea for virtual reality technique and sound quality evaluation of virtual environment with using this method.", "abstracts": [ { "abstractType": "Regular", "content": "As the most important objective parameter, reverberation time plays a significant role in acoustic field characteristics evaluation of the hall. We can get it by measuring an actual room. In this paper, a new method is proposed based on rough fuzzy neural network to reckon the reverberation time from a scene image, which is obviously distinguished from the conventional methods. Finally the validity of the network is proved through the experiment results of network training on the test data. 
It provides a brand-new idea for virtual reality technique and sound quality evaluation of virtual environment with using this method.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "As the most important objective parameter, reverberation time plays a significant role in acoustic field characteristics evaluation of the hall. We can get it by measuring an actual room. In this paper, a new method is proposed based on rough fuzzy neural network to reckon the reverberation time from a scene image, which is obviously distinguished from the conventional methods. Finally the validity of the network is proved through the experiment results of network training on the test data. It provides a brand-new idea for virtual reality technique and sound quality evaluation of virtual environment with using this method.", "fno": "3119d398", "keywords": [ "Reverberation Time", "Scene Image", "Rough Fuzzy Neural Network" ], "authors": [ { "affiliation": null, "fullName": "Wei Gong", "givenName": "Wei", "surname": "Gong", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Na Qi", "givenName": "Na", "surname": "Qi", "__typename": "ArticleAuthorType" } ], "idPrefix": "cisp", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2008-05-01T00:00:00", "pubType": "proceedings", "pages": "398-402", "year": "2008", "issn": null, "isbn": "978-0-7695-3119-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "3119d393", "articleId": "12OmNzUPpkb", "__typename": "AdjacentArticleType" }, "next": { "fno": "3119d403", "articleId": "12OmNxw5BuC", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/iihmsp/2006/2745/0/04041661", "title": "Audio Watermarking Based on Reverberation", "doi": null, "abstractUrl": "/proceedings-article/iihmsp/2006/04041661/12OmNAZOK0k", 
"parentPublication": { "id": "proceedings/iihmsp/2006/2745/0", "title": "2006 International Conference on Intelligent Information Hiding and Multimedia", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cso/2012/1365/0/06274808", "title": "Methods of Measuring a Classroom's Indoor Audio Environmental Parameters", "doi": null, "abstractUrl": "/proceedings-article/cso/2012/06274808/12OmNApLGPD", "parentPublication": { "id": "proceedings/cso/2012/1365/0", "title": "2012 Fifth International Joint Conference on Computational Sciences and Optimization (CSO)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccima/1999/0300/0/03000107", "title": "Modular Rough Fuzzy MLP: Evolutionary Design", "doi": null, "abstractUrl": "/proceedings-article/iccima/1999/03000107/12OmNCfjezi", "parentPublication": { "id": "proceedings/iccima/1999/0300/0", "title": "Computational Intelligence and Multimedia Applications, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icnds/2009/3635/2/3635b036", "title": "Application of Rough Set and Fuzzy Neural Network in Information Handling", "doi": null, "abstractUrl": "/proceedings-article/icnds/2009/3635b036/12OmNvAiSui", "parentPublication": { "id": "proceedings/icnds/2009/3635/1", "title": "2009 International Conference on Networking and Digital Society (ICNDS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icci/2006/0475/2/04216501", "title": "Temporal Rough Neural Network", "doi": null, "abstractUrl": "/proceedings-article/icci/2006/04216501/12OmNvmXJ9z", "parentPublication": { "id": "proceedings/icci/2006/0475/2", "title": "Cognitive Informatics, IEEE International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icsem/2010/4223/2/4223b254", 
"title": "Application of Rough Set on Extraction of Sound Quality Parameters", "doi": null, "abstractUrl": "/proceedings-article/icsem/2010/4223b254/12OmNvyjGhw", "parentPublication": { "id": "proceedings/icsem/2010/4223/2", "title": "2010 International Conference on System Science, Engineering Design and Manufacturing Informatization", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aqtr/2006/0360/1/04022835", "title": "Comparative Performance Analysis of Artificial Reverberation Algorithms", "doi": null, "abstractUrl": "/proceedings-article/aqtr/2006/04022835/12OmNwB2dTf", "parentPublication": { "id": "proceedings/aqtr/2006/0360/1", "title": "2006 IEEE-TTTC International Conference on Automation, Quality and Testing, Robotics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cso/2009/3605/2/3605c350", "title": "An Adaptive Fuzzy Neural Network for Extracting Scene Image Parameters", "doi": null, "abstractUrl": "/proceedings-article/cso/2009/3605c350/12OmNz6iOlx", "parentPublication": { "id": "proceedings/cso/2009/3605/2", "title": "2009 International Joint Conference on Computational Sciences and Optimization", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040567", "title": "Aural Proxies and Directionally-Varying Reverberation for Interactive Sound Propagation in Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040567/13rRUxD9gXG", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmew/2019/9214/0/921400a378", "title": "Blind Estimation of Reverberation Time using Binaural Complex Ideal Ratio Mask", "doi": null, "abstractUrl": "/proceedings-article/icmew/2019/921400a378/1cJ0EdKcoMw", "parentPublication": { 
"id": "proceedings/icmew/2019/9214/0", "title": "2019 IEEE International Conference on Multimedia & Expo Workshops (ICMEW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwl8GHY", "title": "2012 13th ACIS International Conference on Software Engineering, Artificial Intelligence, Networking and Parallel & Distributed Computing (SNPD 2012)", "acronym": "snpd", "groupId": "1001811", "volume": "0", "displayVolume": "0", "year": "2012", "__typename": "ProceedingType" }, "article": { "id": "12OmNz4SOxH", "doi": "10.1109/SNPD.2012.99", "title": "Evaluation of Realism of Dynamic Sound Space Using a Virtual Auditory Display", "normalizedTitle": "Evaluation of Realism of Dynamic Sound Space Using a Virtual Auditory Display", "abstract": "We can perceive a sound position from binaural signals using mainly head-related transfer functions (HRTFs). Using the theorem presented herein, we can display a sound image to a specific position in virtual auditory space by HRTFs. However, HRTF is defined commonly in a free-field, and a virtual sound image is perceived as a dry source without reflection, reverberation, or ambient noise. Therefore, the virtual sound space might be unnatural. The authors developed a software-based virtual auditory display (VAD) that outputs audio signals for a set of headphones with a three-dimensional position sensor. The VAD software can display a dynamic virtual auditory space that is responsive to a listener's head movement. Subjective evaluations were conducted to clarify the relation between the perceived reality of virtual sound space and ambient sound. Evaluation results of the reality of the virtual sound space displayed by the VAD software are introduced.", "abstracts": [ { "abstractType": "Regular", "content": "We can perceive a sound position from binaural signals using mainly head-related transfer functions (HRTFs). Using the theorem presented herein, we can display a sound image to a specific position in virtual auditory space by HRTFs. 
However, HRTF is defined commonly in a free-field, and a virtual sound image is perceived as a dry source without reflection, reverberation, or ambient noise. Therefore, the virtual sound space might be unnatural. The authors developed a software-based virtual auditory display (VAD) that outputs audio signals for a set of headphones with a three-dimensional position sensor. The VAD software can display a dynamic virtual auditory space that is responsive to a listener's head movement. Subjective evaluations were conducted to clarify the relation between the perceived reality of virtual sound space and ambient sound. Evaluation results of the reality of the virtual sound space displayed by the VAD software are introduced.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We can perceive a sound position from binaural signals using mainly head-related transfer functions (HRTFs). Using the theorem presented herein, we can display a sound image to a specific position in virtual auditory space by HRTFs. However, HRTF is defined commonly in a free-field, and a virtual sound image is perceived as a dry source without reflection, reverberation, or ambient noise. Therefore, the virtual sound space might be unnatural. The authors developed a software-based virtual auditory display (VAD) that outputs audio signals for a set of headphones with a three-dimensional position sensor. The VAD software can display a dynamic virtual auditory space that is responsive to a listener's head movement. Subjective evaluations were conducted to clarify the relation between the perceived reality of virtual sound space and ambient sound. 
Evaluation results of the reality of the virtual sound space displayed by the VAD software are introduced.", "fno": "06299338", "keywords": [ "Audio Recording", "Auditory Displays", "Headphones", "Transfer Functions", "Virtual Reality", "Dynamic Sound Space Realism Evaluation", "Sound Position", "Binaural Signals", "Head Related Transfer Functions", "HRTF", "Virtual Sound Image", "Software Based Virtual Auditory Display", "Audio Signals", "Headphone Set", "Three Dimensional Position Sensor", "VAD Software", "Dynamic Virtual Auditory Space", "Listener Head Movement", "Perceived Reality", "Ambient Sound", "Virtual Sound Space", "Noise", "Rendering Computer Graphics", "Magnetic Heads", "Head", "Auditory Displays", "Noise Measurement", "Aerospace Electronics", "Virtual Auditory Display", "Head Movement", "Ambient Noise", "First Order Reflective Sound", "Realism Of Virtual Sound Space" ], "authors": [ { "affiliation": null, "fullName": "Yukio Iwaya", "givenName": "Yukio", "surname": "Iwaya", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Masahi Toyoda", "givenName": "Masahi", "surname": "Toyoda", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Makoto Otani", "givenName": "Makoto", "surname": "Otani", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Yôiti Suzuki", "givenName": "Yôiti", "surname": "Suzuki", "__typename": "ArticleAuthorType" } ], "idPrefix": "snpd", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2012-08-01T00:00:00", "pubType": "proceedings", "pages": "561-566", "year": "2012", "issn": null, "isbn": "978-1-4673-2120-4", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06299337", "articleId": "12OmNwudQQM", "__typename": "AdjacentArticleType" }, "next": { "fno": "06299339", "articleId": "12OmNzahc4J", "__typename": "AdjacentArticleType" }, "__typename": 
"AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/sive/2015/1969/0/07361293", "title": "Evaluating vertical localization performance of 3D sound rendering models with a perceptual metric", "doi": null, "abstractUrl": "/proceedings-article/sive/2015/07361293/12OmNAObbB4", "parentPublication": { "id": "proceedings/sive/2015/1969/0", "title": "2015 IEEE 2nd VR Workshop on Sonic Interactions for Virtual Environments (SIVE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2014/6184/0/06948409", "title": "P-HRTF: Efficient personalized HRTF computation for high-fidelity spatial sound", "doi": null, "abstractUrl": "/proceedings-article/ismar/2014/06948409/12OmNAT0mSm", "parentPublication": { "id": "proceedings/ismar/2014/6184/0", "title": "2014 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icassp/2009/2353/0/04959980", "title": "Interpolation of head-related transfer functions by spatial linear prediction", "doi": null, "abstractUrl": "/proceedings-article/icassp/2009/04959980/12OmNBJeyHh", "parentPublication": { "id": "proceedings/icassp/2009/2353/0", "title": "Acoustics, Speech, and Signal Processing, IEEE International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2002/1862/0/18620191", "title": "Robotic Spatial Sound Localization and Its 3-D Sound Human Interface", "doi": null, "abstractUrl": "/proceedings-article/cw/2002/18620191/12OmNCf1DwV", "parentPublication": { "id": "proceedings/cw/2002/1862/0", "title": "First International Symposium on Cyber Worlds, 2002. 
Proceedings.", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ism/2008/3454/0/3454a468", "title": "Design and Implementation of 3D Auditory Scene Visualizer towards Auditory Awareness with Face Tracking", "doi": null, "abstractUrl": "/proceedings-article/ism/2008/3454a468/12OmNvvtGWy", "parentPublication": { "id": "proceedings/ism/2008/3454/0", "title": "2008 Tenth IEEE International Symposium on Multimedia", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrais/1993/1363/0/00380779", "title": "Synthesis of 3D virtual auditory space via a spatial feature extraction and regularization model", "doi": null, "abstractUrl": "/proceedings-article/vrais/1993/00380779/12OmNxRF781", "parentPublication": { "id": "proceedings/vrais/1993/1363/0", "title": "Virtual Reality Annual International Symposium", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isuc/2008/3433/0/3433a042", "title": "3D Auditory Scene Visualizer with Face Tracking: Design and Implementation for Auditory Awareness Compensation", "doi": null, "abstractUrl": "/proceedings-article/isuc/2008/3433a042/12OmNy3AgDk", "parentPublication": { "id": "proceedings/isuc/2008/3433/0", "title": "2008 Second International Symposium on Universal Communication", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/5209a797", "title": "Computational Auditory Scene Analysis Based Voice Activity Detection", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/5209a797/12OmNzuZUDu", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/mu/2005/02/u2080", "title": "Navigation with Auditory Cues in a Virtual Environment", "doi": 
null, "abstractUrl": "/magazine/mu/2005/02/u2080/13rRUwInvi2", "parentPublication": { "id": "mags/mu", "title": "IEEE MultiMedia", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mipr/2019/1198/0/119800a175", "title": "Minimum Audible Movement Angle in Virtual Auditory Environment: Effect of Stimulus Frequency", "doi": null, "abstractUrl": "/proceedings-article/mipr/2019/119800a175/19wB6L2ojhm", "parentPublication": { "id": "proceedings/mipr/2019/1198/0", "title": "2019 IEEE Conference on Multimedia Information Processing and Retrieval (MIPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwCJOWD", "title": "Acoustics, Speech, and Signal Processing, IEEE International Conference on", "acronym": "icassp", "groupId": "1000002", "volume": "0", "displayVolume": "0", "year": "1991", "__typename": "ProceedingType" }, "article": { "id": "12OmNzw8ja1", "doi": "10.1109/ICASSP.1991.150779", "title": "Estimation of position and waveform of a specified sound source decreasing the effect of other sound sources and reflection", "normalizedTitle": "Estimation of position and waveform of a specified sound source decreasing the effect of other sound sources and reflection", "abstract": "A method is described to estimate, using many sensors (microphones) both the position and the waveform of individual sound sources by decreasing the effects of other sound sources and reflection from walls and/or ceilings. Some experiments using two sound sources and 16 sensors have been carried out in a room with a reverberation time of 0.5 s, and it is found that the waveform of the specified sound source could be estimated by decreasing the effect of other sound sources by about 20 dB.<>", "abstracts": [ { "abstractType": "Regular", "content": "A method is described to estimate, using many sensors (microphones) both the position and the waveform of individual sound sources by decreasing the effects of other sound sources and reflection from walls and/or ceilings. Some experiments using two sound sources and 16 sensors have been carried out in a room with a reverberation time of 0.5 s, and it is found that the waveform of the specified sound source could be estimated by decreasing the effect of other sound sources by about 20 dB.<>", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "A method is described to estimate, using many sensors (microphones) both the position and the waveform of individual sound sources by decreasing the effects of other sound sources and reflection from walls and/or ceilings. 
Some experiments using two sound sources and 16 sensors have been carried out in a room with a reverberation time of 0.5 s, and it is found that the waveform of the specified sound source could be estimated by decreasing the effect of other sound sources by about 20 dB.", "fno": "00150779", "keywords": [ "Acoustic Signal Processing", "Acoustic Wave Reflection", "Architectural Acoustics", "Microphones", "Sound Reflection", "Position Estimation", "Waveform Estimation", "Microphones", "Sound Sources", "Sensors", "Walls", "Ceilings", "Room", "Reverberation Time", "0 5 S", "Microphones", "Finite Impulse Response Filter", "Acoustic Reflection", "Acoustic Sensors", "Sensor Phenomena And Characterization", "Acoustic Noise", "Working Environment Noise", "Yield Estimation", "Human Voice", "Teleconferencing" ], "authors": [ { "affiliation": "Comput. Center, Tohoku Univ., Sendai, Japan", "fullName": "M. Abe", "givenName": "M.", "surname": "Abe", "__typename": "ArticleAuthorType" }, { "affiliation": "Comput. Center, Tohoku Univ., Sendai, Japan", "fullName": "K. Fujii", "givenName": "K.", "surname": "Fujii", "__typename": "ArticleAuthorType" }, { "affiliation": "Comput. Center, Tohoku Univ., Sendai, Japan", "fullName": "T. Sone", "givenName": "T.", "surname": "Sone", "__typename": "ArticleAuthorType" }, { "affiliation": "Comput. Center, Tohoku Univ., Sendai, Japan", "fullName": "K. 
Kido", "givenName": "K.", "surname": "Kido", "__typename": "ArticleAuthorType" } ], "idPrefix": "icassp", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "1991-01-01T00:00:00", "pubType": "proceedings", "pages": "2337,2338,2339,2340", "year": "1991", "issn": "1520-6149", "isbn": null, "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "00150778", "articleId": "12OmNvTTc79", "__typename": "AdjacentArticleType" }, "next": { "fno": "00150781", "articleId": "12OmNyz5JW6", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/percomw/2016/1941/0/07457115", "title": "Nondeterministic sound source localization with smartphones in crowdsensing", "doi": null, "abstractUrl": "/proceedings-article/percomw/2016/07457115/12OmNB0nWgB", "parentPublication": { "id": "proceedings/percomw/2016/1941/0", "title": "2016 IEEE International Conference on Pervasive Computing and Communication Workshops (PerCom Workshops)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2004/8603/3/01394611", "title": "Distributed sound rendering for interactive virtual environments", "doi": null, "abstractUrl": "/proceedings-article/icme/2004/01394611/12OmNBmf3ae", "parentPublication": { "id": "proceedings/icme/2004/8603/3", "title": "2004 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icassp/2004/8484/4/01326773", "title": "Head-tracking and subject positioning using binaural headset microphones and common modulation anchor sources", "doi": null, "abstractUrl": "/proceedings-article/icassp/2004/01326773/12OmNCbCs1F", "parentPublication": { "id": "proceedings/icassp/2004/8484/4", "title": "2004 IEEE International Conference on Acoustics, 
Speech, and Signal Processing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cmpeur/1989/1940/0/00093401", "title": "Intelligent microphone-an adaptive sound sensing system", "doi": null, "abstractUrl": "/proceedings-article/cmpeur/1989/00093401/12OmNrnJ6Vt", "parentPublication": { "id": "proceedings/cmpeur/1989/1940/0", "title": "COMPEURO 89 Proceedings VLSI and Computer Peripherals.", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/avss/2013/0703/0/06636622", "title": "Sound source localization for video surveillance camera", "doi": null, "abstractUrl": "/proceedings-article/avss/2013/06636622/12OmNwswg2F", "parentPublication": { "id": "proceedings/avss/2013/0703/0", "title": "2013 10th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpads/2017/2129/0/212901a787", "title": "SOLO: 2D Localization with Single Sound Source and Single Microphone", "doi": null, "abstractUrl": "/proceedings-article/icpads/2017/212901a787/12OmNy2Jt7n", "parentPublication": { "id": "proceedings/icpads/2017/2129/0", "title": "2017 IEEE 23rd International Conference on Parallel and Distributed Systems (ICPADS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/snpd/2012/2120/0/06299336", "title": "Digital Archive for Japanese Intangible Cultural Heritage Based on Reproduction of High-Fidelity Sound Field in Yamahoko Parade of Gion Festival", "doi": null, "abstractUrl": "/proceedings-article/snpd/2012/06299336/12OmNy3iFkX", "parentPublication": { "id": "proceedings/snpd/2012/2120/0", "title": "2012 13th ACIS International Conference on Software Engineering, Artificial Intelligence, Networking and Parallel & Distributed Computing (SNPD 2012)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/iccet/2009/3521/1/3521a281", "title": "Sound Absorption Measurement of Acoustical Material and Structure Using the Echo-Pulse Method", "doi": null, "abstractUrl": "/proceedings-article/iccet/2009/3521a281/12OmNyxFKit", "parentPublication": { "id": null, "title": null, "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iciev/2014/5179/0/06850764", "title": "A low cost stethoscopic system for real time auscultation of heart sound", "doi": null, "abstractUrl": "/proceedings-article/iciev/2014/06850764/12OmNzAohV3", "parentPublication": { "id": "proceedings/iciev/2014/5179/0", "title": "2014 International Conference on Informatics, Electronics & Vision (ICIEV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icassp/1988/9999/0/00197179", "title": "Active systems for sound attenuation in ducts", "doi": null, "abstractUrl": "/proceedings-article/icassp/1988/00197179/12OmNzBOhNm", "parentPublication": { "id": "proceedings/icassp/1988/9999/0", "title": "ICASSP-88., International Conference on Acoustics, Speech, and Signal Processing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNz61d8A", "title": "EUROMICRO '07. 2007 33rd Euromicro Conference on Software Engineering and Advanced Applications", "acronym": "euromicro", "groupId": "1002914", "volume": "0", "displayVolume": "0", "year": "2007", "__typename": "ProceedingType" }, "article": { "id": "12OmNxE2mUb", "doi": "10.1109/EUROMICRO.2007.52", "title": "Reflective and Refractive Variables: A Model for Effective and Maintainable Adaptive-and-Dependable Software", "normalizedTitle": "Reflective and Refractive Variables: A Model for Effective and Maintainable Adaptive-and-Dependable Software", "abstract": "We propose a simple and effective tool for the expression of tasks such as cross-layer optimization strategies or sensors-related applications. The approach is based on what we refer to as \"reflective and refractive variables\". Both types of variables are associated with external entities, e.g. sensors or actuators. A reflective variable is a volatile variable, that is, a variable that might be concurrently modified by multiple threads. A library of threads is made available, each of which interfaces a set of sensors and continuously update the value of a corresponding set of sensors. One such thread is \"cpu\", which exports the current level of usage of the local CPU as an integer between 0 and 100. This integer is reflected into the integer reflective variable cpu. A refractive variable is a reflective variable that can be modified. Each modification is caught and interpreted as a request to change the value of an actuator. For instance, setting variable \"tcp sendrate\" would request a cross-layer adjustment to the thread interfacing the local TCP layer entity. This allows express in an easy way complex operations in the application layer of any programming language, e.g. plain old C. 
We describe our translator and the work we are carrying out within PATS to build simple and powerful libraries of scripts based on reflective and refractive variables, including robotics applications and RFID tags processing.", "abstracts": [ { "abstractType": "Regular", "content": "We propose a simple and effective tool for the expression of tasks such as cross-layer optimization strategies or sensors-related applications. The approach is based on what we refer to as \"reflective and refractive variables\". Both types of variables are associated with external entities, e.g. sensors or actuators. A reflective variable is a volatile variable, that is, a variable that might be concurrently modified by multiple threads. A library of threads is made available, each of which interfaces a set of sensors and continuously update the value of a corresponding set of sensors. One such thread is \"cpu\", which exports the current level of usage of the local CPU as an integer between 0 and 100. This integer is reflected into the integer reflective variable cpu. A refractive variable is a reflective variable that can be modified. Each modification is caught and interpreted as a request to change the value of an actuator. For instance, setting variable \"tcp sendrate\" would request a cross-layer adjustment to the thread interfacing the local TCP layer entity. This allows express in an easy way complex operations in the application layer of any programming language, e.g. plain old C. We describe our translator and the work we are carrying out within PATS to build simple and powerful libraries of scripts based on reflective and refractive variables, including robotics applications and RFID tags processing.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We propose a simple and effective tool for the expression of tasks such as cross-layer optimization strategies or sensors-related applications. 
The approach is based on what we refer to as \"reflective and refractive variables\". Both types of variables are associated with external entities, e.g. sensors or actuators. A reflective variable is a volatile variable, that is, a variable that might be concurrently modified by multiple threads. A library of threads is made available, each of which interfaces a set of sensors and continuously update the value of a corresponding set of sensors. One such thread is \"cpu\", which exports the current level of usage of the local CPU as an integer between 0 and 100. This integer is reflected into the integer reflective variable cpu. A refractive variable is a reflective variable that can be modified. Each modification is caught and interpreted as a request to change the value of an actuator. For instance, setting variable \"tcp sendrate\" would request a cross-layer adjustment to the thread interfacing the local TCP layer entity. This allows express in an easy way complex operations in the application layer of any programming language, e.g. plain old C. 
We describe our translator and the work we are carrying out within PATS to build simple and powerful libraries of scripts based on reflective and refractive variables, including robotics applications and RFID tags processing.", "fno": "29770307", "keywords": [], "authors": [ { "affiliation": "University of Antwerp", "fullName": "Vincenzo De Florio", "givenName": "Vincenzo", "surname": "De Florio", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Antwerp", "fullName": "Chris Blondia", "givenName": "Chris", "surname": "Blondia", "__typename": "ArticleAuthorType" } ], "idPrefix": "euromicro", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2007-08-01T00:00:00", "pubType": "proceedings", "pages": "307-313", "year": "2007", "issn": "1089-6503", "isbn": "0-7695-2977-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "29770297", "articleId": "12OmNrYCY2y", "__typename": "AdjacentArticleType" }, "next": { "fno": "29770314", "articleId": "12OmNCgJeaB", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/peits/2008/3342/0/3342a565", "title": "A Reflective NetGAP Logic Framework Design", "doi": null, "abstractUrl": "/proceedings-article/peits/2008/3342a565/12OmNAlvI7n", "parentPublication": { "id": "proceedings/peits/2008/3342/0", "title": "2008 Workshop on Power Electronics and Intelligent Transportation System", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icise/2009/3887/0/pid983328", "title": "Research of Refractive Correction Forecasting Software Technology Based on WEB Database", "doi": null, "abstractUrl": "/proceedings-article/icise/2009/pid983328/12OmNrkT7Et", "parentPublication": { "id": "proceedings/icise/2009/3887/0", "title": "Information Science and Engineering, International Conference 
on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cbse/2015/3471/0/07447886", "title": "Refraction: low-cost management of reflective meta-data in pervasive component-based applications", "doi": null, "abstractUrl": "/proceedings-article/cbse/2015/07447886/12OmNx9WSWz", "parentPublication": { "id": "proceedings/cbse/2015/3471/0", "title": "2015 18th International ACM SIGSOFT Symposium on Component-Based Software Engineering (CBSE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2012/1226/0/423O3C03", "title": "A theory of multi-layer flat refractive geometry", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2012/423O3C03/12OmNxxdZzR", "parentPublication": { "id": "proceedings/cvpr/2012/1226/0", "title": "2012 IEEE Conference on Computer Vision and Pattern Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/pg/1999/0293/0/02930128", "title": "Visualization of Optical Phenomena Caused by Multilayer Films with Complex Refractive Indices", "doi": null, "abstractUrl": "/proceedings-article/pg/1999/02930128/12OmNxy4N2K", "parentPublication": { "id": "proceedings/pg/1999/0293/0", "title": "Computer Graphics and Applications, Pacific Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/lt/2015/01/06975230", "title": "Context Becomes Content: Sensor Data for Computer-Supported Reflective Learning", "doi": null, "abstractUrl": "/journal/lt/2015/01/06975230/13rRUwgQpno", "parentPublication": { "id": "trans/lt", "title": "IEEE Transactions on Learning Technologies", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040576", "title": "Reflective and Refractive Objects for Mixed Reality", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040576/13rRUxYINf9", 
"parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a606", "title": "Distortion-free Mid-air Image Inside Refractive Surface and on Reflective Surface", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a606/1CJbLHfeIsE", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/sti/2022/9045/0/10103255", "title": "Design and Characterization of Photonic Crystal Fiber Based Surface Plasmon Resonance Refractive Index Sensor", "doi": null, "abstractUrl": "/proceedings-article/sti/2022/10103255/1MBFfZxD7ig", "parentPublication": { "id": "proceedings/sti/2022/9045/0", "title": "2022 4th International Conference on Sustainable Technologies for Industry 4.0 (STI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icedme/2021/3596/0/359600a110", "title": "A novel optical refractive index sensor based on VCSELs and gold nanoparticle arrays", "doi": null, "abstractUrl": "/proceedings-article/icedme/2021/359600a110/1tMPOU0iVQk", "parentPublication": { "id": "proceedings/icedme/2021/3596/0", "title": "2021 4th International Conference on Electron Device and Mechanical Engineering (ICEDME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNrAdsuf", "title": "2015 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "acronym": "ismar", "groupId": "1000465", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNzJbQY0", "doi": "10.1109/ISMAR.2015.22", "title": "[POSTER] Remote Mixed Reality System Supporting Interactions with Virtualized Objects", "normalizedTitle": "[POSTER] Remote Mixed Reality System Supporting Interactions with Virtualized Objects", "abstract": "Mixed Reality (MR) can merge real and virtual worlds seamlessly. This paper proposes a method to realize smooth collaboration using a remote MR, which makes it possible for geographically distributed users to share the same objects and communicate in real time as if they are at the same place. In this paper, we consider a situation where the users at local and remote sites perform a collaborative work, and real objects to be operated exist only at the local site. It is necessary to share the real objects between the two sites. However, prior studies have shown sharing real objects by duplication is either too costly or unrealistic. Therefore, we propose a method to share the objects by virtualizing the real objects using Computer Vision (CV) and then rendering the virtualized objects using MR. We have proposed a remote collaborative work system to create a smoother user experience for collaborative work with virtualized objects for remote users. Through experiments, we confirmed the effectiveness of our approach.", "abstracts": [ { "abstractType": "Regular", "content": "Mixed Reality (MR) can merge real and virtual worlds seamlessly. This paper proposes a method to realize smooth collaboration using a remote MR, which makes it possible for geographically distributed users to share the same objects and communicate in real time as if they are at the same place. 
In this paper, we consider a situation where the users at local and remote sites perform a collaborative work, and real objects to be operated exist only at the local site. It is necessary to share the real objects between the two sites. However, prior studies have shown sharing real objects by duplication is either too costly or unrealistic. Therefore, we propose a method to share the objects by virtualizing the real objects using Computer Vision (CV) and then rendering the virtualized objects using MR. We have proposed a remote collaborative work system to create a smoother user experience for collaborative work with virtualized objects for remote users. Through experiments, we confirmed the effectiveness of our approach.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Mixed Reality (MR) can merge real and virtual worlds seamlessly. This paper proposes a method to realize smooth collaboration using a remote MR, which makes it possible for geographically distributed users to share the same objects and communicate in real time as if they are at the same place. In this paper, we consider a situation where the users at local and remote sites perform a collaborative work, and real objects to be operated exist only at the local site. It is necessary to share the real objects between the two sites. However, prior studies have shown sharing real objects by duplication is either too costly or unrealistic. Therefore, we propose a method to share the objects by virtualizing the real objects using Computer Vision (CV) and then rendering the virtualized objects using MR. We have proposed a remote collaborative work system to create a smoother user experience for collaborative work with virtualized objects for remote users. 
Through experiments, we confirmed the effectiveness of our approach.", "fno": "7660a064", "keywords": [ "Collaboration", "Virtual Reality", "Collaborative Work", "Rendering Computer Graphics", "Three Dimensional Displays", "Cameras", "Real Time Systems", "RGB D Camera", "Mixed Reality", "Virtualized Object", "3 D Interaction", "Remote Collaboration" ], "authors": [ { "affiliation": null, "fullName": "Peng Yang", "givenName": "Peng", "surname": "Yang", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Itaru Kitahara", "givenName": "Itaru", "surname": "Kitahara", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Yuichi Ohta", "givenName": "Yuichi", "surname": "Ohta", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-09-01T00:00:00", "pubType": "proceedings", "pages": "64-67", "year": "2015", "issn": null, "isbn": "978-1-4673-7660-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "7660a060", "articleId": "12OmNwD1pNO", "__typename": "AdjacentArticleType" }, "next": { "fno": "7660a068", "articleId": "12OmNzt0IDp", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ismar/2007/1749/0/04538823", "title": "Face-to-Face Tabletop Remote Collaboration in Mixed Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2007/04538823/12OmNBr4eCn", "parentPublication": { "id": "proceedings/ismar/2007/1749/0", "title": "2007 6th IEEE and ACM International Symposium on Mixed and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2010/9343/0/05643588", "title": "Augmentation of check in/out model for remote collaboration with Mixed Reality", "doi": null, "abstractUrl": 
"/proceedings-article/ismar/2010/05643588/12OmNC4eSy7", "parentPublication": { "id": "proceedings/ismar/2010/9343/0", "title": "2010 IEEE International Symposium on Mixed and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2014/6184/0/06948451", "title": "[Poster] Turbidity-based aerial perspective rendering for mixed reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2014/06948451/12OmNqyDjpg", "parentPublication": { "id": "proceedings/ismar/2014/6184/0", "title": "2014 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aina/2012/4651/0/4651a663", "title": "Instruction for Remote MR Cooperative Work with Captured Still Worker's View Video", "doi": null, "abstractUrl": "/proceedings-article/aina/2012/4651a663/12OmNxdDFSs", "parentPublication": { "id": "proceedings/aina/2012/4651/0", "title": "2012 IEEE 26th International Conference on Advanced Information Networking and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2008/1971/0/04480753", "title": "Symmetric Model of Remote Collaborative MR Using Tangible Replicas", "doi": null, "abstractUrl": "/proceedings-article/vr/2008/04480753/12OmNyL0TDr", "parentPublication": { "id": "proceedings/vr/2008/1971/0", "title": "IEEE Virtual Reality 2008", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049700", "title": "Using Virtual Replicas to Improve Mixed Reality Remote Collaboration", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049700/1KYoAxyw5c4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049704", "title": "A 
Survey on Remote Assistance and Training in Mixed Reality Environments", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049704/1KYowCHxUtO", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798128", "title": "Supporting Visual Annotation Cues in a Live 360 Panorama-based Mixed Reality Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798128/1cJ1aXJnUyI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mipr/2021/1865/0/186500a399", "title": "Detecting and Preventing Faked Mixed Reality", "doi": null, "abstractUrl": "/proceedings-article/mipr/2021/186500a399/1xPsmX6Ouvm", "parentPublication": { "id": "proceedings/mipr/2021/1865/0", "title": "2021 IEEE 4th International Conference on Multimedia Information Processing and Retrieval (MIPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a274", "title": "A Mixed-Reality System to Promote Child Engagement in Remote Intergenerational Storytelling", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a274/1yeQMxSyLp6", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1eSlyst", "doi": "10.1109/VR.2018.8446391", "title": "Visual Perception of Real World Depth Map Resolution for Mixed Reality Rendering", "normalizedTitle": "Visual Perception of Real World Depth Map Resolution for Mixed Reality Rendering", "abstract": "Compositing virtual objects into photographs with known real world geometry is a common task in mixed reality (MR) applications. This geometry enables rendering of global illumination effects, such as mutual lighting, shadowing, and occlusions between the background photograph and virtual objects. Obtaining high fidelity geometric representations of the real world can be a costly procedure, and is often approximated with depth data. However, it is not clear how much fidelity the depth data should have in order to maintain high visual quality in MR rendering. in this paper, we investigate the relationship between real world depth fidelity and visual quality in MR rendering. We do this by conducting a series of user experiments that measure how seamlessly virtual objects are blended with the background under varying depth resolutions. We independently evaluate the noticeability of multiple composition artifacts that occur with approximate depth. Perceptual thresholds in depth resolution are then obtained for each artifact. The findings can be used to inform trade-off decisions for optimising depth acquisition pipelines in MR applications.", "abstracts": [ { "abstractType": "Regular", "content": "Compositing virtual objects into photographs with known real world geometry is a common task in mixed reality (MR) applications. 
This geometry enables rendering of global illumination effects, such as mutual lighting, shadowing, and occlusions between the background photograph and virtual objects. Obtaining high fidelity geometric representations of the real world can be a costly procedure, and is often approximated with depth data. However, it is not clear how much fidelity the depth data should have in order to maintain high visual quality in MR rendering. in this paper, we investigate the relationship between real world depth fidelity and visual quality in MR rendering. We do this by conducting a series of user experiments that measure how seamlessly virtual objects are blended with the background under varying depth resolutions. We independently evaluate the noticeability of multiple composition artifacts that occur with approximate depth. Perceptual thresholds in depth resolution are then obtained for each artifact. The findings can be used to inform trade-off decisions for optimising depth acquisition pipelines in MR applications.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Compositing virtual objects into photographs with known real world geometry is a common task in mixed reality (MR) applications. This geometry enables rendering of global illumination effects, such as mutual lighting, shadowing, and occlusions between the background photograph and virtual objects. Obtaining high fidelity geometric representations of the real world can be a costly procedure, and is often approximated with depth data. However, it is not clear how much fidelity the depth data should have in order to maintain high visual quality in MR rendering. in this paper, we investigate the relationship between real world depth fidelity and visual quality in MR rendering. We do this by conducting a series of user experiments that measure how seamlessly virtual objects are blended with the background under varying depth resolutions. 
We independently evaluate the noticeability of multiple composition artifacts that occur with approximate depth. Perceptual thresholds in depth resolution are then obtained for each artifact. The findings can be used to inform trade-off decisions for optimising depth acquisition pipelines in MR applications.", "fno": "08446391", "keywords": [ "Geometry", "Rendering Computer Graphics", "Virtual Reality", "Visual Perception", "Mixed Reality Applications", "Global Illumination Effects", "Mutual Lighting", "Background Photograph", "High Fidelity Geometric Representations", "MR Rendering", "World Depth Fidelity", "Seamlessly Virtual Objects", "Depth Resolution", "Multiple Composition Artifacts", "Approximate Depth", "Visual Perception", "Real World Depth Map Resolution", "Mixed Reality Rendering", "High Visual Quality", "Depth Acquisition Pipelines", "Rendering Computer Graphics", "Geometry", "Lighting", "Virtual Reality", "Visualization", "Three Dimensional Displays", "Image Resolution", "Computing Methodologies Computer Graphics Graphics Systems And Interfaces Mixed Augmented Reality", "Computing Methodologies Computer Graphics Graphics Systems And Interfaces Perception" ], "authors": [ { "affiliation": "Victoria University of Wellington, Computational Media Innovation Centre, NZ", "fullName": "Lohit Petikam", "givenName": "Lohit", "surname": "Petikam", "__typename": "ArticleAuthorType" }, { "affiliation": "Victoria University of Wellington, Computational Media Innovation Centre, NZ", "fullName": "Andrew Chalmers", "givenName": "Andrew", "surname": "Chalmers", "__typename": "ArticleAuthorType" }, { "affiliation": "DreamFlux, NZ", "fullName": "Taeyhun Rhee", "givenName": "Taeyhun", "surname": "Rhee", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "401-408", "year": "2018", "issn": null, "isbn": 
"978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446058", "articleId": "13bd1ftOBCG", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446300", "articleId": "13bd1fdV4lD", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icccnt/2014/2696/0/06963150", "title": "Reprojection of textured depth map for network rendering", "doi": null, "abstractUrl": "/proceedings-article/icccnt/2014/06963150/12OmNBuL1h7", "parentPublication": { "id": "proceedings/icccnt/2014/2696/0", "title": "2014 5th International Conference on Computing, Communication and Networking Technologies (ICCCNT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2010/6846/0/05444706", "title": "Evaluating depth perception of photorealistic mixed reality visualizations for occluded objects in outdoor environments", "doi": null, "abstractUrl": "/proceedings-article/3dui/2010/05444706/12OmNsd6vhN", "parentPublication": { "id": "proceedings/3dui/2010/6846/0", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/apwcs/2010/4003/0/4003a147", "title": "A New Virtual View Rendering Method Based on Depth Image", "doi": null, "abstractUrl": "/proceedings-article/apwcs/2010/4003a147/12OmNy6HQX6", "parentPublication": { "id": "proceedings/apwcs/2010/4003/0", "title": "Wearable Computing Systems, Asia-Pacific Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07829404", "title": "MR360: Mixed Reality Rendering for 360&#x00B0; Panoramic Videos", "doi": null, "abstractUrl": "/journal/tg/2017/04/07829404/13rRUwhHcQW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & 
Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2007/06/v1344", "title": "Enhancing Depth-Perception with Flexible Volumetric Halos", "doi": null, "abstractUrl": "/journal/tg/2007/06/v1344/13rRUygT7ss", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2005/2372/2/01467605", "title": "RGB-Z: mapping a sparse depth map to a high resolution RGB camera image", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2005/01467605/1htC5iEJs1W", "parentPublication": { "id": "proceedings/cvpr/2005/2372/2", "title": "2005 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'05)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/10/09115833", "title": "Reconstructing Reflection Maps Using a Stacked-CNN for Mixed Reality Rendering", "doi": null, "abstractUrl": "/journal/tg/2021/10/09115833/1kBgVhAEmeA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/12/09123589", "title": "An Improved Augmented-Reality Framework for Differential Rendering Beyond the Lambertian-World Assumption", "doi": null, "abstractUrl": "/journal/tg/2021/12/09123589/1kTxwwg0epW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a064", "title": "The Effects of Object Shape, Fidelity, Color, and Luminance on Depth Perception in Handheld Mobile Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a064/1pysxPMqyTm", 
"parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/12/09463728", "title": "Color Contrast Enhanced Rendering for Optical See-Through Head-Mounted Displays", "doi": null, "abstractUrl": "/journal/tg/2022/12/09463728/1uFxo1ImlpK", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "17D45VtKirI", "title": "2018 International Conference on Cyberworlds (CW)", "acronym": "cw", "groupId": "1000175", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "17D45Wda7hc", "doi": "10.1109/CW.2018.00034", "title": "Glossy Reflections for Mixed Reality Environments on Mobile Devices", "normalizedTitle": "Glossy Reflections for Mixed Reality Environments on Mobile Devices", "abstract": "Glossy reflections of the surroundings play a major role when trying to achieve a seamless fusion of real and virtual objects in Mixed Reality (MR) environments. Traditionally, the necessary information about the ambiance is captured using mirrored balls, HDR cameras, fish-eye lenses, RGB-D cameras or 360-degree cameras. While these approaches allow for pretty good results, they require a rather complex setup. Our approach is based on a single RGB camera capturing the environmental lighting at a certain location within the scene. Therefore, we apply a precomputation step generating a 360-degree environment map and combine it with a camera-based image stitching for a continuous enhancement and update of the lighting information. We show that our approach allows for realistic and high-quality reflections within an AR/MR environment in real time even on mobile devices.", "abstracts": [ { "abstractType": "Regular", "content": "Glossy reflections of the surroundings play a major role when trying to achieve a seamless fusion of real and virtual objects in Mixed Reality (MR) environments. Traditionally, the necessary information about the ambiance is captured using mirrored balls, HDR cameras, fish-eye lenses, RGB-D cameras or 360-degree cameras. While these approaches allow for pretty good results, they require a rather complex setup. Our approach is based on a single RGB camera capturing the environmental lighting at a certain location within the scene. 
Therefore, we apply a precomputation step generating a 360-degree environment map and combine it with a camera-based image stitching for a continuous enhancement and update of the lighting information. We show that our approach allows for realistic and high-quality reflections within an AR/MR environment in real time even on mobile devices.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Glossy reflections of the surroundings play a major role when trying to achieve a seamless fusion of real and virtual objects in Mixed Reality (MR) environments. Traditionally, the necessary information about the ambiance is captured using mirrored balls, HDR cameras, fish-eye lenses, RGB-D cameras or 360-degree cameras. While these approaches allow for pretty good results, they require a rather complex setup. Our approach is based on a single RGB camera capturing the environmental lighting at a certain location within the scene. Therefore, we apply a precomputation step generating a 360-degree environment map and combine it with a camera-based image stitching for a continuous enhancement and update of the lighting information. 
We show that our approach allows for realistic and high-quality reflections within an AR/MR environment in real time even on mobile devices.", "fno": "731500a138", "keywords": [ "Cameras", "Lighting", "Streaming Media", "Image Reconstruction", "Table Lookup", "Visualization", "Virtual Reality", "Augmented Reality", "Enhanced Reality", "Visualization", "Image Fusion" ], "authors": [ { "affiliation": null, "fullName": "Tobias Schwandt", "givenName": "Tobias", "surname": "Schwandt", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Christian Kunert", "givenName": "Christian", "surname": "Kunert", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Wolfgang Broll", "givenName": "Wolfgang", "surname": "Broll", "__typename": "ArticleAuthorType" } ], "idPrefix": "cw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-10-01T00:00:00", "pubType": "proceedings", "pages": "138-143", "year": "2018", "issn": null, "isbn": "978-1-5386-7315-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "731500a132", "articleId": "17D45WYQJ9q", "__typename": "AdjacentArticleType" }, "next": { "fno": "731500a144", "articleId": "17D45XeKgrv", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ismar/2016/3641/0/3641a037", "title": "A Single Camera Image Based Approach for Glossy Reflections in Mixed Reality Applications", "doi": null, "abstractUrl": "/proceedings-article/ismar/2016/3641a037/12OmNrJAdMm", "parentPublication": { "id": "proceedings/ismar/2016/3641/0", "title": "2016 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07829404", "title": "MR360: Mixed Reality Rendering for 360&#x00B0; Panoramic Videos", "doi": 
null, "abstractUrl": "/journal/tg/2017/04/07829404/13rRUwhHcQW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040576", "title": "Reflective and Refractive Objects for Mixed Reality", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040576/13rRUxYINf9", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699293", "title": "Probeless and Realistic Mixed Reality Application in Presence of Dynamic Light Sources", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699293/19F1LW7sJEc", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798002", "title": "Real-time Underwater Caustics for Mixed Reality 360&#x00B0; Videos", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798002/1cJ0UOVs3Ly", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798067", "title": "OmniMR: Omnidirectional Mixed Reality with Spatially-Varying Environment Reflections from Moving 360&#x00B0; Video Cameras", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798067/1cJ1cnBEFb2", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/ismar/2019/0987/0/08943651", "title": "Real-Time Mixed Reality Rendering for Underwater 360&#x00B0; Videos", "doi": null, "abstractUrl": "/proceedings-article/ismar/2019/08943651/1grOLwL1jt6", "parentPublication": { "id": "proceedings/ismar/2019/0987/0", "title": "2019 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/02/09018202", "title": "Detecting Specular Reflections and Cast Shadows to Estimate Reflectance and Illumination of Dynamic Indoor Scenes", "doi": null, "abstractUrl": "/journal/tg/2022/02/09018202/1hN4BrDSVHi", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090568", "title": "Real-time Illumination Estimation for Mixed Reality on Mobile Devices", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090568/1jIxuGbpWa4", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/10/09115833", "title": "Reconstructing Reflection Maps Using a Stacked-CNN for Mixed Reality Rendering", "doi": null, "abstractUrl": "/journal/tg/2021/10/09115833/1kBgVhAEmeA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1CJbEwHHqEg", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1CJbLHfeIsE", "doi": "10.1109/VR51125.2022.00081", "title": "Distortion-free Mid-air Image Inside Refractive Surface and on Reflective Surface", "normalizedTitle": "Distortion-free Mid-air Image Inside Refractive Surface and on Reflective Surface", "abstract": "In this study, we propose an approach to display a distortion-free mid-air image inside a transparent refractive object and on a curved reflective surface. We compensate for the distortion by generating a light source image that cancels the distortions in the mid-air image caused by refraction and reflection. The light source image is generated via ray-tracing simulation by transmitting the desired view image to the mid-air imaging system, which includes distortive surfaces, and by receiving the transmitted image at the light source position. The proposed methods can be applied to dynamic images using a light source image as a UV map in texture mapping. Finally, we present the results of an evaluation of our method performed in an actual optical system using the generated light source image, which visually demonstrate the effectiveness of the proposed approach.", "abstracts": [ { "abstractType": "Regular", "content": "In this study, we propose an approach to display a distortion-free mid-air image inside a transparent refractive object and on a curved reflective surface. We compensate for the distortion by generating a light source image that cancels the distortions in the mid-air image caused by refraction and reflection. 
The light source image is generated via ray-tracing simulation by transmitting the desired view image to the mid-air imaging system, which includes distortive surfaces, and by receiving the transmitted image at the light source position. The proposed methods can be applied to dynamic images using a light source image as a UV map in texture mapping. Finally, we present the results of an evaluation of our method performed in an actual optical system using the generated light source image, which visually demonstrate the effectiveness of the proposed approach.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this study, we propose an approach to display a distortion-free mid-air image inside a transparent refractive object and on a curved reflective surface. We compensate for the distortion by generating a light source image that cancels the distortions in the mid-air image caused by refraction and reflection. The light source image is generated via ray-tracing simulation by transmitting the desired view image to the mid-air imaging system, which includes distortive surfaces, and by receiving the transmitted image at the light source position. The proposed methods can be applied to dynamic images using a light source image as a UV map in texture mapping. 
Finally, we present the results of an evaluation of our method performed in an actual optical system using the generated light source image, which visually demonstrate the effectiveness of the proposed approach.", "fno": "961700a606", "keywords": [ "Image Texture", "Light Refraction", "Light Sources", "Ray Tracing", "Refractive Index", "Transparency", "Distortion Free Mid Air Image", "Refractive Surface", "Transparent Refractive Object", "Curved Reflective Surface", "Reflection", "Desired View Image", "Mid Air Imaging System", "Distortive Surfaces", "Transmitted Image", "Light Source Position", "Dynamic Images", "Generated Light Source Image", "Solid Modeling", "Optical Distortion", "Virtual Reality", "Ray Tracing", "User Interfaces", "Distortion", "Optical Imaging", "Computing Methodologies", "Computer Graphics", "Graphics Systems And Interfaces", "Mixed Augmented Reality", "Rendering", "Ray Tracing" ], "authors": [ { "affiliation": "The University of Electro-Communications", "fullName": "Shunji Kiuchi", "givenName": "Shunji", "surname": "Kiuchi", "__typename": "ArticleAuthorType" }, { "affiliation": "The University of Electro-Communications", "fullName": "Naoya Koizumi", "givenName": "Naoya", "surname": "Koizumi", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-03-01T00:00:00", "pubType": "proceedings", "pages": "606-614", "year": "2022", "issn": null, "isbn": "978-1-6654-9617-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [ { "id": "1CJbLuPqzHG", "name": "pvr202296170-09756834s1-mm_961700a606.zip", "size": "98 MB", "location": "https://www.computer.org/csdl/api/v1/extra/pvr202296170-09756834s1-mm_961700a606.zip", "__typename": "WebExtraType" } ], "adjacentArticles": { "previous": { "fno": "961700a599", "articleId": "1CJchTZ5dEk", "__typename": "AdjacentArticleType" }, "next": { "fno": "961700a615", "articleId": 
"1CJbHx2dVu0", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ism/2013/2171/0/06746465", "title": "A Fluorescent Mid-air Screen", "doi": null, "abstractUrl": "/proceedings-article/ism/2013/06746465/12OmNvqmUKr", "parentPublication": { "id": "proceedings/ism/2013/2171/0", "title": "2013 IEEE International Symposium on Multimedia (ISM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccvw/2011/0063/0/06130279", "title": "Mid-air interactive display using modulated display light", "doi": null, "abstractUrl": "/proceedings-article/iccvw/2011/06130279/12OmNy68EF6", "parentPublication": { "id": "proceedings/iccvw/2011/0063/0", "title": "2011 IEEE International Conference on Computer Vision Workshops (ICCV Workshops)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040576", "title": "Reflective and Refractive Objects for Mixed Reality", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040576/13rRUxYINf9", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a722", "title": "AIR-range: Arranging optical systems to present mid-AIR images with continuous luminance on and above a tabletop", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a722/1CJd3cfYsbm", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a637", "title": "Blending On-Body and Mid-Air Interaction in Virtual Reality", "doi": null, "abstractUrl": 
"/proceedings-article/ismar/2022/532500a637/1JrRmvhGko0", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798235", "title": "Optical System That Forms a Mid-Air Image Moving at High Speed in the Depth Direction", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798235/1cJ0IDINHJm", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089599", "title": "An Optical Design for Avatar-User Co-axial Viewpoint Telepresence", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089599/1jIx8SwZIuQ", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090633", "title": "Micro-mirror array-plates simulation using ray tracing for mid-air imaging", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090633/1jIxsrAlhsY", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a289", "title": "AR Interfaces for Mid-Air 6-DoF Alignment: Ergonomics-Aware Design and Evaluation", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a289/1pysuoUYBhK", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a826", "title": "Mid-Air Finger Sketching for Tree Modeling", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a826/1tuBbGEUWm4", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1MBRelnGflK", "title": "2022 International Conference on Cloud Computing, Big Data Applications and Software Engineering (CBASE)", "acronym": "cbase", "groupId": "10092532", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1MBRh4FHK3m", "doi": "10.1109/CBASE57816.2022.00013", "title": "Interactive Image-Space Rendering of Dispersions", "normalizedTitle": "Interactive Image-Space Rendering of Dispersions", "abstract": "Refractions, caustics and dispersions are very important for rendering global illumination images. Caustic is a phenomenon in which multiple light rays, through refractive or reflective objects, converge at a common region, thus creating a bright area. Dispersion is a special caustic effect that takes into account the different refractive indices of monochromatic lights which have different wavelengths. Although there are various works for rendering caustics interactively, there are few algorithms for dispersions. In this paper, we extend the image- space rendering algorithm for caustic to render the dispersion in real time. Rendered results prove that our method is able to render plausible images for many interactive applications.", "abstracts": [ { "abstractType": "Regular", "content": "Refractions, caustics and dispersions are very important for rendering global illumination images. Caustic is a phenomenon in which multiple light rays, through refractive or reflective objects, converge at a common region, thus creating a bright area. Dispersion is a special caustic effect that takes into account the different refractive indices of monochromatic lights which have different wavelengths. Although there are various works for rendering caustics interactively, there are few algorithms for dispersions. In this paper, we extend the image- space rendering algorithm for caustic to render the dispersion in real time. 
Rendered results prove that our method is able to render plausible images for many interactive applications.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Refractions, caustics and dispersions are very important for rendering global illumination images. Caustic is a phenomenon in which multiple light rays, through refractive or reflective objects, converge at a common region, thus creating a bright area. Dispersion is a special caustic effect that takes into account the different refractive indices of monochromatic lights which have different wavelengths. Although there are various works for rendering caustics interactively, there are few algorithms for dispersions. In this paper, we extend the image- space rendering algorithm for caustic to render the dispersion in real time. Rendered results prove that our method is able to render plausible images for many interactive applications.", "fno": "963900a027", "keywords": [ "Graphics", "Software Algorithms", "Lighting", "Entertainment Industry", "Ray Tracing", "Rendering Computer Graphics", "Approximation Algorithms", "Refraction", "Caustics", "Dispersions" ], "authors": [ { "affiliation": "Jiangnan University,School of Artificial Intelligence and Computer Science,Wuxi,China", "fullName": "Yuanmeng Luo", "givenName": "Yuanmeng", "surname": "Luo", "__typename": "ArticleAuthorType" } ], "idPrefix": "cbase", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-09-01T00:00:00", "pubType": "proceedings", "pages": "27-31", "year": "2022", "issn": null, "isbn": "979-8-3503-9639-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "963900a023", "articleId": "1MBRfahgUmY", "__typename": "AdjacentArticleType" }, "next": { "fno": "963900a032", "articleId": "1MBRhxbP7he", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": 
"proceedings/pg/2007/3009/0/30090181", "title": "Image-Space Caustics and Curvatures", "doi": null, "abstractUrl": "/proceedings-article/pg/2007/30090181/12OmNBsue5e", "parentPublication": { "id": "proceedings/pg/2007/3009/0", "title": "Computer Graphics and Applications, Pacific Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2004/2140/0/21400039", "title": "A Rapid Rendering Method for Caustics Arising from Refraction by Transparent Objects", "doi": null, "abstractUrl": "/proceedings-article/cw/2004/21400039/12OmNvTTc81", "parentPublication": { "id": "proceedings/cw/2004/2140/0", "title": "2004 International Conference on Cyberworlds", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/pg/2004/2234/0/22340143", "title": "Interactive Caustics Using Local Precomputed Irradiance", "doi": null, "abstractUrl": "/proceedings-article/pg/2004/22340143/12OmNwKGAmu", "parentPublication": { "id": "proceedings/pg/2004/2234/0", "title": "Computer Graphics and Applications, Pacific Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cgi/2003/1946/0/19460190", "title": "A Real-Time Refraction Renderer for Volume Objects Using a Polygon-Rendering Scheme", "doi": null, "abstractUrl": "/proceedings-article/cgi/2003/19460190/12OmNx76TKO", "parentPublication": { "id": "proceedings/cgi/2003/1946/0", "title": "Computer Graphics International Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2007/02/v0272", "title": "Caustics Mapping: An Image-Space Technique for Real-Time Caustics", "doi": null, "abstractUrl": "/journal/tg/2007/02/v0272/13rRUxASuhs", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"trans/tg/2007/01/v0046", "title": "Interactive Approximate Rendering of Reflections, Refractions, and Caustics", "doi": null, "abstractUrl": "/journal/tg/2007/01/v0046/13rRUxBJhmJ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040576", "title": "Reflective and Refractive Objects for Mixed Reality", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040576/13rRUxYINf9", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/01/08017622", "title": "Interactive Dynamic Volume Illumination with Refraction and Caustics", "doi": null, "abstractUrl": "/journal/tg/2018/01/08017622/13rRUyfKIHU", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/10077440", "title": "NeRC: Rendering Planar Caustics by Learning Implicit Neural Representations", "doi": null, "abstractUrl": "/journal/tg/5555/01/10077440/1LFQ6PMpeik", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798002", "title": "Real-time Underwater Caustics for Mixed Reality 360&#x00B0; Videos", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798002/1cJ0UOVs3Ly", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ0UOVs3Ly", "doi": "10.1109/VR.2019.8798002", "title": "Real-time Underwater Caustics for Mixed Reality 360&#x00B0; Videos", "normalizedTitle": "Real-time Underwater Caustics for Mixed Reality 360° Videos", "abstract": "We present a novel mixed reality (MR) rendering solution that illuminates and blends virtual objects into underwater 360&#x00B0; video with real-time underwater caustic effects. Image-based lighting is used in conjunction with underwater caustics to provide automatic ambient and high frequency underwater lighting. This ensures that the caustics and virtual objects are lit and blend into each frame of the video semi-automatically and in real-time. We provide an interactive interface with intuitive parameter controls to fine tune caustics to match with the background video.", "abstracts": [ { "abstractType": "Regular", "content": "We present a novel mixed reality (MR) rendering solution that illuminates and blends virtual objects into underwater 360&#x00B0; video with real-time underwater caustic effects. Image-based lighting is used in conjunction with underwater caustics to provide automatic ambient and high frequency underwater lighting. This ensures that the caustics and virtual objects are lit and blend into each frame of the video semi-automatically and in real-time. We provide an interactive interface with intuitive parameter controls to fine tune caustics to match with the background video.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We present a novel mixed reality (MR) rendering solution that illuminates and blends virtual objects into underwater 360° video with real-time underwater caustic effects. 
Image-based lighting is used in conjunction with underwater caustics to provide automatic ambient and high frequency underwater lighting. This ensures that the caustics and virtual objects are lit and blend into each frame of the video semi-automatically and in real-time. We provide an interactive interface with intuitive parameter controls to fine tune caustics to match with the background video.", "fno": "08798002", "keywords": [ "Augmented Reality", "Rendering Computer Graphics", "Video Signal Processing", "Real Time Underwater Caustic Effects", "Image Based Lighting", "Underwater Caustics", "Automatic Ambient", "Background Video", "Mixed Reality Rendering Solution", "Mixed Reality 360 Degree Videos", "Lighting", "Real Time Systems", "Streaming Media", "Rendering Computer Graphics", "Sea Surface", "Virtual Reality", "Surface Waves", "Computing Methodologies", "Graphics Systems And Interfaces", "Mixed Augmented Reality" ], "authors": [ { "affiliation": "Computational Media Innovation Centre, Victoria University of Wellington, NZ", "fullName": "Stephen Thompson", "givenName": "Stephen", "surname": "Thompson", "__typename": "ArticleAuthorType" }, { "affiliation": "Computational Media Innovation Centre, Victoria University of Wellington, NZ", "fullName": "Andrew Chalmers", "givenName": "Andrew", "surname": "Chalmers", "__typename": "ArticleAuthorType" }, { "affiliation": "Computational Media Innovation Centre, Victoria University of Wellington, NZ", "fullName": "Taehyun Rhee", "givenName": "Taehyun", "surname": "Rhee", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "1191-1192", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08798123", "articleId": "1cJ0VB3SBlS", 
"__typename": "AdjacentArticleType" }, "next": { "fno": "08798309", "articleId": "1cJ1adrlIoo", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/iccv/2009/4420/0/05459166", "title": "Stereo from flickering caustics", "doi": null, "abstractUrl": "/proceedings-article/iccv/2009/05459166/12OmNwswg2Z", "parentPublication": { "id": "proceedings/iccv/2009/4420/0", "title": "2009 IEEE 12th International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvaui/2016/5870/0/5870a037", "title": "Surface Stereo for Shallow Underwater Scenes", "doi": null, "abstractUrl": "/proceedings-article/cvaui/2016/5870a037/12OmNyz5K1v", "parentPublication": { "id": "proceedings/cvaui/2016/5870/0", "title": "2016 ICPR 2nd Workshop on Computer Vision for Analysis of Underwater Imagery (CVAUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2016/8851/0/8851d764", "title": "The Next Best Underwater View", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2016/8851d764/12OmNzaQogC", "parentPublication": { "id": "proceedings/cvpr/2016/8851/0", "title": "2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08445842", "title": "360&#x00B0; Video - Light Design Experience", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08445842/13bd1eW2l8T", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07829404", "title": "MR360: Mixed Reality Rendering for 360&#x00B0; Panoramic Videos", "doi": null, "abstractUrl": 
"/journal/tg/2017/04/07829404/13rRUwhHcQW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2007/02/v0272", "title": "Caustics Mapping: An Image-Space Technique for Real-Time Caustics", "doi": null, "abstractUrl": "/journal/tg/2007/02/v0272/13rRUxASuhs", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/10077440", "title": "NeRC: Rendering Planar Caustics by Learning Implicit Neural Representations", "doi": null, "abstractUrl": "/journal/tg/5555/01/10077440/1LFQ6PMpeik", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cbase/2022/9639/0/963900a027", "title": "Interactive Image-Space Rendering of Dispersions", "doi": null, "abstractUrl": "/proceedings-article/cbase/2022/963900a027/1MBRh4FHK3m", "parentPublication": { "id": "proceedings/cbase/2022/9639/0", "title": "2022 International Conference on Cloud Computing, Big Data Applications and Software Engineering (CBASE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798067", "title": "OmniMR: Omnidirectional Mixed Reality with Spatially-Varying Environment Reflections from Moving 360&#x00B0; Video Cameras", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798067/1cJ1cnBEFb2", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2019/0987/0/08943651", "title": "Real-Time Mixed 
Reality Rendering for Underwater 360&#x00B0; Videos", "doi": null, "abstractUrl": "/proceedings-article/ismar/2019/08943651/1grOLwL1jt6", "parentPublication": { "id": "proceedings/ismar/2019/0987/0", "title": "2019 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNAWpyrk", "doi": "10.1109/VR.2015.7223379", "title": "Avatar anthropomorphism and illusion of body ownership in VR", "normalizedTitle": "Avatar anthropomorphism and illusion of body ownership in VR", "abstract": "In this paper we present a novel experiment to explore the impact of avatar realism on the illusion of virtual body ownership (IVBO) in immersive virtual environments, with full-body avatar embodiment and freedom of movement. We evaluated four distinct avatars (a humanoid robot, a block-man, and both male and female human adult) presenting an increasing level of anthropomorphism in their detailed compositions Our results revealed that each avatar elicited a relatively high level of illusion. However both machine-like and cartoon-like avatars elicited an equivalent IVBO, slightly superior to the human-ones. A realistic human appearance is therefore not a critical top-down factor of IVBO, and could lead to an Uncanney Valley effect.", "abstracts": [ { "abstractType": "Regular", "content": "In this paper we present a novel experiment to explore the impact of avatar realism on the illusion of virtual body ownership (IVBO) in immersive virtual environments, with full-body avatar embodiment and freedom of movement. We evaluated four distinct avatars (a humanoid robot, a block-man, and both male and female human adult) presenting an increasing level of anthropomorphism in their detailed compositions Our results revealed that each avatar elicited a relatively high level of illusion. However both machine-like and cartoon-like avatars elicited an equivalent IVBO, slightly superior to the human-ones. 
A realistic human appearance is therefore not a critical top-down factor of IVBO, and could lead to an Uncanney Valley effect.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this paper we present a novel experiment to explore the impact of avatar realism on the illusion of virtual body ownership (IVBO) in immersive virtual environments, with full-body avatar embodiment and freedom of movement. We evaluated four distinct avatars (a humanoid robot, a block-man, and both male and female human adult) presenting an increasing level of anthropomorphism in their detailed compositions Our results revealed that each avatar elicited a relatively high level of illusion. However both machine-like and cartoon-like avatars elicited an equivalent IVBO, slightly superior to the human-ones. A realistic human appearance is therefore not a critical top-down factor of IVBO, and could lead to an Uncanney Valley effect.", "fno": "07223379", "keywords": [ "Avatars", "Anthropomorphism", "Visualization", "Games", "Skin", "Color", "H 5 1 Information Systems Artificial Augmented And Virtual Realities" ], "authors": [ { "affiliation": "HCI Group, University of Würzburg, Germany", "fullName": "Jean-Luc Lugrin", "givenName": "Jean-Luc", "surname": "Lugrin", "__typename": "ArticleAuthorType" }, { "affiliation": "HCI Group, University of Würzburg, Germany", "fullName": "Johanna Latt", "givenName": "Johanna", "surname": "Latt", "__typename": "ArticleAuthorType" }, { "affiliation": "HCI Group, University of Würzburg, Germany", "fullName": "Marc Erich Latoschik", "givenName": "Marc Erich", "surname": "Latoschik", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "229-230", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], 
"adjacentArticles": { "previous": { "fno": "07223378", "articleId": "12OmNBdruc4", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223380", "articleId": "12OmNzt0Iy7", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2016/0836/0/07504735", "title": "FaceBo: Real-time face and body tracking for faithful avatar synthesis", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504735/12OmNBRbkpf", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223378", "title": "Influence of avatar realism on stressful situation in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223378/12OmNBdruc4", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223377", "title": "Avatar embodiment realism and virtual fitness training", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223377/12OmNCcKQFn", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446448", "title": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446448/13bd1gzWkRR", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a770", "title": "Emotional Empathy and Facial Mimicry of Avatar Faces", "doi": null, "abstractUrl": 
"/proceedings-article/vrw/2022/840200a770/1CJdHd5yTSM", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a352", "title": "Effects of Optical See-Through Displays on Self-Avatar Appearance in Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a352/1J7WodvTPzy", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798346", "title": "Scrambled Body: A Method to Compare Full Body Illusion and Illusory Body Ownership of Body Parts", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798346/1cJ0NSIEQda", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798318", "title": "Evaluating Teacher Avatar Appearances in Educational VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798318/1cJ0P8vBhhm", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998352", "title": "Using Facial Animation to Increase the Enfacement Illusion and Avatar Self-Identification", "doi": null, "abstractUrl": "/journal/tg/2020/05/08998352/1hpPCCB7Bte", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090457", "title": "Affective Embodiment: The effect of avatar appearance and posture representation on emotions in VR", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090457/1jIxjXwO4HS", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNBOCWnu", "doi": "10.1109/VR.2015.7223405", "title": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "normalizedTitle": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "abstract": "An experiment investigated the subjective experiences of ownership and agency of a pair of virtual wings attached to a motion controlled avatar in an immersive virtual reality setup. A between groups comparison of two ways of controlling the movement of the wings and flight ability. One where the subjects achieved the wing motion and flight ability by using a hand-held video game controller and the other by moving the shoulder. Through four repetitions of a flight task with varying amounts of self-produced audio feedback (from the movement of the virtual limbs), the subjects evaluated their experienced embodiment of the wings on a body ownership and agency questionnaire. The results shows significant differences between the controllers in some of the questionnaire items and that adding self-produced sounds to the avatar, slightly changed the subjects evaluations.", "abstracts": [ { "abstractType": "Regular", "content": "An experiment investigated the subjective experiences of ownership and agency of a pair of virtual wings attached to a motion controlled avatar in an immersive virtual reality setup. A between groups comparison of two ways of controlling the movement of the wings and flight ability. One where the subjects achieved the wing motion and flight ability by using a hand-held video game controller and the other by moving the shoulder. 
Through four repetitions of a flight task with varying amounts of self-produced audio feedback (from the movement of the virtual limbs), the subjects evaluated their experienced embodiment of the wings on a body ownership and agency questionnaire. The results shows significant differences between the controllers in some of the questionnaire items and that adding self-produced sounds to the avatar, slightly changed the subjects evaluations.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "An experiment investigated the subjective experiences of ownership and agency of a pair of virtual wings attached to a motion controlled avatar in an immersive virtual reality setup. A between groups comparison of two ways of controlling the movement of the wings and flight ability. One where the subjects achieved the wing motion and flight ability by using a hand-held video game controller and the other by moving the shoulder. Through four repetitions of a flight task with varying amounts of self-produced audio feedback (from the movement of the virtual limbs), the subjects evaluated their experienced embodiment of the wings on a body ownership and agency questionnaire. 
The results shows significant differences between the controllers in some of the questionnaire items and that adding self-produced sounds to the avatar, slightly changed the subjects evaluations.", "fno": "07223405", "keywords": [ "Games", "Avatars", "Visualization", "Solid Modeling", "Virtual Environments", "Tracking", "Virtual Realities Audio Output", "H 5 1 Multimedia Information Systems Artificial" ], "authors": [ { "affiliation": "Aalborg University Copenhagen", "fullName": "Erik Sikstrom", "givenName": "Erik", "surname": "Sikstrom", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University Copenhagen", "fullName": "Amalia de Gotzen", "givenName": "Amalia", "surname": "de Gotzen", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University Copenhagen", "fullName": "Stefania Serafin", "givenName": "Stefania", "surname": "Serafin", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "281-282", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07223404", "articleId": "12OmNzBwGnY", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223406", "articleId": "12OmNAlvHUH", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892241", "title": "Prism 
aftereffects for throwing with a self-avatar in an immersive virtual environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892241/12OmNxy4N0w", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446448", "title": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446448/13bd1gzWkRR", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040583", "title": "Human Tails: Ownership and Control of Extended Humanoid Avatars", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040583/13rRUxYrbUF", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a772", "title": "Embodiment of an Avatar with Unnatural Arm Movements", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a772/1J7W9fEjd6g", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797787", "title": "The Effect of Hand Size and Interaction Modality on the Virtual Hand Illusion", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797787/1cJ179JUrPa", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797716", "title": "Reconciling Being in-Control vs. Being Helped for the Execution of Complex Movements in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797716/1cJ1dFOKU3m", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vs-games/2019/4540/0/08864545", "title": "Valkyrie Project: Flying Immersion in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vs-games/2019/08864545/1e5Zs5f020E", "parentPublication": { "id": "proceedings/vs-games/2019/4540/0", "title": "2019 11th International Conference on Virtual Worlds and Games for Serious Applications (VS-Games)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2020/04/09099981", "title": "Do You Feel Like Flying? A Study of Flying Perception in Virtual Reality for Future Game Development", "doi": null, "abstractUrl": "/magazine/cg/2020/04/09099981/1k93o1dO9WM", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwDACj6", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "acronym": "cts", "groupId": "1001747", "volume": "0", "displayVolume": "0", "year": "2016", "__typename": "ProceedingType" }, "article": { "id": "12OmNCm7BF7", "doi": "10.1109/CTS.2016.0110", "title": "Body Ownership in Virtual Reality", "normalizedTitle": "Body Ownership in Virtual Reality", "abstract": "The sense of proximity to a virtual context requires a diverse set of cues that gives a participant a sense of presence in a virtual space. The goal of our research is to provide a sense of body ownership of a virtual entity represented by a humanoid avatar or other virtual manifestation of a surrogate in virtual reality environment for sharing not only a space but also an experience. In essence, we seek a sense of Presence that transports the operator to the space inhabited by the virtual avatar in a virtual space. To arouse this sense of presence in a virtual space, we are testing a hypothesis that seeing one's own body from a first person point-of-view in the remote context is a critical element of \"being there.\" To achieve this we introduce a method to enhance the sensation of presence for the operator who inhabits a virtual avatar by increasing the sense of body ownership.", "abstracts": [ { "abstractType": "Regular", "content": "The sense of proximity to a virtual context requires a diverse set of cues that gives a participant a sense of presence in a virtual space. The goal of our research is to provide a sense of body ownership of a virtual entity represented by a humanoid avatar or other virtual manifestation of a surrogate in virtual reality environment for sharing not only a space but also an experience. In essence, we seek a sense of Presence that transports the operator to the space inhabited by the virtual avatar in a virtual space. 
To arouse this sense of presence in a virtual space, we are testing a hypothesis that seeing one's own body from a first person point-of-view in the remote context is a critical element of \"being there.\" To achieve this we introduce a method to enhance the sensation of presence for the operator who inhabits a virtual avatar by increasing the sense of body ownership.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The sense of proximity to a virtual context requires a diverse set of cues that gives a participant a sense of presence in a virtual space. The goal of our research is to provide a sense of body ownership of a virtual entity represented by a humanoid avatar or other virtual manifestation of a surrogate in virtual reality environment for sharing not only a space but also an experience. In essence, we seek a sense of Presence that transports the operator to the space inhabited by the virtual avatar in a virtual space. To arouse this sense of presence in a virtual space, we are testing a hypothesis that seeing one's own body from a first person point-of-view in the remote context is a critical element of \"being there.\" To achieve this we introduce a method to enhance the sensation of presence for the operator who inhabits a virtual avatar by increasing the sense of body ownership.", "fno": "07871048", "keywords": [ "Avatars", "Body Ownership", "Virtual Reality", "Humanoid Avatar", "Virtual Manifestation", "Virtual Avatar", "Sensation Of Presence Enhancement", "Thumb", "Collaboration", "Solid Modeling", "Mirrors", "Avatars", "Virtual Reality", "Body Ownership", "Presence", "Immersive Collaboration" ], "authors": [ { "affiliation": null, "fullName": "Sungchul Jung", "givenName": "Sungchul", "surname": "Jung", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Charles E. 
Hughes", "givenName": "Charles E.", "surname": "Hughes", "__typename": "ArticleAuthorType" } ], "idPrefix": "cts", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2016-10-01T00:00:00", "pubType": "proceedings", "pages": "597-600", "year": "2016", "issn": null, "isbn": "978-1-5090-2300-4", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07871047", "articleId": "12OmNrIrPlz", "__typename": "AdjacentArticleType" }, "next": { "fno": "07871049", "articleId": "12OmNyQ7G4Q", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2016/0836/0/07504689", "title": "The impact of a self-avatar on cognitive load in immersive virtual reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504689/12OmNviHKla", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444805", "title": "The contribution of real-time mirror reflections of motor actions on virtual body ownership in an immersive virtual environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444805/12OmNwoPtlH", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446229", "title": "Any &#x201C;Body&#x201D; There? 
Avatar Visibility Effects in a Virtual Reality Game", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446229/13bd1fHrlRx", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a260", "title": "The Effects of Avatar and Environment Design on Embodiment, Presence, Activation, and Task Load in a Virtual Reality Exercise Application", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a260/1JrRf0Dbcac", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049676", "title": "The Impact of Avatar and Environment Congruence on Plausibility, Embodiment, Presence, and the Proteus Effect in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049676/1KYosbnM8q4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798055", "title": "Embodying an Extra Virtual Body in Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798055/1cJ0Y0o1pO8", "parentPublication": { "id": 
"proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/04/09200517", "title": "On the Plausibility of Virtual Body Animation Features in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2022/04/09200517/1ndVuuNfI64", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a452", "title": "Studying the Inter-Relation Between Locomotion Techniques and Embodiment in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a452/1pysvNRUnD2", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a065", "title": "The Embodiment of Photorealistic Avatars Influences Female Body Weight Perception in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a065/1tuAAOZpdoQ", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNzcxYUW", "title": "2014 7th International Conference on Intelligent Computation Technology and Automation (ICICTA)", "acronym": "icicta", "groupId": "1002487", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNwpoFMM", "doi": "10.1109/ICICTA.2014.29", "title": "Analysis and Design of Humanoid Robot Dance", "normalizedTitle": "Analysis and Design of Humanoid Robot Dance", "abstract": "Dance robot ornamental and interesting to conquer the world, researchers of different countries have established laboratory to research and design the robot dance movements of harmonious and beautiful. In this paper, the generalized coordinate method is utilized to construct a kinematics model for the sideways, forward and Split handstand movement of a humanoid robot. The designed robot can rise and dance with music.", "abstracts": [ { "abstractType": "Regular", "content": "Dance robot ornamental and interesting to conquer the world, researchers of different countries have established laboratory to research and design the robot dance movements of harmonious and beautiful. In this paper, the generalized coordinate method is utilized to construct a kinematics model for the sideways, forward and Split handstand movement of a humanoid robot. The designed robot can rise and dance with music.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Dance robot ornamental and interesting to conquer the world, researchers of different countries have established laboratory to research and design the robot dance movements of harmonious and beautiful. In this paper, the generalized coordinate method is utilized to construct a kinematics model for the sideways, forward and Split handstand movement of a humanoid robot. 
The designed robot can rise and dance with music.", "fno": "6636a088", "keywords": [ "Joints", "Robot Kinematics", "Kinematics", "Humanoid Robots", "Hip", "Legged Locomotion", "Mimic Human", "Robot Dance", "Movement Design", "Kinematics Modeling", "Implementation", "Harmonious" ], "authors": [ { "affiliation": null, "fullName": "Huacben Wang", "givenName": "Huacben", "surname": "Wang", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Jiancyin Lu", "givenName": "Jiancyin", "surname": "Lu", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Zhao-Shan Yuan", "givenName": "Zhao-Shan", "surname": "Yuan", "__typename": "ArticleAuthorType" } ], "idPrefix": "icicta", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-10-01T00:00:00", "pubType": "proceedings", "pages": "88-91", "year": "2014", "issn": null, "isbn": "978-1-4799-6636-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "6636a084", "articleId": "12OmNz5JC9T", "__typename": "AdjacentArticleType" }, "next": { "fno": "6636a092", "articleId": "12OmNxA3YRn", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icarsc/2016/2255/0/07781957", "title": "How to Select a Suitable Action against Strong Pushes in Adult-Size Humanoid Robot: Learning from Past Experiences", "doi": null, "abstractUrl": "/proceedings-article/icarsc/2016/07781957/12OmNAolH2B", "parentPublication": { "id": "proceedings/icarsc/2016/2255/0", "title": "2016 International Conference on Autonomous Robot Systems and Competitions (ICARSC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icicta/2008/3357/1/3357a732", "title": "Study on Simulation for Humanoid Robot", "doi": null, "abstractUrl": "/proceedings-article/icicta/2008/3357a732/12OmNBE7MoS", 
"parentPublication": { "id": "proceedings/icicta/2008/3357/1", "title": "Intelligent Computation Technology and Automation, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/eurosim/2013/5073/0/5073a179", "title": "A Path Motion Planning for Humanoid Climbing Robot", "doi": null, "abstractUrl": "/proceedings-article/eurosim/2013/5073a179/12OmNxEBz0P", "parentPublication": { "id": "proceedings/eurosim/2013/5073/0", "title": "2013 8th EUROSIM Congress on Modelling and Simulation (EUROSIM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/06977439", "title": "Humanoid Robot Imitation with Pose Similarity Metric Learning", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/06977439/12OmNxXCGOy", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hase/2014/3466/0/3466a073", "title": "Modeling and Verification of Humanoid Robot Task Coordination", "doi": null, "abstractUrl": "/proceedings-article/hase/2014/3466a073/12OmNyRxFwS", "parentPublication": { "id": "proceedings/hase/2014/3466/0", "title": "2014 IEEE 15th International Symposium on High-Assurance Systems Engineering", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icccnt/2014/2696/0/06963121", "title": "Design and fabrication of bipedal robot", "doi": null, "abstractUrl": "/proceedings-article/icccnt/2014/06963121/12OmNylbouN", "parentPublication": { "id": "proceedings/icccnt/2014/2696/0", "title": "2014 5th International Conference on Computing, Communication and Networking Technologies (ICCCNT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/sbr-lars/2012/4906/0/4906a033", "title": "Explicit Analytic Solution for Inverse Kinematics of Bioloid Humanoid Robot", "doi": null, "abstractUrl": "/proceedings-article/sbr-lars/2012/4906a033/12OmNzd7bLt", "parentPublication": { "id": "proceedings/sbr-lars/2012/4906/0", "title": "Brazilian Robotics Symposium and Latin American Robotics Symposium (SBR-LARS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/sbr-lars-r/2016/3656/0/07783510", "title": "Whole-Body Modeling and Hierarchical Control of a Humanoid Robot Based on Dual Quaternion Algebra", "doi": null, "abstractUrl": "/proceedings-article/sbr-lars-r/2016/07783510/12OmNzlD9a1", "parentPublication": { "id": "proceedings/sbr-lars-r/2016/3656/0", "title": "2016 XIII Latin-American Robotics Symposium and IV Brazilian Robotics Symposium (LARS/SBR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icndc/2010/8382/0/05645366", "title": "Optimal Momentum Compensation with Waist Joint for Online Biped Gait Generating of Humanoid Robot", "doi": null, "abstractUrl": "/proceedings-article/icndc/2010/05645366/12OmNzlUKzP", "parentPublication": { "id": "proceedings/icndc/2010/8382/0", "title": "2010 First International Conference on Networking and Distributed Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icid/2021/2065/0/206500a389", "title": "Design and implementation of the imitation human shape dance robot", "doi": null, "abstractUrl": "/proceedings-article/icid/2021/206500a389/1AjTFhomauA", "parentPublication": { "id": "proceedings/icid/2021/2065/0", "title": "2021 2nd International Conference on Intelligent Design (ICID)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwseEQY", "title": "2010 First International Conference on Networking and Distributed Computing", "acronym": "icndc", "groupId": "1800193", "volume": "0", "displayVolume": "0", "year": "2010", "__typename": "ProceedingType" }, "article": { "id": "12OmNzlUKzP", "doi": "10.1109/ICNDC.2010.73", "title": "Optimal Momentum Compensation with Waist Joint for Online Biped Gait Generating of Humanoid Robot", "normalizedTitle": "Optimal Momentum Compensation with Waist Joint for Online Biped Gait Generating of Humanoid Robot", "abstract": "In this paper we introduce a method to generate whole body's linear and angular momentum, and use upper body to compensate for the momentum caused by lower-body prescribed trajectory for fast dynamic walk of humanoid robot. In this method, the system dynamic model is built with its performance indices, and the optimal momentum compensation (OPMC) is solved by preview control method. Experiments conducted both in simulation and on real robot \"Kong\" with a walking motion at the speed of 0.5km/h, proved the effectiveness of our method.", "abstracts": [ { "abstractType": "Regular", "content": "In this paper we introduce a method to generate whole body's linear and angular momentum, and use upper body to compensate for the momentum caused by lower-body prescribed trajectory for fast dynamic walk of humanoid robot. In this method, the system dynamic model is built with its performance indices, and the optimal momentum compensation (OPMC) is solved by preview control method. Experiments conducted both in simulation and on real robot \"Kong\" with a walking motion at the speed of 0.5km/h, proved the effectiveness of our method.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this paper we introduce a method to generate whole body's linear and angular momentum, and use upper body to compensate for the momentum caused by lower-body prescribed trajectory for fast dynamic walk of humanoid robot. 
In this method, the system dynamic model is built with its performance indices, and the optimal momentum compensation (OPMC) is solved by preview control method. Experiments conducted both in simulation and on real robot \"Kong\" with a walking motion at the speed of 0.5km/h, proved the effectiveness of our method.", "fno": "05645366", "keywords": [ "Acceleration Control", "Angular Momentum", "Gait Analysis", "Humanoid Robots", "Legged Locomotion", "Motion Control", "Predictive Control", "Robot Kinematics", "Optimal Momentum Compensation", "Waist Joint", "Online Biped Gait Analysis", "Humanoid Robot", "Preview Control Method", "Kong", "Walking Motion", "Motion Control", "Legged Locomotion", "Humanoid Robots", "Joints", "Robot Kinematics", "Trajectory", "Hip", "Humanoid Robot", "Momentum Compensation", "ZMP" ], "authors": [ { "affiliation": null, "fullName": "Yuanfei Xiang", "givenName": "Yuanfei", "surname": "Xiang", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Rong Xiong", "givenName": "Rong", "surname": "Xiong", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Yichao Sun", "givenName": "Yichao", "surname": "Sun", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Jun Wu", "givenName": "Jun", "surname": "Wu", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Jian Chu", "givenName": "Jian", "surname": "Chu", "__typename": "ArticleAuthorType" } ], "idPrefix": "icndc", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2010-10-01T00:00:00", "pubType": "proceedings", "pages": "341-345", "year": "2010", "issn": "2165-4999", "isbn": "978-1-4244-8382-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "05645365", "articleId": "12OmNAoDibc", "__typename": "AdjacentArticleType" }, "next": { "fno": "05645363", "articleId": "12OmNywfKJ1", "__typename": 
"AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icarsc/2016/2255/0/07781955", "title": "Real-Time Gait Events Detection during Walking of Biped Model and Humanoid Robot through Adaptive Thresholds", "doi": null, "abstractUrl": "/proceedings-article/icarsc/2016/07781955/12OmNANkoks", "parentPublication": { "id": "proceedings/icarsc/2016/2255/0", "title": "2016 International Conference on Autonomous Robot Systems and Competitions (ICARSC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/robot/1991/2163/0/00131936", "title": "Biped gait transitions", "doi": null, "abstractUrl": "/proceedings-article/robot/1991/00131936/12OmNAS9zt7", "parentPublication": { "id": "proceedings/robot/1991/2163/0", "title": "Proceedings. 1991 IEEE International Conference on Robotics and Automation", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2014/4677/0/4677a285", "title": "Human Identification Using Sensors Data Based on 3D Gait Area", "doi": null, "abstractUrl": "/proceedings-article/cw/2014/4677a285/12OmNAZOK1U", "parentPublication": { "id": "proceedings/cw/2014/4677/0", "title": "2014 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/sbr-lars/2012/4906/0/4906a190", "title": "Recognition of Human Motions for Imitation and Control of a Humanoid Robot", "doi": null, "abstractUrl": "/proceedings-article/sbr-lars/2012/4906a190/12OmNBPtJBQ", "parentPublication": { "id": "proceedings/sbr-lars/2012/4906/0", "title": "Brazilian Robotics Symposium and Latin American Robotics Symposium (SBR-LARS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccairo/2017/6536/0/6536a015", "title": "Modeling of Biped Robot Archie", "doi": null, "abstractUrl": 
"/proceedings-article/iccairo/2017/6536a015/12OmNCdBDHd", "parentPublication": { "id": "proceedings/iccairo/2017/6536/0", "title": "2017 International Conference on Control, Artificial Intelligence, Robotics & Optimization (ICCAIRO)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icisce/2016/2535/0/2535a763", "title": "A Neuro Fuzzy-Based Gait Trajectory Generator for a Biped Robot Using Kinect Data", "doi": null, "abstractUrl": "/proceedings-article/icisce/2016/2535a763/12OmNwcUk2S", "parentPublication": { "id": "proceedings/icisce/2016/2535/0", "title": "2016 3rd International Conference on Information Science and Control Engineering (ICISCE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icicta/2014/6636/0/6636a088", "title": "Analysis and Design of Humanoid Robot Dance", "doi": null, "abstractUrl": "/proceedings-article/icicta/2014/6636a088/12OmNwpoFMM", "parentPublication": { "id": "proceedings/icicta/2014/6636/0", "title": "2014 7th International Conference on Intelligent Computation Technology and Automation (ICICTA)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/06977439", "title": "Humanoid Robot Imitation with Pose Similarity Metric Learning", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/06977439/12OmNxXCGOy", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/eurosim/2013/5073/0/5073a226", "title": "Multi-legged Walking Robot Modelling in MATLAB/SimmechanicsTM and Its Simulation", "doi": null, "abstractUrl": "/proceedings-article/eurosim/2013/5073a226/12OmNyFU7bV", "parentPublication": { "id": "proceedings/eurosim/2013/5073/0", "title": "2013 8th EUROSIM Congress on Modelling 
and Simulation (EUROSIM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/case/2012/0430/0/06386470", "title": "Walking pattern generation based on energy function and inverse pendulum model for biped robot", "doi": null, "abstractUrl": "/proceedings-article/case/2012/06386470/12OmNzcxZ9h", "parentPublication": { "id": "proceedings/case/2012/0430/0", "title": "2012 IEEE International Conference on Automation Science and Engineering (CASE 2012)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1ftOBCI", "doi": "10.1109/VR.2018.8446569", "title": "Spatial Asynchronous Visuo-Tactile Stimuli Influence Ownership of Virtual Wings", "normalizedTitle": "Spatial Asynchronous Visuo-Tactile Stimuli Influence Ownership of Virtual Wings", "abstract": "This poster describes a within-subject study of the virtual body ownership (VBO) illusion using anatomically similar but morphologically different body of a virtual bat. Participants experienced visuo-tactile stimulation of their arms while seeing an object touching the wing of the bat. The mapping between the real and the virtual touch points varied across three conditions: no spatial deviation between visual and tactile input, 50% deviation, and 70% deviation. The results suggest that the degree of experienced VBO varies across the conditions. The illusion was broken in the absence of visuo-tactile stimuli.", "abstracts": [ { "abstractType": "Regular", "content": "This poster describes a within-subject study of the virtual body ownership (VBO) illusion using anatomically similar but morphologically different body of a virtual bat. Participants experienced visuo-tactile stimulation of their arms while seeing an object touching the wing of the bat. The mapping between the real and the virtual touch points varied across three conditions: no spatial deviation between visual and tactile input, 50% deviation, and 70% deviation. The results suggest that the degree of experienced VBO varies across the conditions. 
The illusion was broken in the absence of visuo-tactile stimuli.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This poster describes a within-subject study of the virtual body ownership (VBO) illusion using anatomically similar but morphologically different body of a virtual bat. Participants experienced visuo-tactile stimulation of their arms while seeing an object touching the wing of the bat. The mapping between the real and the virtual touch points varied across three conditions: no spatial deviation between visual and tactile input, 50% deviation, and 70% deviation. The results suggest that the degree of experienced VBO varies across the conditions. The illusion was broken in the absence of visuo-tactile stimuli.", "fno": "08446569", "keywords": [ "Avatars", "Haptic Interfaces", "Virtual Reality", "Virtual Touch Points", "Spatial Deviation", "Visual Input", "Tactile Input", "Spatial Asynchronous Visuo Tactile Stimuli Influence Ownership", "Virtual Wings", "Virtual Body Ownership Illusion", "Virtual Bat", "Visuo Tactile Stimulation", "VBO", "Shape", "Interviews", "Avatars", "Virtual Environments", "Electronic Mail", "Three Dimensional Displays", "Virtual Reality", "Virtual Body Ownership", "Visuo Tactile Stimuli", "Human Centered Computing X 007 E Virtual Reality", "Computing Methodologies X 007 E Virtual Reality" ], "authors": [ { "affiliation": "Aalborg University Copenhagen, Multisensory Experience Lab", "fullName": "Anastassia Andreasen", "givenName": "Anastassia", "surname": "Andreasen", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University Copenhagen, Multisensory Experience Lab", "fullName": "Niels Christian Nilsson", "givenName": "Niels Christian", "surname": "Nilsson", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University Copenhagen, Multisensory Experience Lab", "fullName": "Stefania Serafin", "givenName": "Stefania", "surname": "Serafin", "__typename": "ArticleAuthorType" } ], "idPrefix": 
"vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "503-504", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446560", "articleId": "13bd1eOELLz", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446448", "articleId": "13bd1gzWkRR", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223405", "title": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223405/12OmNBOCWnu", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2012/1247/0/06180904", "title": "Influence analysis of visual stimuli on localization of tactile stimuli in augmented reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2012/06180904/12OmNqFJhKf", "parentPublication": { "id": "proceedings/vr/2012/1247/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/whc/2007/2738/0/04145149", "title": "Vibration Enhances Geometry Perception with Tactile Shape Displays", "doi": null, "abstractUrl": "/proceedings-article/whc/2007/04145149/12OmNqHItul", "parentPublication": { "id": "proceedings/whc/2007/2738/0", "title": "2007 2nd Joint EuroHaptics Conference and Symposium on Haptic Interfaces for Virtual Environments and Teleoperator Systems", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2017/6067/0/08019519", "title": "ForceTab: 
Visuo-haptic interaction with a force-sensitive actuated tablet", "doi": null, "abstractUrl": "/proceedings-article/icme/2017/08019519/12OmNvA1hjt", "parentPublication": { "id": "proceedings/icme/2017/6067/0", "title": "2017 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446285", "title": "Using Vertex Displacements to Distort Virtual Bodies and Objects While Preserving Visuo-Tactile Congruency During Touch", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446285/13bd1fZBGcN", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446448", "title": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446448/13bd1gzWkRR", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446054", "title": "Keynote Speaker Tactile Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446054/13bd1h03qOn", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2018/7315/0/731500a229", "title": "REVAM: A Virtual Reality Application for Inducing Body Size Perception Modifications", "doi": null, "abstractUrl": "/proceedings-article/cw/2018/731500a229/17D45XeKgnO", "parentPublication": { "id": "proceedings/cw/2018/7315/0", "title": "2018 International Conference on Cyberworlds 
(CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798033", "title": "The Effects of Tactile Gestalt on Generating Velvet Hand Illusion", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798033/1cJ0LPy4Yb6", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/05/09382899", "title": "Tactile Perceptual Thresholds of Electrovibration in VR", "doi": null, "abstractUrl": "/journal/tg/2021/05/09382899/1saZD2BiBMc", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1gzWkRR", "doi": "10.1109/VR.2018.8446448", "title": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "normalizedTitle": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "abstract": "This poster describes a within-subject study of agency's influence on virtual body ownership (VBO) using anatomically similar but morphologically different body of a virtual bat. Paricipants were exposed to flight under four conditions: voluntary movement through virtual environment (VE) with avatar present, voluntary movement through virtual environment (VE) with avatar absent, voluntary limbs movement without movements through VE, and finally involuntary movement of the avatar through VE. The results suggest that agency enhances VBO illusion the most under participants' full control during flight locomotion.", "abstracts": [ { "abstractType": "Regular", "content": "This poster describes a within-subject study of agency's influence on virtual body ownership (VBO) using anatomically similar but morphologically different body of a virtual bat. Paricipants were exposed to flight under four conditions: voluntary movement through virtual environment (VE) with avatar present, voluntary movement through virtual environment (VE) with avatar absent, voluntary limbs movement without movements through VE, and finally involuntary movement of the avatar through VE. 
The results suggest that agency enhances VBO illusion the most under participants' full control during flight locomotion.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This poster describes a within-subject study of agency's influence on virtual body ownership (VBO) using anatomically similar but morphologically different body of a virtual bat. Paricipants were exposed to flight under four conditions: voluntary movement through virtual environment (VE) with avatar present, voluntary movement through virtual environment (VE) with avatar absent, voluntary limbs movement without movements through VE, and finally involuntary movement of the avatar through VE. The results suggest that agency enhances VBO illusion the most under participants' full control during flight locomotion.", "fno": "08446448", "keywords": [ "Avatars", "Virtual Environment", "Voluntary Limbs Movement", "VBO Illusion", "Virtual Bat", "Virtual Body Ownership", "Body Ownership Illusion", "Involuntary Avatar Movement", "Flight Locomotion", "Correlation", "Avatars", "Virtual Environments", "Pain", "Phantoms", "Visualization", "Artificial Limbs", "Agency", "Virtual Body Ownership", "Embodiment X 2022 Human Centered Computing X 007 E Virtual Reality", "X 2022 Computing Methodologies X 007 E Virtual Reality" ], "authors": [ { "affiliation": "Aalborg University, Multisensory Experience Lab, Copenhagen", "fullName": "Anastassia Andreasen", "givenName": "Anastassia", "surname": "Andreasen", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University, Multisensory Experience Lab, Copenhagen", "fullName": "Niels Christian Nilsson", "givenName": "Niels Christian", "surname": "Nilsson", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University, Multisensory Experience Lab, Copenhagen", "fullName": "Stefania Serafin", "givenName": "Stefania", "surname": "Serafin", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, 
"showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "505-506", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446569", "articleId": "13bd1ftOBCI", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446165", "articleId": "13bd1AIBM28", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223379", "title": "Avatar anthropomorphism and illusion of body ownership in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223379/12OmNAWpyrk", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223405", "title": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223405/12OmNBOCWnu", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vs-games/2011/4419/0/4419a168", "title": "Animation of 3D Avatars for Rehabilitation of the Upper Limbs", "doi": null, "abstractUrl": "/proceedings-article/vs-games/2011/4419a168/12OmNCfjeoQ", "parentPublication": { "id": "proceedings/vs-games/2011/4419/0", "title": "Games and Virtual Worlds for Serious Applications, Conference in", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892241", "title": "Prism aftereffects for throwing with a self-avatar in an immersive virtual environment", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2017/07892241/12OmNxy4N0w", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446431", "title": "A Multisensory Virtual Environment for OSH Training", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446431/13bd1eW2l9z", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07833192", "title": "A Psychophysical Experiment Regarding Components of the Plausibility Illusion", "doi": null, "abstractUrl": "/journal/tg/2017/04/07833192/13rRUxly8SZ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798345", "title": "Investigation of Visual Self-Representation for a Walking-in-Place Navigation System in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798345/1cJ1hpkUgHS", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vrw/2020/6532/0/09090634", "title": "Rhythmic proprioceptive stimulation improves embodiment in a walking avatar when added to visual stimulation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090634/1jIxkrgIlEY", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090453", "title": "Perception of Walking Self-body Avatar Enhances Virtual-walking Sensation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090453/1jIxoojmMy4", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ179JUrPa", "doi": "10.1109/VR.2019.8797787", "title": "The Effect of Hand Size and Interaction Modality on the Virtual Hand Illusion", "normalizedTitle": "The Effect of Hand Size and Interaction Modality on the Virtual Hand Illusion", "abstract": "Most commercial virtual reality applications with self avatars provide users with a &#x201C;one-size fits all&#x201D; avatar. While the height of this body may be scaled to the user's height, other body proportions, such as limb length and hand size, are rarely customized to fit an individual user. Prior research has shown that mismatches between users' avatars and their actual bodies can affect size perception and feelings of body ownership. In this paper, we consider how concepts related to the virtual hand illusion, user experience, and task efficiency are influenced by variations between the size of a user's actual hand and their avatar's hand. We also consider how using a tracked controller or tracked gestures affect these concepts. We conducted a 2&#x00D7;3 within-subjects study (n=20), with two levels of input modality: using tracked finger motion vs. a hand-held controller (Glove vs. Controller), and three levels of hand scaling (Small, Fit, and Large). Participants completed 2 block-assembly trials for each condition (for a total of 12 trials). Time, mistakes, and a user experience survey were recorded for each trial. Participants experienced stronger feelings of ownership and realism in the Glove condition. Efficiency was higher in the Controller condition and supported by play data of more time spent, blocks grabbed, and blocks dropped in the Glove condition. 
We did not find enough evidence for a change in agency and the intensity of the virtual hand illusion depending on hand size. Over half of the participants indicated preferring the Glove condition over the Controller condition, mentioning fun and efficiency as factors in their choices. Preferences on hand scaling were mixed but often attributed to efficiency. Participants liked the appearance of their virtual hand more while using the Fit instead of Large hands. Several interaction effects were observed between input modality and hand scaling, for example, for smaller hands, tracked hands evoked stronger feelings of ownership compared to using a controller. Our results show that the virtual hand illusion is stronger when participants are able to control a hand directly rather than with a hand-held device, and that the virtual reality task must first be considered to determine which modality and hand size are the most applicable.", "abstracts": [ { "abstractType": "Regular", "content": "Most commercial virtual reality applications with self avatars provide users with a &#x201C;one-size fits all&#x201D; avatar. While the height of this body may be scaled to the user's height, other body proportions, such as limb length and hand size, are rarely customized to fit an individual user. Prior research has shown that mismatches between users' avatars and their actual bodies can affect size perception and feelings of body ownership. In this paper, we consider how concepts related to the virtual hand illusion, user experience, and task efficiency are influenced by variations between the size of a user's actual hand and their avatar's hand. We also consider how using a tracked controller or tracked gestures affect these concepts. We conducted a 2&#x00D7;3 within-subjects study (n=20), with two levels of input modality: using tracked finger motion vs. a hand-held controller (Glove vs. Controller), and three levels of hand scaling (Small, Fit, and Large). 
Participants completed 2 block-assembly trials for each condition (for a total of 12 trials). Time, mistakes, and a user experience survey were recorded for each trial. Participants experienced stronger feelings of ownership and realism in the Glove condition. Efficiency was higher in the Controller condition and supported by play data of more time spent, blocks grabbed, and blocks dropped in the Glove condition. We did not find enough evidence for a change in agency and the intensity of the virtual hand illusion depending on hand size. Over half of the participants indicated preferring the Glove condition over the Controller condition, mentioning fun and efficiency as factors in their choices. Preferences on hand scaling were mixed but often attributed to efficiency. Participants liked the appearance of their virtual hand more while using the Fit instead of Large hands. Several interaction effects were observed between input modality and hand scaling, for example, for smaller hands, tracked hands evoked stronger feelings of ownership compared to using a controller. Our results show that the virtual hand illusion is stronger when participants are able to control a hand directly rather than with a hand-held device, and that the virtual reality task must first be considered to determine which modality and hand size are the most applicable.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Most commercial virtual reality applications with self avatars provide users with a “one-size fits all” avatar. While the height of this body may be scaled to the user's height, other body proportions, such as limb length and hand size, are rarely customized to fit an individual user. Prior research has shown that mismatches between users' avatars and their actual bodies can affect size perception and feelings of body ownership. 
In this paper, we consider how concepts related to the virtual hand illusion, user experience, and task efficiency are influenced by variations between the size of a user's actual hand and their avatar's hand. We also consider how using a tracked controller or tracked gestures affect these concepts. We conducted a 2×3 within-subjects study (n=20), with two levels of input modality: using tracked finger motion vs. a hand-held controller (Glove vs. Controller), and three levels of hand scaling (Small, Fit, and Large). Participants completed 2 block-assembly trials for each condition (for a total of 12 trials). Time, mistakes, and a user experience survey were recorded for each trial. Participants experienced stronger feelings of ownership and realism in the Glove condition. Efficiency was higher in the Controller condition and supported by play data of more time spent, blocks grabbed, and blocks dropped in the Glove condition. We did not find enough evidence for a change in agency and the intensity of the virtual hand illusion depending on hand size. Over half of the participants indicated preferring the Glove condition over the Controller condition, mentioning fun and efficiency as factors in their choices. Preferences on hand scaling were mixed but often attributed to efficiency. Participants liked the appearance of their virtual hand more while using the Fit instead of Large hands. Several interaction effects were observed between input modality and hand scaling, for example, for smaller hands, tracked hands evoked stronger feelings of ownership compared to using a controller. 
Our results show that the virtual hand illusion is stronger when participants are able to control a hand directly rather than with a hand-held device, and that the virtual reality task must first be considered to determine which modality and hand size are the most applicable.", "fno": "08797787", "keywords": [ "Avatars", "Data Gloves", "Gesture Recognition", "Human Computer Interaction", "User Experience Survey", "Virtual Hand Illusion", "Hand Size", "Hand Scaling", "Tracked Hands", "Hand Held Device", "One Size Fits All Avatar", "Hand Held Controller", "Glove Condition", "Virtual Reality Applications", "Controller Condition", "Avatars", "Tracking", "Rubber", "Task Analysis", "Games", "Virtual Environments", "Human Centered Computing X 2014 Virtual Reality Human Centered Computing X 2014 Gestural Input", "Human Centered Computing X 2014 Interaction Design", "Computing Methodologies X 2014 Perception" ], "authors": [ { "affiliation": "Clemson University", "fullName": "Lorraine Lin", "givenName": "Lorraine", "surname": "Lin", "__typename": "ArticleAuthorType" }, { "affiliation": "Venturi Labs", "fullName": "Aline Normoyle", "givenName": "Aline", "surname": "Normoyle", "__typename": "ArticleAuthorType" }, { "affiliation": "Clemson University", "fullName": "Alexandra Adkins", "givenName": "Alexandra", "surname": "Adkins", "__typename": "ArticleAuthorType" }, { "affiliation": "Clemson University", "fullName": "Yu Sun", "givenName": "Yu", "surname": "Sun", "__typename": "ArticleAuthorType" }, { "affiliation": "Clemson University", "fullName": "Andrew Robb", "givenName": "Andrew", "surname": "Robb", "__typename": "ArticleAuthorType" }, { "affiliation": "Facebook Reality Labs", "fullName": "Yuting Ye", "givenName": "Yuting", "surname": "Ye", "__typename": "ArticleAuthorType" }, { "affiliation": "Facebook Reality Labs, University of Birmingham, UK", "fullName": "Massimiliano Di Luca", "givenName": "Massimiliano", "surname": "Di Luca", "__typename": "ArticleAuthorType" }, { 
"affiliation": "Clemson University", "fullName": "Sophie Jörg", "givenName": "Sophie", "surname": "Jörg", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "510-518", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08797828", "articleId": "1cJ13n6aEsE", "__typename": "AdjacentArticleType" }, "next": { "fno": "08798040", "articleId": "1cJ14CI2Jsk", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223405", "title": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223405/12OmNBOCWnu", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504682", "title": "The role of interaction in virtual embodiment: Effects of the virtual hand representation", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504682/12OmNwE9Our", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504731", "title": "The effect of realism on the virtual hand illusion", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504731/12OmNxu6p9n", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/isuvr/2017/3091/0/3091a001", "title": "User Study of VR Basic Controller and Data Glove as Hand Gesture Inputs in VR Games", "doi": null, "abstractUrl": "/proceedings-article/isuvr/2017/3091a001/12OmNyyeWrp", "parentPublication": { "id": "proceedings/isuvr/2017/3091/0", "title": "2017 International Symposium on Ubiquitous Virtual Reality (ISUVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2013/01/tth2013010106", "title": "Using Postural Synergies to Animate a Low-Dimensional Hand Avatar in Haptic Simulation", "doi": null, "abstractUrl": "/journal/th/2013/01/tth2013010106/13rRUwInv4z", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2014/04/06732936", "title": "Robotically Enhanced Rubber Hand Illusion", "doi": null, "abstractUrl": "/journal/th/2014/04/06732936/13rRUwbJD4U", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07829415", "title": "Paint with Me: Stimulating Creativity and Empathy While Painting with a Painter in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2017/04/07829415/13rRUxASubB", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049656", "title": "How Virtual Hand Representations Affect the Perceptions of Dynamic Affordances in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049656/1KYozmEJf2M", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798193", "title": "Virtual Hand Illusion: The Alien Finger Motion Experiment", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798193/1cJ0USQZTmU", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1tnWwqMuCzu", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1tnXoIo36uY", "doi": "10.1109/VRW52623.2021.00247", "title": "[DC] The Effect of Modulating The Step Length of an Embodied Self-Avatars on Gait Symmetry During Treadmill Walking", "normalizedTitle": "[DC] The Effect of Modulating The Step Length of an Embodied Self-Avatars on Gait Symmetry During Treadmill Walking", "abstract": "In virtual reality, it is possible to simulate visual self-representation by mapping one's body movements to those of an avatar. The acceptance of the virtual body as part of one's own body creates an ownership illusion. This paper discusses different protocols to study the effects of modulating a self-avatar's step length on the step length of embodied healthy participants and post-stroke patients.", "abstracts": [ { "abstractType": "Regular", "content": "In virtual reality, it is possible to simulate visual self-representation by mapping one's body movements to those of an avatar. The acceptance of the virtual body as part of one's own body creates an ownership illusion. This paper discusses different protocols to study the effects of modulating a self-avatar's step length on the step length of embodied healthy participants and post-stroke patients.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In virtual reality, it is possible to simulate visual self-representation by mapping one's body movements to those of an avatar. The acceptance of the virtual body as part of one's own body creates an ownership illusion. 
This paper discusses different protocols to study the effects of modulating a self-avatar's step length on the step length of embodied healthy participants and post-stroke patients.", "fno": "405700a729", "keywords": [ "Avatars", "Gait Analysis", "Patient Rehabilitation", "Visual Self Representation", "Virtual Body", "Ownership Illusion", "Self Avatar", "Step Length", "Embodied Healthy Participants", "Embodied Self Avatars", "Gait Symmetry", "Treadmill Walking", "Virtual Reality", "Legged Locomotion", "Visualization", "Protocols", "Three Dimensional Displays", "Avatars", "Conferences", "Multimedia Systems", "Self Avatar", "Virtual Reality", "Ownership Illusion", "Rehabilitation", "Visual Feedback", "Gait Symmetry" ], "authors": [ { "affiliation": "Ecole de technologie superieure,Montreal,Canada", "fullName": "Iris Willaert", "givenName": "Iris", "surname": "Willaert", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-03-01T00:00:00", "pubType": "proceedings", "pages": "729-730", "year": "2021", "issn": null, "isbn": "978-1-6654-4057-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "405700a727", "articleId": "1tnWx1pSNpK", "__typename": "AdjacentArticleType" }, "next": { "fno": "405700a731", "articleId": "1tnXdLvN92o", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/trustcom/2013/5022/0/5022b458", "title": "Measuring Walking and Running Cadence Using Magnetometers", "doi": null, "abstractUrl": "/proceedings-article/trustcom/2013/5022b458/12OmNxw5BsZ", "parentPublication": { "id": "proceedings/trustcom/2013/5022/0", "title": "2013 12th IEEE International Conference on Trust, Security and Privacy in Computing and Communications (TrustCom)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892386", "title": "Travel in large-scale head-worn VR: Pre-oriented teleportation with WIMs and previews", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892386/12OmNzhELm6", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2016/11/07523447", "title": "Retargeting Human-Object Interaction to Virtual Avatars", "doi": null, "abstractUrl": "/journal/tg/2016/11/07523447/13rRUzp02ot", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a792", "title": "Perception of Symmetry of Actual and Modulated Self-Avatar Gait Movements During Treadmill Walking", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a792/1CJe47o4BRm", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a818", "title": "Towards Controlling Whole Body Avatars with Partial Body-Tracking and Environmental Information", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a818/1CJeftFqI5W", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089654", "title": "Effects of Locomotion Style and Body Visibility of a Telepresence Avatar", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089654/1jIxd00PzX2", 
"parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090412", "title": "Modulating The Gait Of A Real-Time Self-Avatar To Induce Changes In Stride Length During Treadmill Walking", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090412/1jIxkYhRpKg", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a391", "title": "The Effectiveness of Locomotion Interfaces Depends on Self-Motion Cues, Environmental Cues, and the Individual", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a391/1tnXFgLAfSw", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a140", "title": "Exploring behaviour towards avatars and agents in immersive virtual environments with mixed-agency interactions", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a140/1tnXfmuYmli", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a788", "title": "Revisiting Distance Perception with Scaled Embodied Cues in Social Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a788/1tuAHZj29Q4", "parentPublication": { "id": "proceedings/vr/2021/1838/0", 
"title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1JrQPhTSspy", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "acronym": "ismar", "groupId": "1000465", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1JrRf0Dbcac", "doi": "10.1109/ISMAR55827.2022.00041", "title": "The Effects of Avatar and Environment Design on Embodiment, Presence, Activation, and Task Load in a Virtual Reality Exercise Application", "normalizedTitle": "The Effects of Avatar and Environment Design on Embodiment, Presence, Activation, and Task Load in a Virtual Reality Exercise Application", "abstract": "The development of embodied Virtual Reality (VR) systems involves multiple central design choices. These design choices affect the user perception and therefore require thorough consideration. This article reports on two user studies investigating the influence of common design choices on relevant intermediate factors (sense of embodiment, presence, motivation, activation, and task load) in a VR application for physical exercises. The first study manipulated the avatar fidelity (abstract, partial body vs. anthropomorphic, full-body) and the environment (with vs. without mirror). The second study manipulated the avatar type (healthy vs. injured) and the environment type (beach vs. hospital) and, hence, the avatar-environment congruence. The full-body avatar significantly increased the sense of embodiment and decreased mental demand. Interestingly, the mirror did not influence the dependent variables. The injured avatar significantly increased the temporal demand. The beach environment significantly reduced the tense activation. On the beach, participants felt more present in the incongruent condition embodying the injured avatar.", "abstracts": [ { "abstractType": "Regular", "content": "The development of embodied Virtual Reality (VR) systems involves multiple central design choices. 
These design choices affect the user perception and therefore require thorough consideration. This article reports on two user studies investigating the influence of common design choices on relevant intermediate factors (sense of embodiment, presence, motivation, activation, and task load) in a VR application for physical exercises. The first study manipulated the avatar fidelity (abstract, partial body vs. anthropomorphic, full-body) and the environment (with vs. without mirror). The second study manipulated the avatar type (healthy vs. injured) and the environment type (beach vs. hospital) and, hence, the avatar-environment congruence. The full-body avatar significantly increased the sense of embodiment and decreased mental demand. Interestingly, the mirror did not influence the dependent variables. The injured avatar significantly increased the temporal demand. The beach environment significantly reduced the tense activation. On the beach, participants felt more present in the incongruent condition embodying the injured avatar.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The development of embodied Virtual Reality (VR) systems involves multiple central design choices. These design choices affect the user perception and therefore require thorough consideration. This article reports on two user studies investigating the influence of common design choices on relevant intermediate factors (sense of embodiment, presence, motivation, activation, and task load) in a VR application for physical exercises. The first study manipulated the avatar fidelity (abstract, partial body vs. anthropomorphic, full-body) and the environment (with vs. without mirror). The second study manipulated the avatar type (healthy vs. injured) and the environment type (beach vs. hospital) and, hence, the avatar-environment congruence. The full-body avatar significantly increased the sense of embodiment and decreased mental demand. 
Interestingly, the mirror did not influence the dependent variables. The injured avatar significantly increased the temporal demand. The beach environment significantly reduced the tense activation. On the beach, participants felt more present in the incongruent condition embodying the injured avatar.", "fno": "532500a260", "keywords": [ "Avatars", "Cognition", "Human Factors", "Interactive Systems", "Abstract Body", "Anthropomorphic Body", "Avatar Fidelity", "Avatar Environment Congruence", "Beach Environment", "Common Design Choices", "Embodied Virtual Reality Systems", "Environment Design", "Environment Type", "Full Body Avatar", "Injured Avatar", "Multiple Central Design Choices", "Partial Body", "Physical Exercises", "Relevant Intermediate Factors", "Task Load", "Tense Activation", "User Perception", "Virtual Reality Exercise Application", "VR Application", "Hospitals", "Avatars", "Design Methodology", "Mirrors", "Task Analysis", "Augmented Reality", "Human Centered Computing", "Empirical Studies In HCI", "User Studies", "Virtual Reality" ], "authors": [ { "affiliation": "University of Würzburg,HCI Group", "fullName": "Andrea Bartl", "givenName": "Andrea", "surname": "Bartl", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Würzburg,HCI Group", "fullName": "Christian Merz", "givenName": "Christian", "surname": "Merz", "__typename": "ArticleAuthorType" }, { "affiliation": "HEX Lab FAU Erlangen-Nürnberg", "fullName": "Daniel Roth", "givenName": "Daniel", "surname": "Roth", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Würzburg,HCI Group", "fullName": "Marc Erich Latoschik", "givenName": "Marc Erich", "surname": "Latoschik", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-10-01T00:00:00", "pubType": "proceedings", "pages": "260-269", "year": "2022", "issn": "1554-7868", "isbn": "978-1-6654-5325-7", 
"notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [ { "id": "1JrReVzO2yY", "name": "pismar202253250-09994882s1-mm_532500a260.zip", "size": "30 MB", "location": "https://www.computer.org/csdl/api/v1/extra/pismar202253250-09994882s1-mm_532500a260.zip", "__typename": "WebExtraType" } ], "adjacentArticles": { "previous": { "fno": "532500a252", "articleId": "1JrR1h7oXUQ", "__typename": "AdjacentArticleType" }, "next": { "fno": "532500a270", "articleId": "1JrQTgRsONG", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223377", "title": "Avatar embodiment realism and virtual fitness training", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223377/12OmNCcKQFn", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040591", "title": "An Evaluation of Self-Avatar Eye Movement for Virtual Embodiment", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040591/13rRUyYBlgz", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a730", "title": "Third-Person Perspective Avatar Embodiment in Augmented Reality: Examining the Proteus Effect on Physical Performance", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a730/1CJffY1QgeI", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a772", "title": "Embodiment of an Avatar with Unnatural Arm Movements", "doi": 
null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a772/1J7W9fEjd6g", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a503", "title": "Studying &#x201C;Avatar Transitions&#x201D; in Augmented Reality: Influence on Sense of Embodiment and Physiological Activity", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a503/1J7W9twFolO", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049676", "title": "The Impact of Avatar and Environment Congruence on Plausibility, Embodiment, Presence, and the Proteus Effect in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049676/1KYosbnM8q4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797719", "title": "The Effect of Avatar Appearance on Social Presence in an Augmented Reality Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797719/1cJ1dVsXQDS", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998305", "title": "Avatar and Sense of Embodiment: Studying the Relative Preference Between Appearance, Control and Point of View", "doi": null, "abstractUrl": 
"/journal/tg/2020/05/08998305/1hpPBuW1ahy", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998352", "title": "Using Facial Animation to Increase the Enfacement Illusion and Avatar Self-Identification", "doi": null, "abstractUrl": "/journal/tg/2020/05/08998352/1hpPCCB7Bte", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a054", "title": "The Effects of Body Tracking Fidelity on Embodiment of an Inverse-Kinematic Avatar for Male Participants", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a054/1pyswgi4b7y", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIx7fmpQ9a", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIxamWhlT2", "doi": "10.1109/VR46266.2020.00019", "title": "The Self-Avatar Follower Effect in Virtual Reality", "normalizedTitle": "The Self-Avatar Follower Effect in Virtual Reality", "abstract": "When embodying a virtual avatar in immersive VR applications where body tracking is enabled, users typically are and feel in control the avatar movements. However, there are situations in which the technology could be tweaked to flip this relationship so that an embodied avatar could affect the user&#x2019;s motor behavior without users noticing it. This has been shown in action retargeting applications and motor contagion experiments. Here we discuss a different way in which an embodied avatar could implicitly drive users movements: the self-avatar follower effect. We review previous evidences and present new experimental results showing how, whenever the virtual body does not overlay with their physical body, users tend to unconsciously follow their avatar, filling the gap if the system allows for it. We discuss this effect in the context of the relevant neuroscientific literature, and propose a theoretical account of the follower effect at the intersection of motor control and inference theories.", "abstracts": [ { "abstractType": "Regular", "content": "When embodying a virtual avatar in immersive VR applications where body tracking is enabled, users typically are and feel in control the avatar movements. However, there are situations in which the technology could be tweaked to flip this relationship so that an embodied avatar could affect the user&#x2019;s motor behavior without users noticing it. This has been shown in action retargeting applications and motor contagion experiments. 
Here we discuss a different way in which an embodied avatar could implicitly drive users movements: the self-avatar follower effect. We review previous evidences and present new experimental results showing how, whenever the virtual body does not overlay with their physical body, users tend to unconsciously follow their avatar, filling the gap if the system allows for it. We discuss this effect in the context of the relevant neuroscientific literature, and propose a theoretical account of the follower effect at the intersection of motor control and inference theories.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "When embodying a virtual avatar in immersive VR applications where body tracking is enabled, users typically are and feel in control the avatar movements. However, there are situations in which the technology could be tweaked to flip this relationship so that an embodied avatar could affect the user’s motor behavior without users noticing it. This has been shown in action retargeting applications and motor contagion experiments. Here we discuss a different way in which an embodied avatar could implicitly drive users movements: the self-avatar follower effect. We review previous evidences and present new experimental results showing how, whenever the virtual body does not overlay with their physical body, users tend to unconsciously follow their avatar, filling the gap if the system allows for it. 
We discuss this effect in the context of the relevant neuroscientific literature, and propose a theoretical account of the follower effect at the intersection of motor control and inference theories.", "fno": "09089510", "keywords": [ "Avatars", "Motor Drives", "Visualization", "Computational Modeling", "Predictive Models", "Tracking", "Human Centered Computing", "Virtual Reality", "Embodiment", "Perception", "Motor Control" ], "authors": [ { "affiliation": "Microsoft Research,Redmond,USA", "fullName": "Mar Gonzalez-Franco", "givenName": "Mar", "surname": "Gonzalez-Franco", "__typename": "ArticleAuthorType" }, { "affiliation": "Microsoft Research,Redmond,USA", "fullName": "Brian Cohn", "givenName": "Brian", "surname": "Cohn", "__typename": "ArticleAuthorType" }, { "affiliation": "Microsoft Research,Redmond,USA", "fullName": "Eyal Ofek", "givenName": "Eyal", "surname": "Ofek", "__typename": "ArticleAuthorType" }, { "affiliation": "Institute of Development, Aging and Cancer, Tohoku University,Smart-Aging Research Centre,Sendai,Japan", "fullName": "Dalila Burin", "givenName": "Dalila", "surname": "Burin", "__typename": "ArticleAuthorType" }, { "affiliation": "Microsoft Research,Redmond,USA", "fullName": "Antonella Maselli", "givenName": "Antonella", "surname": "Maselli", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2020-03-01T00:00:00", "pubType": "proceedings", "pages": "18-25", "year": "2020", "issn": null, "isbn": "978-1-7281-5608-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09089588", "articleId": "1jIxbTl2uRi", "__typename": "AdjacentArticleType" }, "next": { "fno": "09089612", "articleId": "1jIxdAmCCJi", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223406", "title": 
"Self-characterstics and sound in immersive virtual reality — Estimating avatar weight from footstep sounds", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223406/12OmNAlvHUH", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699215", "title": "VIRTOOAIR: Virtual Reality TOOlbox for Avatar Intelligent Reconstruction", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699215/19F1Ug56qB2", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a057", "title": "Visual Fidelity Effects on Expressive Self-avatar in Virtual Reality: First Impressions Matter", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a057/1CJc41zMnFC", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a001", "title": "A Cardboard-Based Virtual Reality Study on Self-Avatar Appearance and Breathing", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a001/1CJdXjsLKBG", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049676", "title": "The Impact of Avatar and Environment Congruence on Plausibility, Embodiment, Presence, and the Proteus Effect in Virtual Reality", "doi": null, "abstractUrl": 
"/journal/tg/2023/05/10049676/1KYosbnM8q4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798044", "title": "Effect of Full Body Avatar in Augmented Reality Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798044/1cJ14GMFJdK", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798263", "title": "EEG Can Be Used to Measure Embodiment When Controlling a Walking Self-Avatar", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798263/1cJ1gj5NtQc", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090412", "title": "Modulating The Gait Of A Real-Time Self-Avatar To Induce Changes In Stride Length During Treadmill Walking", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090412/1jIxkYhRpKg", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090630", "title": "Embodied Realistic Avatar System with Body Motions and Facial Expressions for Communication in Virtual Reality Applications", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090630/1jIxtbZL30Y", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and 
Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a127", "title": "Evidence for a Relationship Between Self-Avatar Fixations and Perceived Avatar Similarity within Low-Cost Virtual Reality Embodiment", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a127/1tnXDDh8sqk", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIxhEnA8IE", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIxljoHOvK", "doi": "10.1109/VRW50115.2020.00276", "title": "Modified Playback of Avatar Clip Sequences Based on Student Attention in Educational VR", "normalizedTitle": "Modified Playback of Avatar Clip Sequences Based on Student Attention in Educational VR", "abstract": "We demonstrate a system that sequences teacher avatar clips considering student eye tracking. We are investigating subjective suitability of avatar responses to student misunderstandings or inattention. Three different avatar behaviors are demonstrated to allow a teacher pedagogical agent to behave more appropriately to student attention or distraction. An in-game mobile device provides an experiment control mechanism for 2 levels of distractions.", "abstracts": [ { "abstractType": "Regular", "content": "We demonstrate a system that sequences teacher avatar clips considering student eye tracking. We are investigating subjective suitability of avatar responses to student misunderstandings or inattention. Three different avatar behaviors are demonstrated to allow a teacher pedagogical agent to behave more appropriately to student attention or distraction. An in-game mobile device provides an experiment control mechanism for 2 levels of distractions.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We demonstrate a system that sequences teacher avatar clips considering student eye tracking. We are investigating subjective suitability of avatar responses to student misunderstandings or inattention. Three different avatar behaviors are demonstrated to allow a teacher pedagogical agent to behave more appropriately to student attention or distraction. 
An in-game mobile device provides an experiment control mechanism for 2 levels of distractions.", "fno": "09090598", "keywords": [ "Avatars", "Gaze Tracking", "Mobile Handsets", "Oils", "Cranes", "Conferences", "Human Centered Computing", "Visualization" ], "authors": [ { "affiliation": "University of Louisiana at Lafayette", "fullName": "Adil Khokhar", "givenName": "Adil", "surname": "Khokhar", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Louisiana at Lafayette", "fullName": "Andrew Yoshimura", "givenName": "Andrew", "surname": "Yoshimura", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Louisiana at Lafayette", "fullName": "Christoph W. Borst", "givenName": "Christoph W.", "surname": "Borst", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2020-03-01T00:00:00", "pubType": "proceedings", "pages": "850-851", "year": "2020", "issn": null, "isbn": "978-1-7281-6532-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09090550", "articleId": "1jIxkUrqmu4", "__typename": "AdjacentArticleType" }, "next": { "fno": "09090454", "articleId": "1jIxszQHffq", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223379", "title": "Avatar anthropomorphism and illusion of body ownership in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223379/12OmNAWpyrk", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223378", "title": "Influence of avatar realism on stressful situation in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223378/12OmNBdruc4", 
"parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mobilecloud/2015/8977/0/8977a151", "title": "Avatar: Mobile Distributed Computing in the Cloud", "doi": null, "abstractUrl": "/proceedings-article/mobilecloud/2015/8977a151/12OmNrIaej2", "parentPublication": { "id": "proceedings/mobilecloud/2015/8977/0", "title": "2015 3rd IEEE International Conference on Mobile Cloud Computing, Services, and Engineering (MobileCloud)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892278", "title": "Bodiless embodiment: A descriptive survey of avatar bodily coherence in first-wave consumer VR applications", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892278/12OmNvnwVj4", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a613", "title": "Evaluating Modifying Teacher Avatar Clip Sequencing Based on Eye-Tracked Visual Attention in Educational VR", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a613/1J7WepoS2w8", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798327", "title": "Eye-gaze-triggered Visual Cues to Restore Attention in Educational VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798327/1cJ0HmmdfUY", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, 
"__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798318", "title": "Evaluating Teacher Avatar Appearances in Educational VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798318/1cJ0P8vBhhm", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797896", "title": "Pedagogical Agent Responsive to Eye Tracking in Educational VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797896/1cJ1ceQVCtG", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/apwc-on-cse/2018/1390/0/139000a197", "title": "Semi-Autonomous Remote Control of an Avatar Robot&#x2019;s Head for Distance Education", "doi": null, "abstractUrl": "/proceedings-article/apwc-on-cse/2018/139000a197/1dPoOosvzG0", "parentPublication": { "id": "proceedings/apwc-on-cse/2018/1390/0", "title": "2018 5th Asia-Pacific World Congress on Computer Science and Engineering (APWC on CSE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090457", "title": "Affective Embodiment: The effect of avatar appearance and posture representation on emotions in VR", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090457/1jIxjXwO4HS", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIxhEnA8IE", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIxzZ4gw4E", "doi": "10.1109/VRW50115.2020.00241", "title": "The Effects of Avatar Visibility on Behavioral Response with or without Mirror-Visual Feedback in Virtual Environments", "normalizedTitle": "The Effects of Avatar Visibility on Behavioral Response with or without Mirror-Visual Feedback in Virtual Environments", "abstract": "Existing studies have shown that increasing avatar visibility could not improve perceptual responses. With the recent advances of VR technology, full body tracking avatars have been adopted to social interactions and games with light weight head-mounted displays. However, it is unknown about the effects of full-body avatars on behavioral responses. Hence, in this study, we designed a full-body avatar visibility with or without virtual-mirror feedback, and investigated their effects on presence, embodiment, and task performance in a bow-shooting game. This study provides initial results of using avatar visibility to enhance behavioral responses in virtual environments", "abstracts": [ { "abstractType": "Regular", "content": "Existing studies have shown that increasing avatar visibility could not improve perceptual responses. With the recent advances of VR technology, full body tracking avatars have been adopted to social interactions and games with light weight head-mounted displays. However, it is unknown about the effects of full-body avatars on behavioral responses. Hence, in this study, we designed a full-body avatar visibility with or without virtual-mirror feedback, and investigated their effects on presence, embodiment, and task performance in a bow-shooting game. 
This study provides initial results of using avatar visibility to enhance behavioral responses in virtual environments", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Existing studies have shown that increasing avatar visibility could not improve perceptual responses. With the recent advances of VR technology, full body tracking avatars have been adopted to social interactions and games with light weight head-mounted displays. However, it is unknown about the effects of full-body avatars on behavioral responses. Hence, in this study, we designed a full-body avatar visibility with or without virtual-mirror feedback, and investigated their effects on presence, embodiment, and task performance in a bow-shooting game. This study provides initial results of using avatar visibility to enhance behavioral responses in virtual environments", "fno": "09090683", "keywords": [ "Avatars", "Mirrors", "Tracking", "Task Analysis", "Games", "Interviews", "Avatar Visibility", "Mirror Visual Feedback", "Behavioral Response", "Virtual Reality" ], "authors": [ { "affiliation": "Jinan University,Guangzhou,China", "fullName": "BoYu Gao", "givenName": "BoYu", "surname": "Gao", "__typename": "ArticleAuthorType" }, { "affiliation": "Konkuk University,Seoul,South Korea", "fullName": "Joonwoo Lee", "givenName": "Joonwoo", "surname": "Lee", "__typename": "ArticleAuthorType" }, { "affiliation": "La Trobe University,Melbourne,Australia", "fullName": "Huawei Tu", "givenName": "Huawei", "surname": "Tu", "__typename": "ArticleAuthorType" }, { "affiliation": "Konkuk University,Seoul,South Korea", "fullName": "Wonjun Seong", "givenName": "Wonjun", "surname": "Seong", "__typename": "ArticleAuthorType" }, { "affiliation": "Konkuk University,Seoul,South Korea", "fullName": "HyungSeok Kim", "givenName": "HyungSeok", "surname": "Kim", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, 
"pubDate": "2020-03-01T00:00:00", "pubType": "proceedings", "pages": "780-781", "year": "2020", "issn": null, "isbn": "978-1-7281-6532-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09090419", "articleId": "1jIxnZlNJok", "__typename": "AdjacentArticleType" }, "next": { "fno": "09090508", "articleId": "1jIxvVHkrqo", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223379", "title": "Avatar anthropomorphism and illusion of body ownership in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223379/12OmNAWpyrk", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223377", "title": "Avatar embodiment realism and virtual fitness training", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223377/12OmNCcKQFn", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446229", "title": "Any &#x201C;Body&#x201D; There? 
Avatar Visibility Effects in a Virtual Reality Game", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446229/13bd1fHrlRx", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699215", "title": "VIRTOOAIR: Virtual Reality TOOlbox for Avatar Intelligent Reconstruction", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699215/19F1Ug56qB2", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a057", "title": "Visual Fidelity Effects on Expressive Self-avatar in Virtual Reality: First Impressions Matter", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a057/1CJc41zMnFC", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797884", "title": "Distributed, Collaborative Virtual Reality Application for Product Development with Simple Avatar Calibration Method", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797884/1cJ0TJmlU9q", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797719", "title": "The Effect of Avatar Appearance on Social Presence in an Augmented Reality Remote Collaboration", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2019/08797719/1cJ1dVsXQDS", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998352", "title": "Using Facial Animation to Increase the Enfacement Illusion and Avatar Self-Identification", "doi": null, "abstractUrl": "/journal/tg/2020/05/08998352/1hpPCCB7Bte", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089654", "title": "Effects of Locomotion Style and Body Visibility of a Telepresence Avatar", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089654/1jIxd00PzX2", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800a050", "title": "The Effects of Virtual Avatar Visibility on Pointing Interpretation by Observers in 3D Environments", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800a050/1yeDa4aaGY0", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1tnWwqMuCzu", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1tnXDDh8sqk", "doi": "10.1109/VRW52623.2021.00031", "title": "Evidence for a Relationship Between Self-Avatar Fixations and Perceived Avatar Similarity within Low-Cost Virtual Reality Embodiment", "normalizedTitle": "Evidence for a Relationship Between Self-Avatar Fixations and Perceived Avatar Similarity within Low-Cost Virtual Reality Embodiment", "abstract": "In this study, we investigate potential relationships between avatar appearance and viewpoint within the context of low-cost motion tracking using Body and Mirror factors. Six experimental conditions were developed that combined Body and Mirror factors. In this between-groups design, participants were able to control their self-avatar by using HTC Vive controllers and trackers. Our results suggest a relationship between perceived similarity with the selfavatar and looking at the self-avatar.", "abstracts": [ { "abstractType": "Regular", "content": "In this study, we investigate potential relationships between avatar appearance and viewpoint within the context of low-cost motion tracking using Body and Mirror factors. Six experimental conditions were developed that combined Body and Mirror factors. In this between-groups design, participants were able to control their self-avatar by using HTC Vive controllers and trackers. Our results suggest a relationship between perceived similarity with the selfavatar and looking at the self-avatar.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this study, we investigate potential relationships between avatar appearance and viewpoint within the context of low-cost motion tracking using Body and Mirror factors. 
Six experimental conditions were developed that combined Body and Mirror factors. In this between-groups design, participants were able to control their self-avatar by using HTC Vive controllers and trackers. Our results suggest a relationship between perceived similarity with the selfavatar and looking at the self-avatar.", "fno": "405700a127", "keywords": [ "Avatars", "Human Computer Interaction", "Social Aspects Of Automation", "Virtual Reality", "HTC Vive Controllers", "Mirror Factors", "Selfavatar", "Perceived Similarity", "Trackers", "Between Groups Design", "Combined Body", "Low Cost Motion", "Avatar Appearance", "Potential Relationships", "Low Cost Virtual Reality Embodiment", "Perceived Avatar Similarity", "Self Avatar Fixations", "Three Dimensional Displays", "Correlation", "Tracking", "Avatars", "Conferences", "Virtual Environments", "User Interfaces", "Human Centered Computing", "Human Computer Interaction HCI", "Interaction Paradigms", "Virtual Reality" ], "authors": [ { "affiliation": "Purdue University,Department of Computer Graphics Technology,West Lafayette,U.S.A.", "fullName": "Claudia Krogmeier", "givenName": "Claudia", "surname": "Krogmeier", "__typename": "ArticleAuthorType" }, { "affiliation": "Purdue University,Department of Computer Graphics Technology,West Lafayette,U.S.A.", "fullName": "Christos Mousas", "givenName": "Christos", "surname": "Mousas", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-03-01T00:00:00", "pubType": "proceedings", "pages": "127-134", "year": "2021", "issn": null, "isbn": "978-1-6654-4057-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [ { "id": "1tnXDAdeOpa", "name": "pvrw202140570-09419344s1-mm_405700a127.zip", "size": "115 kB", "location": "https://www.computer.org/csdl/api/v1/extra/pvrw202140570-09419344s1-mm_405700a127.zip", "__typename": "WebExtraType" } ], 
"adjacentArticles": { "previous": { "fno": "405700a123", "articleId": "1tnXz3r7biw", "__typename": "AdjacentArticleType" }, "next": { "fno": "405700a135", "articleId": "1tnY3zC32KI", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223377", "title": "Avatar embodiment realism and virtual fitness training", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223377/12OmNCcKQFn", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892278", "title": "Bodiless embodiment: A descriptive survey of avatar bodily coherence in first-wave consumer VR applications", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892278/12OmNvnwVj4", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892241", "title": "Prism aftereffects for throwing with a self-avatar in an immersive virtual environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892241/12OmNxy4N0w", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040591", "title": "An Evaluation of Self-Avatar Eye Movement for Virtual Embodiment", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040591/13rRUyYBlgz", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a057", "title": "Visual Fidelity Effects on Expressive Self-avatar in 
Virtual Reality: First Impressions Matter", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a057/1CJc41zMnFC", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a350", "title": "Exploring Presence, Avatar Embodiment, and Body Perception with a Holographic Augmented Reality Mirror", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a350/1CJcn3q3J5K", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a772", "title": "Embodiment of an Avatar with Unnatural Arm Movements", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a772/1J7W9fEjd6g", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a260", "title": "The Effects of Avatar and Environment Design on Embodiment, Presence, Activation, and Task Load in a Virtual Reality Exercise Application", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a260/1JrRf0Dbcac", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090683", "title": "The Effects of Avatar Visibility on Behavioral Response with or without Mirror-Visual Feedback in Virtual Environments", "doi": null, "abstractUrl": 
"/proceedings-article/vrw/2020/09090683/1jIxzZ4gw4E", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a065", "title": "The Embodiment of Photorealistic Avatars Influences Female Body Weight Perception in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a065/1tuAAOZpdoQ", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAXxWQw", "title": "2017 IEEE Symposium on Computers and Communications (ISCC)", "acronym": "iscc", "groupId": "1000156", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNBt3qqj", "doi": "10.1109/ISCC.2017.8024496", "title": "Affective impact of social presence in immersive 3D virtual worlds", "normalizedTitle": "Affective impact of social presence in immersive 3D virtual worlds", "abstract": "The recent release of affordable head mounted displays (HMDs) foretells a near future where a variety of social interactions could be taking place in VR environments. There is little knowledge however as to how actual social conditions impact users during a virtual experience. We investigate this issue by placing 54 individuals into a virtual clothing store and having them witness the exposure of their avatar's naked virtual body, being either alone or in the presence of a virtual salesman. Our results indicate that a second character does not affect levels of presence and body ownership illusion, but does cause a strong emotional response, thus suggesting that social context and social presence affectively impact users.", "abstracts": [ { "abstractType": "Regular", "content": "The recent release of affordable head mounted displays (HMDs) foretells a near future where a variety of social interactions could be taking place in VR environments. There is little knowledge however as to how actual social conditions impact users during a virtual experience. We investigate this issue by placing 54 individuals into a virtual clothing store and having them witness the exposure of their avatar's naked virtual body, being either alone or in the presence of a virtual salesman. 
Our results indicate that a second character does not affect levels of presence and body ownership illusion, but does cause a strong emotional response, thus suggesting that social context and social presence affectively impact users.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The recent release of affordable head mounted displays (HMDs) foretells a near future where a variety of social interactions could be taking place in VR environments. There is little knowledge however as to how actual social conditions impact users during a virtual experience. We investigate this issue by placing 54 individuals into a virtual clothing store and having them witness the exposure of their avatar's naked virtual body, being either alone or in the presence of a virtual salesman. Our results indicate that a second character does not affect levels of presence and body ownership illusion, but does cause a strong emotional response, thus suggesting that social context and social presence affectively impact users.", "fno": "08024496", "keywords": [ "Three Dimensional Displays", "Clothing", "Avatars", "Mirrors", "Registers", "Solid Modeling", "Animation", "Virtual Reality", "Presence", "Body Ownership Illusion", "Social Context", "Social Presence" ], "authors": [ { "affiliation": "Department of Product and Systems Design Engineering, University of the Aegean, Hermoupolis, Syros, Greece", "fullName": "Elena Dzardanova", "givenName": "Elena", "surname": "Dzardanova", "__typename": "ArticleAuthorType" }, { "affiliation": "Department of Cultural Technology and Communication, University of the Aegean, Mytilene, Lesvos, Greece", "fullName": "Vlasios Kasapakis", "givenName": "Vlasios", "surname": "Kasapakis", "__typename": "ArticleAuthorType" }, { "affiliation": "Department of Product and Systems Design Engineering, University of the Aegean, Hermoupolis, Syros, Greece", "fullName": "Damianos Gavalas", "givenName": "Damianos", "surname": "Gavalas", "__typename": 
"ArticleAuthorType" } ], "idPrefix": "iscc", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-07-01T00:00:00", "pubType": "proceedings", "pages": "6-11", "year": "2017", "issn": null, "isbn": "978-1-5386-1629-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08024495", "articleId": "12OmNAYXWC4", "__typename": "AdjacentArticleType" }, "next": { "fno": "08024497", "articleId": "12OmNAWpylQ", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2016/5670/0/5670a491", "title": "A Processual View on Social Presence Emergence in Virtual Worlds", "doi": null, "abstractUrl": "/proceedings-article/hicss/2016/5670a491/12OmNwO5LS5", "parentPublication": { "id": "proceedings/hicss/2016/5670/0", "title": "2016 49th Hawaii International Conference on System Sciences (HICSS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2017/6716/0/07893357", "title": "Influence of avatar appearance on presence in social VR", "doi": null, "abstractUrl": "/proceedings-article/3dui/2017/07893357/12OmNwwuDSr", "parentPublication": { "id": "proceedings/3dui/2017/6716/0", "title": "2017 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08263407", "title": "The Impact of Avatar Personalization and Immersion on Virtual Body 
Ownership, Presence, and Emotional Response", "doi": null, "abstractUrl": "/journal/tg/2018/04/08263407/13rRUILtJqW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vs-games/2018/7123/0/08493429", "title": "Improving Context Understanding Using Avatar's Affective Expressions Reflecting Operator's Mental States", "doi": null, "abstractUrl": "/proceedings-article/vs-games/2018/08493429/14tNJoD4Uxj", "parentPublication": { "id": "proceedings/vs-games/2018/7123/0", "title": "2018 10th International Conference on Virtual Worlds and Games for Serious Applications (VS-Games)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049676", "title": "The Impact of Avatar and Environment Congruence on Plausibility, Embodiment, Presence, and the Proteus Effect in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049676/1KYosbnM8q4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998133", "title": "The Security-Utility Trade-off for Iris Authentication and Eye Animation for Social Virtual Avatars", "doi": null, "abstractUrl": "/journal/tg/2020/05/08998133/1hrXcnyAOzu", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089612", "title": "Effects of volumetric capture avatars on social presence in immersive virtual environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089612/1jIxdAmCCJi", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual 
Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090683", "title": "The Effects of Avatar Visibility on Behavioral Response with or without Mirror-Visual Feedback in Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090683/1jIxzZ4gw4E", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a438", "title": "Impact of Avatar Anthropomorphism and Task Type on Social Presence in Immersive Collaborative Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a438/1tnXuRl9EJi", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBC8AAD", "title": "2010 IEEE Virtual Reality Conference (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2010", "__typename": "ProceedingType" }, "article": { "id": "12OmNwoPtlH", "doi": "10.1109/VR.2010.5444805", "title": "The contribution of real-time mirror reflections of motor actions on virtual body ownership in an immersive virtual environment", "normalizedTitle": "The contribution of real-time mirror reflections of motor actions on virtual body ownership in an immersive virtual environment", "abstract": "This paper reports an experiment that investigated people's body ownership of an avatar that was observed in a virtual mirror. Twenty subjects were recruited in a within-groups study where 10 first experienced a virtual character that synchronously reflected their upper-body movements as seen in a virtual mirror, and then an asynchronous condition where the mirror avatar displayed prerecorded actions, unrelated to those of the participant. The other 10 subjects experienced the conditions in the opposite order. In both conditions the participant could carry out actions that led to elevation above ground level, as seen from their first person perspective and correspondingly in the mirror. A rotating virtual fan eventually descended to 2 m above the ground. The hypothesis was that synchronous mirror reflection would result in higher subjective sense of ownership. A questionnaire analysis showed that the body ownership illusion was significantly greater for the synchronous than asynchronous condition. Additionally participants in the synchronous condition avoided collision with the descending fan significantly more often than those in the asynchronous condition. 
The results of this experiment are put into context within similar experiments on multisensory correlation and body ownership within cognitive neuroscience.", "abstracts": [ { "abstractType": "Regular", "content": "This paper reports an experiment that investigated people's body ownership of an avatar that was observed in a virtual mirror. Twenty subjects were recruited in a within-groups study where 10 first experienced a virtual character that synchronously reflected their upper-body movements as seen in a virtual mirror, and then an asynchronous condition where the mirror avatar displayed prerecorded actions, unrelated to those of the participant. The other 10 subjects experienced the conditions in the opposite order. In both conditions the participant could carry out actions that led to elevation above ground level, as seen from their first person perspective and correspondingly in the mirror. A rotating virtual fan eventually descended to 2 m above the ground. The hypothesis was that synchronous mirror reflection would result in higher subjective sense of ownership. A questionnaire analysis showed that the body ownership illusion was significantly greater for the synchronous than asynchronous condition. Additionally participants in the synchronous condition avoided collision with the descending fan significantly more often than those in the asynchronous condition. The results of this experiment are put into context within similar experiments on multisensory correlation and body ownership within cognitive neuroscience.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper reports an experiment that investigated people's body ownership of an avatar that was observed in a virtual mirror. 
Twenty subjects were recruited in a within-groups study where 10 first experienced a virtual character that synchronously reflected their upper-body movements as seen in a virtual mirror, and then an asynchronous condition where the mirror avatar displayed prerecorded actions, unrelated to those of the participant. The other 10 subjects experienced the conditions in the opposite order. In both conditions the participant could carry out actions that led to elevation above ground level, as seen from their first person perspective and correspondingly in the mirror. A rotating virtual fan eventually descended to 2 m above the ground. The hypothesis was that synchronous mirror reflection would result in higher subjective sense of ownership. A questionnaire analysis showed that the body ownership illusion was significantly greater for the synchronous than asynchronous condition. Additionally participants in the synchronous condition avoided collision with the descending fan significantly more often than those in the asynchronous condition. 
The results of this experiment are put into context within similar experiments on multisensory correlation and body ownership within cognitive neuroscience.", "fno": "05444805", "keywords": [ "Mirrors", "Reflection", "Virtual Environment", "Rubber", "Virtual Reality", "Computer Graphics", "Avatars", "Synchronous Motors", "Large Scale Integration", "Computer Science", "Virtual Reality", "Rubber Hand Illusion", "Body Ownership", "Agency" ], "authors": [ { "affiliation": "EVENT Lab Facultat de Psicologia Universitat de Barcelona, Spain", "fullName": "Mar González-Franco", "givenName": "Mar", "surname": "González-Franco", "__typename": "ArticleAuthorType" }, { "affiliation": "EVENT Lab Facultat de Psicologia Universitat de Barcelona, Spain", "fullName": "Daniel Pérez-Marcos", "givenName": "Daniel", "surname": "Pérez-Marcos", "__typename": "ArticleAuthorType" }, { "affiliation": "EVENT Lab Facultat de Psicologia Universitat de Barcelona, Spain", "fullName": "Bernhard Spanlang", "givenName": "Bernhard", "surname": "Spanlang", "__typename": "ArticleAuthorType" }, { "affiliation": "EVENT Lab Facultat de Psicologia Universitat de Barcelona, Spain", "fullName": "Mel Slater", "givenName": "Mel", "surname": "Slater", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2010-03-01T00:00:00", "pubType": "proceedings", "pages": "111-114", "year": "2010", "issn": "1087-8270", "isbn": "978-1-4244-6237-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "05444802", "articleId": "12OmNwD1pSy", "__typename": "AdjacentArticleType" }, "next": { "fno": "05444803", "articleId": "12OmNyxXlwe", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2017/6647/0/07892318", "title": "Observation of mirror reflection and voluntary self-touch 
enhance self-recognition for a telexistence robot", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892318/12OmNBQ2W0V", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504682", "title": "The role of interaction in virtual embodiment: Effects of the virtual hand representation", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504682/12OmNwE9Our", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446282", "title": "Illusory Body Ownership Between Different Body Parts: Synchronization of Right Thumb and Right Arm", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446282/13bd1gQYgE6", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08447562", "title": "In Limbo: The Effect of Gradual Visual Transition Between Real and Virtual on Virtual Body Ownership Illusion and Presence", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08447562/13bd1sx4Zt3", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08263407", "title": "The Impact of Avatar Personalization and Immersion on Virtual Body Ownership, Presence, and Emotional Response", "doi": null, "abstractUrl": "/journal/tg/2018/04/08263407/13rRUILtJqW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040597", "title": "Drumming in Immersive Virtual Reality: The Body Shapes the Way We Play", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040597/13rRUwbs20V", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798346", "title": "Scrambled Body: A Method to Compare Full Body Illusion and Illusory Body Ownership of Body Parts", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798346/1cJ0NSIEQda", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797787", "title": "The Effect of Hand Size and Interaction Modality on the Virtual Hand Illusion", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2019/08797787/1cJ179JUrPa", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxE2mWh", "title": "2013 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2013", "__typename": "ProceedingType" }, "article": { "id": "12OmNz5apEB", "doi": "10.1109/VR.2013.6549442", "title": "Drumming in immersive virtual reality: The body shapes the way we play", "normalizedTitle": "Drumming in immersive virtual reality: The body shapes the way we play", "abstract": "Summary form only given. It has been shown that it is possible to generate perceptual illusions of ownership in immersive virtual reality (IVR) over a virtual body that visually substitutes a person's real body, independently of appearance differences between the two [1, 2]. However, the psychological, behavioral and attitudinal consequences of such body transformations remain unknown [3]. Thirty six Caucasian people participated in a between-groups experiment where they played a West-African Djembe hand drum accompanying another avatar inside IVR. Participants were represented by plainly shaded white hands in the baseline condition, and either a casually dressed dark-skinned virtual body (CD) or a formal suited light-skinned body (FL) in the experimental conditions. Although the experienced body ownership illusion was strong for both groups, only the CD representation produced significant increases in participants' movement patterns compared to the baseline and compared to the FL representation. Further analysis showed that the observed behavioral changes were a function of the illusion strength and the perceived appropriateness of the virtual body for drumming. 
These results demonstrate that full body ownership illusions can lead to substantial behavioral and possibly cognitive changes depending on the virtual body appearance, with important implications for learning, education, training, psychotherapy and rehabilitation applications using IVR.", "abstracts": [ { "abstractType": "Regular", "content": "Summary form only given. It has been shown that it is possible to generate perceptual illusions of ownership in immersive virtual reality (IVR) over a virtual body that visually substitutes a person's real body, independently of appearance differences between the two [1, 2]. However, the psychological, behavioral and attitudinal consequences of such body transformations remain unknown [3]. Thirty six Caucasian people participated in a between-groups experiment where they played a West-African Djembe hand drum accompanying another avatar inside IVR. Participants were represented by plainly shaded white hands in the baseline condition, and either a casually dressed dark-skinned virtual body (CD) or a formal suited light-skinned body (FL) in the experimental conditions. Although the experienced body ownership illusion was strong for both groups, only the CD representation produced significant increases in participants' movement patterns compared to the baseline and compared to the FL representation. Further analysis showed that the observed behavioral changes were a function of the illusion strength and the perceived appropriateness of the virtual body for drumming. These results demonstrate that full body ownership illusions can lead to substantial behavioral and possibly cognitive changes depending on the virtual body appearance, with important implications for learning, education, training, psychotherapy and rehabilitation applications using IVR.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Summary form only given. 
It has been shown that it is possible to generate perceptual illusions of ownership in immersive virtual reality (IVR) over a virtual body that visually substitutes a person's real body, independently of appearance differences between the two [1, 2]. However, the psychological, behavioral and attitudinal consequences of such body transformations remain unknown [3]. Thirty six Caucasian people participated in a between-groups experiment where they played a West-African Djembe hand drum accompanying another avatar inside IVR. Participants were represented by plainly shaded white hands in the baseline condition, and either a casually dressed dark-skinned virtual body (CD) or a formal suited light-skinned body (FL) in the experimental conditions. Although the experienced body ownership illusion was strong for both groups, only the CD representation produced significant increases in participants' movement patterns compared to the baseline and compared to the FL representation. Further analysis showed that the observed behavioral changes were a function of the illusion strength and the perceived appropriateness of the virtual body for drumming. These results demonstrate that full body ownership illusions can lead to substantial behavioral and possibly cognitive changes depending on the virtual body appearance, with important implications for learning, education, training, psychotherapy and rehabilitation applications using IVR.", "fno": "06549442", "keywords": [ "Europe", "Shape", "Abstracts", "Psychology", "Avatars", "Education" ], "authors": [ { "affiliation": "Event Lab., Univ. de Barcelona, Barcelona, Spain", "fullName": "Konstantina Kilteni", "givenName": "Konstantina", "surname": "Kilteni", "__typename": "ArticleAuthorType" }, { "affiliation": "Event Lab., Univ. de Barcelona, Barcelona, Spain", "fullName": "Ilias Bergstom", "givenName": "Ilias", "surname": "Bergstom", "__typename": "ArticleAuthorType" }, { "affiliation": "Event Lab., Univ. 
de Barcelona, Barcelona, Spain", "fullName": "Mel Slater", "givenName": "Mel", "surname": "Slater", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2013-03-01T00:00:00", "pubType": "proceedings", "pages": "1-1", "year": "2013", "issn": "1087-8270", "isbn": "978-1-4673-4795-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06549441", "articleId": "12OmNyUFfJC", "__typename": "AdjacentArticleType" }, "next": { "fno": "06549443", "articleId": "12OmNyugz43", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223405", "title": "Wings and flying in immersive VR — Controller type, sound effects and experienced ownership and agency", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223405/12OmNBOCWnu", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223377", "title": "Avatar embodiment realism and virtual fitness training", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223377/12OmNCcKQFn", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223315", "title": "Keynote speaker: Transforming the self through embodiment and agency", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223315/12OmNroij0t", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444805", 
"title": "The contribution of real-time mirror reflections of motor actions on virtual body ownership in an immersive virtual environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444805/12OmNwoPtlH", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446318", "title": "Object Size Perception in Immersive Virtual Reality: Avatar Realism Affects the Way We Perceive", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446318/13bd1AITna8", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040597", "title": "Drumming in Immersive Virtual Reality: The Body Shapes the Way We Play", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040597/13rRUwbs20V", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/co/2014/07/mco2014070024", "title": "Transcending the Self in Immersive Virtual Reality", "doi": null, "abstractUrl": "/magazine/co/2014/07/mco2014070024/13rRUwcAqvw", "parentPublication": { "id": "mags/co", "title": "Computer", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2017/01/mcg2017010020", "title": "Playing with Senses in VR: Alternate Perceptions Combining Vision and Touch", "doi": null, "abstractUrl": "/magazine/cg/2017/01/mcg2017010020/13rRUytF43L", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vrw/2023/4839/0/483900a541", "title": "Is Immersive Virtual Reality in K-12 Education Ready for Primetime? Challenges, Possibilities, and Considerations", "doi": null, "abstractUrl": "/proceedings-article/vrw/2023/483900a541/1MTTgSsplgk", "parentPublication": { "id": "proceedings/vrw/2023/4839/null", "title": "2023 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/07/08952604", "title": "Effect of Avatar Appearance on Detection Thresholds for Remapped Hand Movements", "doi": null, "abstractUrl": "/journal/tg/2021/07/08952604/1gqqhpSZ19S", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ0T8PM6qI", "doi": "10.1109/VR.2019.8798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "normalizedTitle": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "abstract": "Humans have one own body. However, we can share a body with two different persons in a virtual environment. We have developed a shared body as an avatar that is controlled by two persons' actions. Movements of two subjects were continuously captured, and integrated into the avatar's motion with the ratios of 0:100,25:75, 50:50, 75:25, 100:0. They were not aware of integration ratios, and asked to reach cubes with the right hand. They felt body ownership and sense of agency to the shared avatar more when the responsible ratio was higher. The reaching path of the avatar's hand was shorter in the shared body condition (75:25) than the single body condition (100:0). These results suggest that we have some of body ownership and sense of agency to the shared body, and the task performance is improved by the shared body.", "abstracts": [ { "abstractType": "Regular", "content": "Humans have one own body. However, we can share a body with two different persons in a virtual environment. We have developed a shared body as an avatar that is controlled by two persons' actions. Movements of two subjects were continuously captured, and integrated into the avatar's motion with the ratios of 0:100,25:75, 50:50, 75:25, 100:0. They were not aware of integration ratios, and asked to reach cubes with the right hand. They felt body ownership and sense of agency to the shared avatar more when the responsible ratio was higher. 
The reaching path of the avatar's hand was shorter in the shared body condition (75:25) than the single body condition (100:0). These results suggest that we have some of body ownership and sense of agency to the shared body, and the task performance is improved by the shared body.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Humans have one own body. However, we can share a body with two different persons in a virtual environment. We have developed a shared body as an avatar that is controlled by two persons' actions. Movements of two subjects were continuously captured, and integrated into the avatar's motion with the ratios of 0:100,25:75, 50:50, 75:25, 100:0. They were not aware of integration ratios, and asked to reach cubes with the right hand. They felt body ownership and sense of agency to the shared avatar more when the responsible ratio was higher. The reaching path of the avatar's hand was shorter in the shared body condition (75:25) than the single body condition (100:0). 
These results suggest that we have some of body ownership and sense of agency to the shared body, and the task performance is improved by the shared body.", "fno": "08798222", "keywords": [ "Avatars", "Action Integration", "Body Ownership", "Shared Body Condition", "Single Body Condition", "Avatar Motion", "Avatars", "Task Analysis", "Virtual Environments", "Head", "Collaboration", "Robots", "Rubber", "Sense Of Agency", "Body Ownership", "Embodiment", "Collaboration", "Augmented Human", "I 3 7 Computer Graphics Three Dimensional Graphics And Realism X 2014 Virtual Reality", "H 1 2 Models And Principles User Machine Systems X 2014 Human Factors" ], "authors": [ { "affiliation": "Toyohashi University of Technology", "fullName": "Takayoshi Hagiwara", "givenName": "Takayoshi", "surname": "Hagiwara", "__typename": "ArticleAuthorType" }, { "affiliation": "Keio University", "fullName": "Maki Sugimoto", "givenName": "Maki", "surname": "Sugimoto", "__typename": "ArticleAuthorType" }, { "affiliation": "The University of Tokyo", "fullName": "Masahiko Inami", "givenName": "Masahiko", "surname": "Inami", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "954-955", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08797832", "articleId": "1cJ0Z4AOHN6", "__typename": "AdjacentArticleType" }, "next": { "fno": "08798360", "articleId": "1cJ10qsqQsU", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body 
Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2013/4795/0/06549373", "title": "Integrating head and full-body tracking for embodiment in virtual characters", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549373/12OmNx0RIVC", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446229", "title": "Any &#x201C;Body&#x201D; There? Avatar Visibility Effects in a Virtual Reality Game", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446229/13bd1fHrlRx", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446448", "title": "Agency Enhances Body Ownership Illusion of Being a Virtual Bat", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446448/13bd1gzWkRR", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08263407", "title": "The Impact of Avatar Personalization and Immersion on Virtual Body Ownership, Presence, and Emotional Response", "doi": null, "abstractUrl": "/journal/tg/2018/04/08263407/13rRUILtJqW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040597", "title": "Drumming in Immersive Virtual Reality: The Body Shapes the Way We Play", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040597/13rRUwbs20V", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/co/2014/07/mco2014070024", "title": "Transcending the Self in Immersive Virtual Reality", "doi": null, "abstractUrl": "/magazine/co/2014/07/mco2014070024/13rRUwcAqvw", "parentPublication": { "id": "mags/co", "title": "Computer", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798346", "title": "Scrambled Body: A Method to Compare Full Body Illusion and Illusory Body Ownership of Body Parts", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798346/1cJ0NSIEQda", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797787", "title": "The Effect of Hand Size and Interaction Modality on the Virtual Hand Illusion", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797787/1cJ179JUrPa", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/10/09105074", "title": "Virtual Co-Embodiment: Evaluation of the Sense of Agency While Sharing the Control of a Virtual Body Among Two Individuals", "doi": null, "abstractUrl": "/journal/tg/2021/10/09105074/1kj0SvEe6ly", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ0Y0o1pO8", "doi": "10.1109/VR.2019.8798055", "title": "Embodying an Extra Virtual Body in Augmented Reality", "normalizedTitle": "Embodying an Extra Virtual Body in Augmented Reality", "abstract": "Presence and the sense of embodiment are essential concepts for the experience of our self and virtual bodies, but there is little quantitative evidence for a relation between these, and this relation becomes more complicated when there are real and virtual bodies in augmented reality (AR). We investigate the experience of body ownership, agency, self-location and self-presence in AR where users can see their real body and a virtual body from behind. Active arm movement congruency and virtual anthropomorphism are varied. We found significant effects of movement congruency but not anthropomorphism, a strong correlation between self-presence and body ownership, and a moderate correlation between self-presence and agency and self-location.", "abstracts": [ { "abstractType": "Regular", "content": "Presence and the sense of embodiment are essential concepts for the experience of our self and virtual bodies, but there is little quantitative evidence for a relation between these, and this relation becomes more complicated when there are real and virtual bodies in augmented reality (AR). We investigate the experience of body ownership, agency, self-location and self-presence in AR where users can see their real body and a virtual body from behind. Active arm movement congruency and virtual anthropomorphism are varied. 
We found significant effects of movement congruency but not anthropomorphism, a strong correlation between self-presence and body ownership, and a moderate correlation between self-presence and agency and self-location.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Presence and the sense of embodiment are essential concepts for the experience of our self and virtual bodies, but there is little quantitative evidence for a relation between these, and this relation becomes more complicated when there are real and virtual bodies in augmented reality (AR). We investigate the experience of body ownership, agency, self-location and self-presence in AR where users can see their real body and a virtual body from behind. Active arm movement congruency and virtual anthropomorphism are varied. We found significant effects of movement congruency but not anthropomorphism, a strong correlation between self-presence and body ownership, and a moderate correlation between self-presence and agency and self-location.", "fno": "08798055", "keywords": [ "Augmented Reality", "Virtual Bodies", "Quantitative Evidence", "Body Ownership", "Active Arm Movement Congruency", "Virtual Anthropomorphism", "Virtual Body", "AR", "Anthropomorphism", "Avatars", "Augmented Reality", "Correlation", "Conferences", "Cameras", "Human Centered Computing", "Human Computer Interaction HCI", "Interaction Paradigms", "Mixed Augmented Reality", "Empirical Studies In HCI" ], "authors": [ { "affiliation": "Utrecht University, The Netherlands", "fullName": "Nina Rosa", "givenName": "Nina", "surname": "Rosa", "__typename": "ArticleAuthorType" }, { "affiliation": "Utrecht University, The Netherlands", "fullName": "Jean-Paul van Bommel", "givenName": "Jean-Paul", "surname": "van Bommel", "__typename": "ArticleAuthorType" }, { "affiliation": "Utrecht University, The Netherlands", "fullName": "Wolfgang Hürst", "givenName": "Wolfgang", "surname": "Hürst", "__typename": "ArticleAuthorType" }, { 
"affiliation": "UMC Utrecht, Rehabilitation Ctr. De Hoogstraat", "fullName": "Tanja Nijboer", "givenName": "Tanja", "surname": "Nijboer", "__typename": "ArticleAuthorType" }, { "affiliation": "Utrecht University, The Netherlands", "fullName": "Remco C. Veltkamp", "givenName": "Remco C.", "surname": "Veltkamp", "__typename": "ArticleAuthorType" }, { "affiliation": "TNO", "fullName": "Peter Werkhoven", "givenName": "Peter", "surname": "Werkhoven", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "1138-1139", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08798253", "articleId": "1cJ1aIfowEw", "__typename": "AdjacentArticleType" }, "next": { "fno": "08797909", "articleId": "1cJ182qfnpK", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223379", "title": "Avatar anthropomorphism and illusion of body ownership in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223379/12OmNAWpyrk", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699238", "title": "Is That Me?&#x2014;Embodiment and Body 
Perception with an Augmented Reality Mirror", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699238/19F1SZ9ch0I", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a329", "title": "Anthropomorphism of Virtual Agents and Human Cognitive Performance in Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a329/1CJdRqHEpry", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a001", "title": "A Cardboard-Based Virtual Reality Study on Self-Avatar Appearance and Breathing", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a001/1CJdXjsLKBG", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798044", "title": "Effect of Full Body Avatar in Augmented Reality Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798044/1cJ14GMFJdK", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797719", "title": "The Effect of Avatar Appearance on Social Presence in an Augmented Reality Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797719/1cJ1dVsXQDS", 
"parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a462", "title": "Body Weight Perception of Females using Photorealistic Avatars in Virtual and Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a462/1pysu9tPcGc", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/12/09495125", "title": "Being an Avatar &#x201C;for Real&#x201D;: A Survey on Virtual Embodiment in Augmented Reality", "doi": null, "abstractUrl": "/journal/tg/2022/12/09495125/1vyju4jl6AE", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxE2mWh", "title": "2013 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2013", "__typename": "ProceedingType" }, "article": { "id": "12OmNB0X8pB", "doi": "10.1109/VR.2013.6549415", "title": "Smelling screen: Presenting a virtual odor source on a LCD screen", "normalizedTitle": "Smelling screen: Presenting a virtual odor source on a LCD screen", "abstract": "The smelling screen is a new olfactory display that can generate a localized odor distribution on a two-dimensional display screen. The generated odor distribution is as if an odor source had been placed on the screen, and leads the user to perceive the odor as emanating from a specific region of the screen. The position of this virtual odor source can be shifted to an arbitrary position on the screen. The user can freely move his/her head to sniff at various locations on the screen, and can experience realistic changes in the odor intensity with respect to the sniffing location.", "abstracts": [ { "abstractType": "Regular", "content": "The smelling screen is a new olfactory display that can generate a localized odor distribution on a two-dimensional display screen. The generated odor distribution is as if an odor source had been placed on the screen, and leads the user to perceive the odor as emanating from a specific region of the screen. The position of this virtual odor source can be shifted to an arbitrary position on the screen. The user can freely move his/her head to sniff at various locations on the screen, and can experience realistic changes in the odor intensity with respect to the sniffing location.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The smelling screen is a new olfactory display that can generate a localized odor distribution on a two-dimensional display screen. 
The generated odor distribution is as if an odor source had been placed on the screen, and leads the user to perceive the odor as emanating from a specific region of the screen. The position of this virtual odor source can be shifted to an arbitrary position on the screen. The user can freely move his/her head to sniff at various locations on the screen, and can experience realistic changes in the odor intensity with respect to the sniffing location.", "fno": "06549415", "keywords": [ "Robot Sensing Systems", "Olfactory", "Chemicals", "Mobile Robots", "Chemical Sensors", "Odor Presentation", "Olfactory Display" ], "authors": [ { "affiliation": null, "fullName": "Haruka Matsukura", "givenName": "Haruka", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Tatsuhiro Yoneda", "givenName": "Tatsuhiro", "surname": "Yoneda", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Hiroshi Ishida", "givenName": "Hiroshi", "surname": "Ishida", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2013-03-01T00:00:00", "pubType": "proceedings", "pages": "167-168", "year": "2013", "issn": "1087-8270", "isbn": "978-1-4673-4795-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06549414", "articleId": "12OmNy3iFiV", "__typename": "AdjacentArticleType" }, "next": { "fno": "06549416", "articleId": "12OmNx5GU9t", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811065", "title": "Demonstration of Improved Olfactory Display using Rapidly-Switching Solenoid Valves", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811065/12OmNAR1aSY", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality 
Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759448", "title": "Multi-sensorial field display: Presenting spatial distribution of airflow and odor", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759448/12OmNx3ZjcK", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811042", "title": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811042/12OmNy7yEfO", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2012/1247/0/06180915", "title": "Smelling screen: Technique to present a virtual odor source at an arbitrary position on a screen", "doi": null, "abstractUrl": "/proceedings-article/vr/2012/06180915/12OmNzgeLC5", "parentPublication": { "id": "proceedings/vr/2012/1247/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446484", "title": "Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446484/13bd1fdV4kM", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 
IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2008/01/mcg2008010075", "title": "Cooking Up an Interactive Olfactory Game Display", "doi": null, "abstractUrl": "/magazine/cg/2008/01/mcg2008010075/13rRUwvT9lE", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icbdie/2021/3870/0/387000a296", "title": "Odor Prediction Based on Chemical Molecular Structure", "doi": null, "abstractUrl": "/proceedings-article/icbdie/2021/387000a296/1uCih1vBPSU", "parentPublication": { "id": "proceedings/icbdie/2021/3870/0", "title": "2021 2nd International Conference on Big Data and Informatization Education (ICBDIE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNASrawz", "title": "2009 IEEE Virtual Reality Conference", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2009", "__typename": "ProceedingType" }, "article": { "id": "12OmNs0C9X2", "doi": "10.1109/VR.2009.4811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "normalizedTitle": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "abstract": "The results of our current multidisciplinary research efforts on the development of an olfactory display system are demonstrated. To present odors with a vivid sense of reality, we propose to use computational fluid dynamics (CFD) simulation in conjunction with the olfactory display system. In this demonstration, an odor is released with a movie clip using the olfactory display. A CFD solver is employed to calculate the turbulent airflow field in the given environment and diffusion/advection of odor molecules from their source. The olfactory display system generates an odor with the concentration determined by the calculated odor distribution. The user is assumed to be a small animal slowly walking through a virtual room, and experiences the spread of the odor in the room.", "abstracts": [ { "abstractType": "Regular", "content": "The results of our current multidisciplinary research efforts on the development of an olfactory display system are demonstrated. To present odors with a vivid sense of reality, we propose to use computational fluid dynamics (CFD) simulation in conjunction with the olfactory display system. In this demonstration, an odor is released with a movie clip using the olfactory display. A CFD solver is employed to calculate the turbulent airflow field in the given environment and diffusion/advection of odor molecules from their source. 
The olfactory display system generates an odor with the concentration determined by the calculated odor distribution. The user is assumed to be a small animal slowly walking through a virtual room, and experiences the spread of the odor in the room.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The results of our current multidisciplinary research efforts on the development of an olfactory display system are demonstrated. To present odors with a vivid sense of reality, we propose to use computational fluid dynamics (CFD) simulation in conjunction with the olfactory display system. In this demonstration, an odor is released with a movie clip using the olfactory display. A CFD solver is employed to calculate the turbulent airflow field in the given environment and diffusion/advection of odor molecules from their source. The olfactory display system generates an odor with the concentration determined by the calculated odor distribution. The user is assumed to be a small animal slowly walking through a virtual room, and experiences the spread of the odor in the room.", "fno": "04811062", "keywords": [ "Chemioception", "Computational Fluid Dynamics", "Turbulence", "Odor Presentation", "Olfactory Display System", "Computational Fluid Dynamics", "CFD Simulation", "Turbulent Airflow Field", "Fluid Dynamics", "Olfactory", "Displays", "Computational Fluid Dynamics", "Computational Modeling", "Motion Pictures", "Virtual Reality", "Layout", "Surface Acoustic Wave Devices", "Acoustic Waves", "Olfactory Display", "Computational Fluid Dynamics", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial", "Augmented", "And Virtual Realities" ], "authors": [ { "affiliation": "Tokyo University of Agriculture and Technology", "fullName": "Haruka Matsukura", "givenName": "Haruka", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology", "fullName": "Hitoshi 
Yoshida", "givenName": "Hitoshi", "surname": "Yoshida", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology; 2-24-16 Nakacho, Koganei, Tokyo 184-8588, Japan, h_ishida@cc.tuat.ac.jp", "fullName": "Hiroshi Ishida", "givenName": "Hiroshi", "surname": "Ishida", "__typename": "ArticleAuthorType" }, { "affiliation": "Shibaura Institute of Technology; 3-7-5 Toyosu, Koto-ku, Tokyo 135-8548, Japan, saitoa@sic.shibaura-it.ac.jp", "fullName": "Atsushi Saitoh", "givenName": "Atsushi", "surname": "Saitoh", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Institute of Technology; 2-12-1 Ookayama, Meguro-ku, Tokyo 152-8552, Japan, nakamoto@mn.ee.titech.ac.jp", "fullName": "Takamichi Nakamoto", "givenName": "Takamichi", "surname": "Nakamoto", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2009-03-01T00:00:00", "pubType": "proceedings", "pages": "295-296", "year": "2009", "issn": "1087-8270", "isbn": "978-1-4244-3943-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "04811061", "articleId": "12OmNx76TSt", "__typename": "AdjacentArticleType" }, "next": { "fno": "04811063", "articleId": "12OmNxwENIr", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811065", "title": "Demonstration of Improved Olfactory Display using Rapidly-Switching Solenoid Valves", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811065/12OmNAR1aSY", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2013/4795/0/06549409", "title": "Visual-olfactory presentation system using a miniaturized olfactory 
display based on SAW streaming and electroosmotic pumps", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549409/12OmNwDSduJ", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240199", "title": "Wearable Olfactory Display: Using Odor in Outdoor Environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240199/12OmNwK7o3V", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811042", "title": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811042/12OmNy7yEfO", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08445841", "title": "Demonstration of Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08445841/13bd1eTtWYE", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446484", "title": "Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446484/13bd1fdV4kM", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089617", "title": "Virtual environment 
with smell using wearable olfactory display and computational fluid dynamics simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089617/1jIxfcDz7Ak", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a490", "title": "Investigating Individual Differences in Olfactory Adaptation to Pulse Ejection Odor Display by Scaling Olfaction Sensitivity of Intensity", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a490/1tnXnAd9AK4", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a279", "title": "Does Virtual Odor Representation Influence the Perception of Olfactory Intensity and Directionality in VR?", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a279/1tuAlZRpf6E", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBC8AAD", "title": "2010 IEEE Virtual Reality Conference (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2010", "__typename": "ProceedingType" }, "article": { "id": "12OmNviHKkx", "doi": "10.1109/VR.2010.5444763", "title": "On the effect of airflow on odor presentation", "normalizedTitle": "On the effect of airflow on odor presentation", "abstract": "This article describes the investigations on the effect of airflow on the perception of odors presented by an olfactory display device. A clear sensation of the direction to an odor source can be given to the user of the olfactory display when the air currents are provided to the user's face by using fans. When the air currents are not provided, the user feels as if the source is placed nearby. We hypothesize that this sensation is caused by the upward air currents generated by our body temperature. When there is no wind, only the odors from nearby sources are brought to our noses by the upward air currents. The result of a sensory test shows that the perceived location of an odor source changes with the airflow presented to the panelist. Providing airflow together with odors is thus promising to reproduce complicated situations that cannot be reproduced by olfactory stimulation alone.", "abstracts": [ { "abstractType": "Regular", "content": "This article describes the investigations on the effect of airflow on the perception of odors presented by an olfactory display device. A clear sensation of the direction to an odor source can be given to the user of the olfactory display when the air currents are provided to the user's face by using fans. When the air currents are not provided, the user feels as if the source is placed nearby. We hypothesize that this sensation is caused by the upward air currents generated by our body temperature. When there is no wind, only the odors from nearby sources are brought to our noses by the upward air currents. 
The result of a sensory test shows that the perceived location of an odor source changes with the airflow presented to the panelist. Providing airflow together with odors is thus promising to reproduce complicated situations that cannot be reproduced by olfactory stimulation alone.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This article describes the investigations on the effect of airflow on the perception of odors presented by an olfactory display device. A clear sensation of the direction to an odor source can be given to the user of the olfactory display when the air currents are provided to the user's face by using fans. When the air currents are not provided, the user feels as if the source is placed nearby. We hypothesize that this sensation is caused by the upward air currents generated by our body temperature. When there is no wind, only the odors from nearby sources are brought to our noses by the upward air currents. The result of a sensory test shows that the perceived location of an odor source changes with the airflow presented to the panelist. Providing airflow together with odors is thus promising to reproduce complicated situations that cannot be reproduced by olfactory stimulation alone.", "fno": "05444763", "keywords": [ "Odor Source Location", "Odor Presentation", "Airflow Effect", "Olfactory Display Device", "Clear Sensation", "Odor Source Direction", "Upward Air Currents", "Body Temperature" ], "authors": [ { "affiliation": "Tokyo Univ. of Agric. & Technol., Koganei, Japan", "fullName": "Haruka Matsukura", "givenName": "Haruka", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Univ. of Agric. & Technol., Koganei, Japan", "fullName": "Akira Ohno", "givenName": "Akira", "surname": "Ohno", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Univ. of Agric. 
& Technol., Koganei, Japan", "fullName": "Hiroshi Ishida", "givenName": "Hiroshi", "surname": "Ishida", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2010-03-01T00:00:00", "pubType": "proceedings", "pages": "287-288", "year": "2010", "issn": null, "isbn": "978-1-4244-6237-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "05444762", "articleId": "12OmNAo45HO", "__typename": "AdjacentArticleType" }, "next": { "fno": "05444760", "articleId": "12OmNyoiYZj", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cmcsn/2012/4738/0/4738a311", "title": "Research of Airflow Distribution for Center Data Equipment Room Air Conditioning System", "doi": null, "abstractUrl": "/proceedings-article/cmcsn/2012/4738a311/12OmNAOsMPI", "parentPublication": { "id": "proceedings/cmcsn/2012/4738/0", "title": "Computing, Measurement, Control and Sensor Network, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240207", "title": "SpotScents: A Novel Method of Natural Scent Delivery Using Multiple Scent Projectors", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240207/12OmNvA1hcF", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759448", "title": "Multi-sensorial field display: Presenting spatial distribution of airflow and odor", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759448/12OmNx3ZjcK", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icbake/2011/4512/0/4512a246", "title": "Intermittent Presentation of Hinoki Odor as the Inhibitory Factor on the Performance and Physiological Responses in Stroop Task", "doi": null, "abstractUrl": "/proceedings-article/icbake/2011/4512a246/12OmNxj23fg", "parentPublication": { "id": "proceedings/icbake/2011/4512/0", "title": "Biometrics and Kansei Engineering, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759464", "title": "Olfactory display using a miniaturized pump and a SAW atomizer for presenting low-volatile scents", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759464/12OmNy3AgAZ", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811042", "title": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811042/12OmNy7yEfO", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2016/04/07312920", "title": "Subject-Independent Odor Pleasantness Classification Using Brain and Peripheral Signals", "doi": null, "abstractUrl": "/journal/ta/2016/04/07312920/13rRUy2YLWE", 
"parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797727", "title": "Odor Modulation by Warming/Cooling Nose Based on Cross-modal Effect", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797727/1cJ0Ivh5MKk", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNzw8jgZ", "title": "2011 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2011", "__typename": "ProceedingType" }, "article": { "id": "12OmNx3ZjcK", "doi": "10.1109/VR.2011.5759448", "title": "Multi-sensorial field display: Presenting spatial distribution of airflow and odor", "normalizedTitle": "Multi-sensorial field display: Presenting spatial distribution of airflow and odor", "abstract": "A new device has been developed for generating airflow field and odor-concentration distribution in a real environment for presenting to the user. This device is called a multi-sensorial field (MSF) display. When two fans are placed facing each other, the airflows generated by them collide with each other and are radially deflected on a plane perpendicular to the original airflow direction. By utilizing the deflected airflow, the MSF display can present the airflow blowing from the front to the user without placing fans in front of the user. The directivity of the airflow deflection can be controlled by placing nozzles on the fans to adjust the cross-sectional shape of the airflow jets coming from the fans. The MSF display can also generate odor-concentration distribution in a real environment by introducing odor vapors into the airflow generated by the fans. The user can freely move his/her head and sniff at various locations in the generated odor distribution. The results of preliminary sensory tests are presented to show the potential of the MSF display.", "abstracts": [ { "abstractType": "Regular", "content": "A new device has been developed for generating airflow field and odor-concentration distribution in a real environment for presenting to the user. This device is called a multi-sensorial field (MSF) display. 
When two fans are placed facing each other, the airflows generated by them collide with each other and are radially deflected on a plane perpendicular to the original airflow direction. By utilizing the deflected airflow, the MSF display can present the airflow blowing from the front to the user without placing fans in front of the user. The directivity of the airflow deflection can be controlled by placing nozzles on the fans to adjust the cross-sectional shape of the airflow jets coming from the fans. The MSF display can also generate odor-concentration distribution in a real environment by introducing odor vapors into the airflow generated by the fans. The user can freely move his/her head and sniff at various locations in the generated odor distribution. The results of preliminary sensory tests are presented to show the potential of the MSF display.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "A new device has been developed for generating airflow field and odor-concentration distribution in a real environment for presenting to the user. This device is called a multi-sensorial field (MSF) display. When two fans are placed facing each other, the airflows generated by them collide with each other and are radially deflected on a plane perpendicular to the original airflow direction. By utilizing the deflected airflow, the MSF display can present the airflow blowing from the front to the user without placing fans in front of the user. The directivity of the airflow deflection can be controlled by placing nozzles on the fans to adjust the cross-sectional shape of the airflow jets coming from the fans. The MSF display can also generate odor-concentration distribution in a real environment by introducing odor vapors into the airflow generated by the fans. The user can freely move his/her head and sniff at various locations in the generated odor distribution. 
The results of preliminary sensory tests are presented to show the potential of the MSF display.", "fno": "05759448", "keywords": [ "Odor Vapors", "Multisensorial Field Display", "Spatial Distribution", "Airflow Field", "Odor Concentration Distribution", "Fans", "Collision", "Nozzles", "Cross Sectional Shape", "Airflow Jets", "MSF Display" ], "authors": [ { "affiliation": "Tokyo Univ. of Agric. & Technol., Koganei, Japan", "fullName": "H Matsukura", "givenName": "H", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": "Dept. of Mech. Syst. Eng., Tokyo Univ. of Agric. & Technol., Koganei, Japan", "fullName": "T Nihei", "givenName": "T", "surname": "Nihei", "__typename": "ArticleAuthorType" }, { "affiliation": "Dept. of Mech. Syst. Eng., Tokyo Univ. of Agric. & Technol., Koganei, Japan", "fullName": "H Ishida", "givenName": "H", "surname": "Ishida", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2011-03-01T00:00:00", "pubType": "proceedings", "pages": "119-122", "year": "2011", "issn": null, "isbn": "978-1-4577-0039-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "05759447", "articleId": "12OmNvStcQS", "__typename": "AdjacentArticleType" }, "next": { "fno": "05759449", "articleId": "12OmNBWi6JL", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cmcsn/2012/4738/0/4738a311", "title": "Research of Airflow Distribution for Center Data Equipment Room Air Conditioning System", "doi": null, "abstractUrl": "/proceedings-article/cmcsn/2012/4738a311/12OmNAOsMPI", "parentPublication": { "id": "proceedings/cmcsn/2012/4738/0", "title": "Computing, Measurement, Control and Sensor Network, International Conference on", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/vr/2013/4795/0/06549415", "title": "Smelling screen: Presenting a virtual odor source on a LCD screen", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549415/12OmNB0X8pB", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cdciem/2012/4639/0/4639a272", "title": "Effect on Resistance and Airflow Distribution of Lateral Moving-electrode Type Electric Field", "doi": null, "abstractUrl": "/proceedings-article/cdciem/2012/4639a272/12OmNC2OSJh", "parentPublication": { "id": "proceedings/cdciem/2012/4639/0", "title": "Computer Distributed Control and Intelligent Environmental Monitoring, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444763", "title": "On the effect of airflow on odor presentation", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444763/12OmNviHKkx", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240199", "title": "Wearable Olfactory Display: Using Odor in Outdoor Environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240199/12OmNwK7o3V", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": 
"IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811016", "title": "Selection Method of Odor Components for Olfactory Display Using Mass Spectrum Database", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811016/12OmNxzuMBP", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759464", "title": "Olfactory display using a miniaturized pump and a SAW atomizer for presenting low-volatile scents", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759464/12OmNy3AgAZ", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2012/1247/0/06180915", "title": "Smelling screen: Technique to present a virtual odor source at an arbitrary position on a screen", "doi": null, "abstractUrl": "/proceedings-article/vr/2012/06180915/12OmNzgeLC5", "parentPublication": { "id": "proceedings/vr/2012/1247/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNASrawz", "title": "2009 IEEE Virtual Reality Conference", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2009", "__typename": "ProceedingType" }, "article": { "id": "12OmNxzuMBP", "doi": "10.1109/VR.2009.4811016", "title": "Selection Method of Odor Components for Olfactory Display Using Mass Spectrum Database", "normalizedTitle": "Selection Method of Odor Components for Olfactory Display Using Mass Spectrum Database", "abstract": "A variety of smells can be realized by blending multiple odor components using an olfactory display. Since a set of odor components to cover the entire range of smells has not yet been known, we studied a method of selecting odor components using a large-scale mass spectrum database. Basis vectors corresponding to odor components were extracted by the NMF (nonnegative matrix factorization) method. Then, the recipe of the target odor was obtained using the nonnegative least-squares method. The basis vectors were successfully obtained from 10,000 compounds within a tolerable error. Moreover, the mass spectra of 104 odors composed of 322 compounds could be approximated using 32-50 basis vectors.", "abstracts": [ { "abstractType": "Regular", "content": "A variety of smells can be realized by blending multiple odor components using an olfactory display. Since a set of odor components to cover the entire range of smells has not yet been known, we studied a method of selecting odor components using a large-scale mass spectrum database. Basis vectors corresponding to odor components were extracted by the NMF (nonnegative matrix factorization) method. Then, the recipe of the target odor was obtained using the nonnegative least-squares method. The basis vectors were successfully obtained from 10,000 compounds within a tolerable error. 
Moreover, the mass spectra of 104 odors composed of 322 compounds could be approximated using 32-50 basis vectors.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "A variety of smells can be realized by blending multiple odor components using an olfactory display. Since a set of odor components to cover the entire range of smells has not yet been known, we studied a method of selecting odor components using a large-scale mass spectrum database. Basis vectors corresponding to odor components were extracted by the NMF (nonnegative matrix factorization) method. Then, the recipe of the target odor was obtained using the nonnegative least-squares method. The basis vectors were successfully obtained from 10,000 compounds within a tolerable error. Moreover, the mass spectra of 104 odors composed of 322 compounds could be approximated using 32-50 basis vectors.", "fno": "04811016", "keywords": [ "Least Squares Approximations", "Matrix Decomposition", "Multimedia Databases", "Odor Components", "Olfactory Display", "Large Scale Mass Spectrum Database", "NMF", "Nonnegative Matrix Factorization Method", "Target Odor Recipe", "Nonnegative Least Squares Method", "Mass Spectra", "Olfactory", "Displays", "Databases", "Virtual Reality", "Mass Spectroscopy", "Solenoids", "Valves", "Educational Institutions", "Data Engineering", "Weight Control", "Olfactory Display", "Mass Spectrometry", "NMF Method", "Nonnegative Least Squares", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial", "Augmented", "And Virtual Realities" ], "authors": [ { "affiliation": "Graduate school of science and engineering, Tokyo Institute of Technology 2-12-1, Ookayama, Meguro-ku, Tokyo, 152-8552, JAPAN E-mail: nakamoto@mn.ee.titech.ac.jp", "fullName": "Takamichi Nakamoto", "givenName": "Takamichi", "surname": "Nakamoto", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate school of science and engineering, Tokyo Institute of Technology", 
"fullName": "Keisuke Murakami", "givenName": "Keisuke", "surname": "Murakami", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2009-03-01T00:00:00", "pubType": "proceedings", "pages": "159-162", "year": "2009", "issn": "1087-8270", "isbn": "978-1-4244-3943-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "04811004", "articleId": "12OmNs0C9Xm", "__typename": "AdjacentArticleType" }, "next": { "fno": "04811017", "articleId": "1t0I2raPC12", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811065", "title": "Demonstration of Improved Olfactory Display using Rapidly-Switching Solenoid Valves", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811065/12OmNAR1aSY", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmtma/2017/4868/0/07832282", "title": "Rapid Determination of Geosmin and 2-MIB in Water by Headspace Solid Phase Microextraction Gas Chromatography-Mass Spectrometry", "doi": null, "abstractUrl": "/proceedings-article/icmtma/2017/07832282/12OmNBqMDEG", "parentPublication": { "id": "proceedings/icmtma/2017/4868/0", "title": "2017 9th International Conference on Measuring Technology and Mechatronics Automation (ICMTMA)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 
IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240199", "title": "Wearable Olfactory Display: Using Odor in Outdoor Environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240199/12OmNwK7o3V", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08445841", "title": "Demonstration of Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08445841/13bd1eTtWYE", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446484", "title": "Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446484/13bd1fdV4kM", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2008/01/mcg2008010075", "title": "Cooking Up an Interactive Olfactory Game Display", "doi": null, "abstractUrl": "/magazine/cg/2008/01/mcg2008010075/13rRUwvT9lE", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer 
Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a474", "title": "Simulating Olfactory Cocktail Party Effect in VR: A Multi-odor Display Approach Based on Attention", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a474/1CJbU8KWWTS", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNASrawz", "title": "2009 IEEE Virtual Reality Conference", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2009", "__typename": "ProceedingType" }, "article": { "id": "12OmNy7yEfO", "doi": "10.1109/VR.2009.4811042", "title": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "normalizedTitle": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "abstract": "This article describes the experiments on an interactive application of an olfactory display system into which computational fluid dynamics (CFD) simulation is incorporated. In the proposed system, the olfactory display is used to add special effects to movies and virtual reality systems by releasing odors relevant to the scenes shown on the computer screen. To provide high-presence olfactory stimuli to the users, a model of the environment shown in the scene is provided to a CFD solver. The airflow field in the environment and the dispersal of odor molecules from their source are then calculated. An odor blender is used to generate the odor with the concentration determined based on the calculated odor distribution. In the experiments, a virtual room was presented on a PC monitor, and the panel were asked to stroll in the room to find an odor source. The results showed the effectiveness of the CFD simulation in reproducing the spatial distribution of the odor in the virtual space.", "abstracts": [ { "abstractType": "Regular", "content": "This article describes the experiments on an interactive application of an olfactory display system into which computational fluid dynamics (CFD) simulation is incorporated. In the proposed system, the olfactory display is used to add special effects to movies and virtual reality systems by releasing odors relevant to the scenes shown on the computer screen. To provide high-presence olfactory stimuli to the users, a model of the environment shown in the scene is provided to a CFD solver. 
The airflow field in the environment and the dispersal of odor molecules from their source are then calculated. An odor blender is used to generate the odor with the concentration determined based on the calculated odor distribution. In the experiments, a virtual room was presented on a PC monitor, and the panel were asked to stroll in the room to find an odor source. The results showed the effectiveness of the CFD simulation in reproducing the spatial distribution of the odor in the virtual space.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This article describes the experiments on an interactive application of an olfactory display system into which computational fluid dynamics (CFD) simulation is incorporated. In the proposed system, the olfactory display is used to add special effects to movies and virtual reality systems by releasing odors relevant to the scenes shown on the computer screen. To provide high-presence olfactory stimuli to the users, a model of the environment shown in the scene is provided to a CFD solver. The airflow field in the environment and the dispersal of odor molecules from their source are then calculated. An odor blender is used to generate the odor with the concentration determined based on the calculated odor distribution. In the experiments, a virtual room was presented on a PC monitor, and the panel were asked to stroll in the room to find an odor source. 
The results showed the effectiveness of the CFD simulation in reproducing the spatial distribution of the odor in the virtual space.", "fno": "04811042", "keywords": [ "Brain Computer Interfaces", "Computational Fluid Dynamics", "Computer Displays", "Virtual Reality", "Interactive Odor Playback", "Interactive Application", "Olfactory Display System", "Computational Fluid Dynamics Simulation", "Virtual Reality System", "Computer Screen", "Olfactory Stimuli", "Airflow Field", "Odor Molecules Dispersal", "Odor Blender", "Odor Generation", "Virtual Room", "Odor Source", "Virtual Space", "Fluid Dynamics", "Olfactory", "Computational Fluid Dynamics", "Computational Modeling", "Computer Displays", "Layout", "Virtual Reality", "Motion Pictures", "Application Software", "Nose", "Olfactory Display", "Computational Fluid Dynamics", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial", "Augmented", "And Virtual Realities" ], "authors": [ { "affiliation": "Tokyo University of Agriculture and Technology 2-24-16 Nakacho, Koganei, Tokyo 184-8588, Japan", "fullName": "Haruka Matsukura", "givenName": "Haruka", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology 2-24-16 Nakacho, Koganei, Tokyo 184-8588, Japan", "fullName": "Hitoshi Yoshida", "givenName": "Hitoshi", "surname": "Yoshida", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology 2-24-16 Nakacho, Koganei, Tokyo 184-8588, Japan h_ishida@cc.tuat.ac.jp", "fullName": "Hiroshi Ishida", "givenName": "Hiroshi", "surname": "Ishida", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Institute of Technology 2-12-1 Ookayama, Meguro-ku, Tokyo 152-8552, Japan nakamoto@mn.ee.titech.ac.jp", "fullName": "Takamichi Nakamoto", "givenName": "Takamichi", "surname": "Nakamoto", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, 
"showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2009-03-01T00:00:00", "pubType": "proceedings", "pages": "255-256", "year": "2009", "issn": "1087-8270", "isbn": "978-1-4244-3943-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "04811041", "articleId": "12OmNyUWQSk", "__typename": "AdjacentArticleType" }, "next": { "fno": "04811043", "articleId": "12OmNBd9T2L", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2013/4795/0/06549415", "title": "Smelling screen: Presenting a virtual odor source on a LCD screen", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549415/12OmNB0X8pB", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811015", "title": "Effective Presentation Technique of Scent Using Small Ejection Quantities of Odor", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811015/12OmNqFa5ps", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, 
"abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089617", "title": "Virtual environment with smell using wearable olfactory display and computational fluid dynamics simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089617/1jIxfcDz7Ak", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a490", "title": "Investigating Individual Differences in Olfactory Adaptation to Pulse Ejection Odor Display by Scaling Olfaction Sensitivity of Intensity", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a490/1tnXnAd9AK4", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a279", "title": "Does Virtual Odor Representation Influence the Perception of Olfactory Intensity and Directionality in VR?", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a279/1tuAlZRpf6E", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icbdie/2021/3870/0/387000a296", "title": "Odor Prediction Based on Chemical Molecular Structure", "doi": null, "abstractUrl": "/proceedings-article/icbdie/2021/387000a296/1uCih1vBPSU", "parentPublication": { "id": "proceedings/icbdie/2021/3870/0", "title": "2021 2nd 
International Conference on Big Data and Informatization Education (ICBDIE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a309", "title": "A Cheminformatic Compression Method for Multiple Odor Label in Intelligent Perception", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a309/1xDQdqtVPxK", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNz2TCuO", "title": "Virtual Reality Conference, IEEE", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2012", "__typename": "ProceedingType" }, "article": { "id": "12OmNzgeLC5", "doi": "10.1109/VR.2012.6180915", "title": "Smelling screen: Technique to present a virtual odor source at an arbitrary position on a screen", "normalizedTitle": "Smelling screen: Technique to present a virtual odor source at an arbitrary position on a screen", "abstract": "A new olfactory display that can present a virtual odor source at an arbitrary position on a two-dimensional screen is proposed in this paper. The proposed device can give a sensation that an odor is emanating from a certain position on the screen. Fans are placed at the four corners of the screen. The airflows generated by the fans are deflected multiple times by making them collide with each other, and are finally directed toward the user from the position of a virtual odor source on the screen. By introducing odor vapor into the airflows, the odor is spread from the virtual odor source toward the user. The position of the virtual odor source can be shifted to an arbitrary position on the screen by adjusting the balance of the airflows from the four fans. The user can freely move his/her head and sniff at various locations. Potential applications of the proposed device include digital signage, video games, and exhibitions in museums. The result of odor-distribution measurement is presented here to show the validity of the device design.", "abstracts": [ { "abstractType": "Regular", "content": "A new olfactory display that can present a virtual odor source at an arbitrary position on a two-dimensional screen is proposed in this paper. The proposed device can give a sensation that an odor is emanating from a certain position on the screen. Fans are placed at the four corners of the screen. 
The airflows generated by the fans are deflected multiple times by making them collide with each other, and are finally directed toward the user from the position of a virtual odor source on the screen. By introducing odor vapor into the airflows, the odor is spread from the virtual odor source toward the user. The position of the virtual odor source can be shifted to an arbitrary position on the screen by adjusting the balance of the airflows from the four fans. The user can freely move his/her head and sniff at various locations. Potential applications of the proposed device include digital signage, video games, and exhibitions in museums. The result of odor-distribution measurement is presented here to show the validity of the device design.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "A new olfactory display that can present a virtual odor source at an arbitrary position on a two-dimensional screen is proposed in this paper. The proposed device can give a sensation that an odor is emanating from a certain position on the screen. Fans are placed at the four corners of the screen. The airflows generated by the fans are deflected multiple times by making them collide with each other, and are finally directed toward the user from the position of a virtual odor source on the screen. By introducing odor vapor into the airflows, the odor is spread from the virtual odor source toward the user. The position of the virtual odor source can be shifted to an arbitrary position on the screen by adjusting the balance of the airflows from the four fans. The user can freely move his/her head and sniff at various locations. Potential applications of the proposed device include digital signage, video games, and exhibitions in museums. 
The result of odor-distribution measurement is presented here to show the validity of the device design.", "fno": "06180915", "keywords": [], "authors": [ { "affiliation": "Tokyo University of Agriculture and Technology JSPS", "fullName": "Haruka Matsukura", "givenName": "Haruka", "surname": "Matsukura", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology Department of Mechanical Systems Engineering", "fullName": "Tatsuhiro Yoneda", "givenName": "Tatsuhiro", "surname": "Yoneda", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo University of Agriculture and Technology Department of Mechanical Systems Engineering", "fullName": "Hiroshi Ishida", "givenName": "Hiroshi", "surname": "Ishida", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2012-03-01T00:00:00", "pubType": "proceedings", "pages": "127-128", "year": "2012", "issn": null, "isbn": "978-1-4673-1247-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06180914", "articleId": "12OmNxE2n0j", "__typename": "AdjacentArticleType" }, "next": { "fno": "06180870", "articleId": "12OmNvHoQqm", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2013/4795/0/06549415", "title": "Smelling screen: Presenting a virtual odor source on a LCD screen", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549415/12OmNB0X8pB", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444763", "title": "On the effect of airflow on odor presentation", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444763/12OmNviHKkx", 
"parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240199", "title": "Wearable Olfactory Display: Using Odor in Outdoor Environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240199/12OmNwK7o3V", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759448", "title": "Multi-sensorial field display: Presenting spatial distribution of airflow and odor", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759448/12OmNx3ZjcK", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cicsyn/2009/3743/0/3743a045", "title": "Simulation of Coordinating Sniffer Robots for Building Odor Maps", "doi": null, "abstractUrl": "/proceedings-article/cicsyn/2009/3743a045/12OmNy5zsr3", "parentPublication": { "id": "proceedings/cicsyn/2009/3743/0", "title": "Computational Intelligence, Communication Systems and Networks, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811042", "title": "Interactive Odor Playback Based on Fluid Dynamics Simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811042/12OmNy7yEfO", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor 
Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icbdie/2021/3870/0/387000a296", "title": "Odor Prediction Based on Chemical Molecular Structure", "doi": null, "abstractUrl": "/proceedings-article/icbdie/2021/387000a296/1uCih1vBPSU", "parentPublication": { "id": "proceedings/icbdie/2021/3870/0", "title": "2021 2nd International Conference on Big Data and Informatization Education (ICBDIE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a309", "title": "A Cheminformatic Compression Method for Multiple Odor Label in Intelligent Perception", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a309/1xDQdqtVPxK", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1fdV4kM", "doi": "10.1109/VR.2018.8446484", "title": "Olfactory Display Based on Sniffing Action", "normalizedTitle": "Olfactory Display Based on Sniffing Action", "abstract": "An olfactory display is a device which provides various scents to a user. Such devices are expected to be applied to VR since olfactory stimulus influences human emotion and enhances user experience. One of the main problems in the conventional olfactory display is that the odorants emitted from the device not only reach the nose but spread into the ambient air, so that the user experience may be changed by the remaining odor. To solve this problem, we have developed a newly structured olfactory display which utilizes human respiratory action. In the method, DC fan is driven to create an odor stream in front of the nostril. Thus, odor goes through the nostril only when the user sniffs it. The odor plume generation is based on the combination of SAW atomizer with micro dispensing valve. We have fabricated a prototype, and then evaluated the waste odor emitted into the air using a commercially available gas detector. It was demonstrated that the new structure makes it possible to reduce the waste odor emission into the air compared to the conventional method.", "abstracts": [ { "abstractType": "Regular", "content": "An olfactory display is a device which provides various scents to a user. Such devices are expected to be applied to VR since olfactory stimulus influences human emotion and enhances user experience. 
One of the main problems in the conventional olfactory display is that the odorants emitted from the device not only reach the nose but spread into the ambient air, so that the user experience may be changed by the remaining odor. To solve this problem, we have developed a newly structured olfactory display which utilizes human respiratory action. In the method, DC fan is driven to create an odor stream in front of the nostril. Thus, odor goes through the nostril only when the user sniffs it. The odor plume generation is based on the combination of SAW atomizer with micro dispensing valve. We have fabricated a prototype, and then evaluated the waste odor emitted into the air using a commercially available gas detector. It was demonstrated that the new structure makes it possible to reduce the waste odor emission into the air compared to the conventional method.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "An olfactory display is a device which provides various scents to a user. Such devices are expected to be applied to VR since olfactory stimulus influences human emotion and enhances user experience. One of the main problems in the conventional olfactory display is that the odorants emitted from the device not only reach the nose but spread into the ambient air, so that the user experience may be changed by the remaining odor. To solve this problem, we have developed a newly structured olfactory display which utilizes human respiratory action. In the method, DC fan is driven to create an odor stream in front of the nostril. Thus, odor goes through the nostril only when the user sniffs it. The odor plume generation is based on the combination of SAW atomizer with micro dispensing valve. We have fabricated a prototype, and then evaluated the waste odor emitted into the air using a commercially available gas detector. 
It was demonstrated that the new structure makes it possible to reduce the waste odor emission into the air compared to the conventional method.", "fno": "08446484", "keywords": [ "Olfactory", "Fans", "Valves", "Surface Acoustic Wave Devices", "Surface Acoustic Waves", "Gas Detectors", "Olfactory Display", "Aspiration", "SAW", "Solenoid Valve", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial", "Augmented And Virtual Realities" ], "authors": [ { "affiliation": "Tokyo Institute of Technology, Japan", "fullName": "Shingo Kato", "givenName": "Shingo", "surname": "Kato", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Institute of Technology, Japan", "fullName": "Takamichi Nakamoto", "givenName": "Takamichi", "surname": "Nakamoto", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "597-598", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446212", "articleId": "13bd1fHrlRw", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446090", "articleId": "13bd1rsER1X", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811065", "title": "Demonstration of Improved Olfactory Display using Rapidly-Switching Solenoid Valves", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811065/12OmNAR1aSY", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation 
into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2013/4795/0/06549409", "title": "Visual-olfactory presentation system using a miniaturized olfactory display based on SAW streaming and electroosmotic pumps", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549409/12OmNwDSduJ", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/saint/2010/4107/0/4107a001", "title": "Development of a High-Performance Olfactory Display and Measurement of Olfactory Characteristics for Pulse Ejections", "doi": null, "abstractUrl": "/proceedings-article/saint/2010/4107a001/12OmNyrIawk", "parentPublication": { "id": "proceedings/saint/2010/4107/0", "title": "2010 10th IEEE/IPSJ International Symposium on Applications and the Internet", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504712", "title": "Olfactory display using surface acoustic wave device and micropumps for wearable applications", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504712/12OmNzgwmQK", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08445841", "title": "Demonstration of Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08445841/13bd1eTtWYE", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2008/01/mcg2008010075", "title": "Cooking Up an Interactive Olfactory Game Display", "doi": null, "abstractUrl": "/magazine/cg/2008/01/mcg2008010075/13rRUwvT9lE", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089617", "title": "Virtual environment with smell using wearable olfactory display and computational fluid dynamics simulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089617/1jIxfcDz7Ak", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2021/4511/0/451100a329", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2021/451100a329/1xDQb2bELm0", "parentPublication": { "id": "proceedings/smartiot/2021/4511/0", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1xDQ8zRShPy", "title": "2021 IEEE International Conference on Smart Internet of Things (SmartIoT)", "acronym": "smartiot", "groupId": "1827484", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1xDQb2bELm0", "doi": "10.1109/SmartIoT52359.2021.00061", "title": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "normalizedTitle": "The Odor Characterizations and Interactive Olfactory Display: A Survey", "abstract": "In recent years, the characterization of odor information has gained an emerging concentration. Furthermore, with the development of interactive olfactory display technology, it has become a trend that odor blenders were required to generate more types of odors. However, the bottleneck was the lack of an automated way to create recipes for reproducing odors. One of the significant reasons why it is difficult to find a way for the automotive generation of odor recipes is the lack of a universal characterization for describing odors. The primary research contributions of this paper include the current state of odor characterization and odor generation devices, a new taxonomy of odor reproduction, interactive olfactory display technology in augmented reality and virtual reality (AR/VR), the challenges faced by odor characterization and odor reproduction.", "abstracts": [ { "abstractType": "Regular", "content": "In recent years, the characterization of odor information has gained an emerging concentration. Furthermore, with the development of interactive olfactory display technology, it has become a trend that odor blenders were required to generate more types of odors. However, the bottleneck was the lack of an automated way to create recipes for reproducing odors. One of the significant reasons why it is difficult to find a way for the automotive generation of odor recipes is the lack of a universal characterization for describing odors. 
The primary research contributions of this paper include the current state of odor characterization and odor generation devices, a new taxonomy of odor reproduction, interactive olfactory display technology in augmented reality and virtual reality (AR/VR), the challenges faced by odor characterization and odor reproduction.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In recent years, the characterization of odor information has gained an emerging concentration. Furthermore, with the development of interactive olfactory display technology, it has become a trend that odor blenders were required to generate more types of odors. However, the bottleneck was the lack of an automated way to create recipes for reproducing odors. One of the significant reasons why it is difficult to find a way for the automotive generation of odor recipes is the lack of a universal characterization for describing odors. The primary research contributions of this paper include the current state of odor characterization and odor generation devices, a new taxonomy of odor reproduction, interactive olfactory display technology in augmented reality and virtual reality (AR/VR), the challenges faced by odor characterization and odor reproduction.", "fno": "451100a329", "keywords": [ "Augmented Reality", "Chemioception", "Display Instrumentation", "Electronic Noses", "Interactive Olfactory Display Technology", "Odor Generation Devices", "Odor Blenders", "Odor Information Characterization", "Odor Recipe Generation", "Augmented Reality", "Virtual Reality", "AR", "VR", "Odor Reproduction Characterization", "Visualization", "Conferences", "Olfactory", "Taxonomy", "Market Research", "Physiology", "Acoustics", "Odor Characterizations", "Olfactory Display", "Odor Reproduction", "AR VR" ], "authors": [ { "affiliation": "Guangdong University of Technology,School of Information Engineering,Guangzhou,China", "fullName": "Qi Liu", "givenName": "Qi", "surname": "Liu", "__typename": 
"ArticleAuthorType" }, { "affiliation": "Guangdong University of Technology,School of Information Engineering,Guangzhou,China", "fullName": "Dehan Luo", "givenName": "Dehan", "surname": "Luo", "__typename": "ArticleAuthorType" }, { "affiliation": "Guangdong University of Technology,School of Electromechanical Engineering,Guangzhou,China", "fullName": "Tengteng Wen", "givenName": "Tengteng", "surname": "Wen", "__typename": "ArticleAuthorType" }, { "affiliation": "Guangdong University of Technology,School of Information Engineering,Guangzhou,China", "fullName": "Zhuofeng Mo", "givenName": "Zhuofeng", "surname": "Mo", "__typename": "ArticleAuthorType" }, { "affiliation": "Guangdong University of Technology,School of Information Engineering,Guangzhou,China", "fullName": "Jingshan Li", "givenName": "Jingshan", "surname": "Li", "__typename": "ArticleAuthorType" }, { "affiliation": "Guangdong University of Technology,School of Information Engineering,Guangzhou,China", "fullName": "Qingrong Li", "givenName": "Qingrong", "surname": "Li", "__typename": "ArticleAuthorType" } ], "idPrefix": "smartiot", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-08-01T00:00:00", "pubType": "proceedings", "pages": "337-341", "year": "2021", "issn": null, "isbn": "978-1-6654-4511-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "451100a324", "articleId": "1xDQfQ5oaVW", "__typename": "AdjacentArticleType" }, "next": { "fno": "451100a334", "articleId": "1xDQ9Td5rPO", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811065", "title": "Demonstration of Improved Olfactory Display using Rapidly-Switching Solenoid Valves", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811065/12OmNAR1aSY", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": 
"2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811062", "title": "Odor Presentation with a Vivid Sense of Reality: Incorporating Fluid Dynamics Simulation into Olfactory Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811062/12OmNs0C9X2", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444763", "title": "On the effect of airflow on odor presentation", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444763/12OmNviHKkx", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240199", "title": "Wearable Olfactory Display: Using Odor in Outdoor Environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240199/12OmNwK7o3V", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811016", "title": "Selection Method of Odor Components for Olfactory Display Using Mass Spectrum Database", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811016/12OmNxzuMBP", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759464", "title": "Olfactory display using a miniaturized pump and a SAW atomizer for presenting low-volatile scents", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759464/12OmNy3AgAZ", 
"parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446484", "title": "Olfactory Display Based on Sniffing Action", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446484/13bd1fdV4kM", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040606", "title": "Smelling Screen: Development and Evaluation of an Olfactory Display System for Presenting a Virtual Odor Source", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040606/13rRUILLkvq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2008/01/mcg2008010075", "title": "Cooking Up an Interactive Olfactory Game Display", "doi": null, "abstractUrl": "/magazine/cg/2008/01/mcg2008010075/13rRUwvT9lE", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a279", "title": "Does Virtual Odor Representation Influence the Perception of Olfactory Intensity and Directionality in VR?", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a279/1tuAlZRpf6E", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvRU0cM", "title": "2017 IEEE International Symposium on Mixed and Augmented Reality (ISMAR-Adjunct)", "acronym": "ismar-adjunct", "groupId": "1810084", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNzV70Kh", "doi": "10.1109/ISMAR-Adjunct.2017.72", "title": "[POSTER] CoVAR: Mixed-Platform Remote Collaborative Augmented and Virtual Realities System with Shared Collaboration Cues", "normalizedTitle": "[POSTER] CoVAR: Mixed-Platform Remote Collaborative Augmented and Virtual Realities System with Shared Collaboration Cues", "abstract": "We present CoVAR, a novel Virtual Reality (VR) and Augmented Reality (AR) system for remote collaboration. It supports collaboration between AR and VR users by sharing a 3D reconstruction of the AR user's environment. To enhance this mixed platform collaboration, it provides natural inputs such as eye-gaze and hand gestures, remote embodiment through avatar's head and hands, and awareness cues of field-of-view and gaze cue. In this paper, we describe the system architecture, setup and calibration procedures, input methods and interaction, and collaboration enhancement features.", "abstracts": [ { "abstractType": "Regular", "content": "We present CoVAR, a novel Virtual Reality (VR) and Augmented Reality (AR) system for remote collaboration. It supports collaboration between AR and VR users by sharing a 3D reconstruction of the AR user's environment. To enhance this mixed platform collaboration, it provides natural inputs such as eye-gaze and hand gestures, remote embodiment through avatar's head and hands, and awareness cues of field-of-view and gaze cue. 
In this paper, we describe the system architecture, setup and calibration procedures, input methods and interaction, and collaboration enhancement features.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We present CoVAR, a novel Virtual Reality (VR) and Augmented Reality (AR) system for remote collaboration. It supports collaboration between AR and VR users by sharing a 3D reconstruction of the AR user's environment. To enhance this mixed platform collaboration, it provides natural inputs such as eye-gaze and hand gestures, remote embodiment through avatar's head and hands, and awareness cues of field-of-view and gaze cue. In this paper, we describe the system architecture, setup and calibration procedures, input methods and interaction, and collaboration enhancement features.", "fno": "6327a218", "keywords": [ "Collaboration", "Augmented Reality", "Calibration", "Image Reconstruction", "Three Dimensional Displays", "Tracking", "Mixed Reality", "Remote Collaboration", "Eye Tracking" ], "authors": [ { "affiliation": null, "fullName": "Thammathip Piumsomboon", "givenName": "Thammathip", "surname": "Piumsomboon", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Arindam Dey", "givenName": "Arindam", "surname": "Dey", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Barrett Ens", "givenName": "Barrett", "surname": "Ens", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Gun Lee", "givenName": "Gun", "surname": "Lee", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Mark Billinghurst", "givenName": "Mark", "surname": "Billinghurst", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar-adjunct", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-10-01T00:00:00", "pubType": "proceedings", "pages": "218-219", "year": "2017", "issn": null, "isbn": "978-0-7695-6327-5", "notes": null, "notesType": null, 
"__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "6327a216", "articleId": "12OmNwO5M1i", "__typename": "AdjacentArticleType" }, "next": { "fno": "6327a220", "articleId": "12OmNBghtty", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icisa/2011/9222/0/05772406", "title": "Design and Implementation of an Augmented Reality System Using Gaze Interaction", "doi": null, "abstractUrl": "/proceedings-article/icisa/2011/05772406/12OmNAWpyrv", "parentPublication": { "id": "proceedings/icisa/2011/9222/0", "title": "2011 International Conference on Information Science and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2015/7660/0/7660a140", "title": "[POSTER] AR4AR: Using Augmented Reality for guidance in Augmented Reality Systems Setup", "doi": null, "abstractUrl": "/proceedings-article/ismar/2015/7660a140/12OmNCd2rIf", "parentPublication": { "id": "proceedings/ismar/2015/7660/0", "title": "2015 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wetice/2014/4249/0/4249a243", "title": "Fostering Collaboration among Restoration Professionals Using Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/wetice/2014/4249a243/12OmNy50g7G", "parentPublication": { "id": "proceedings/wetice/2014/4249/0", "title": "2014 IEEE 23rd International Workshops on Enabling Technologies: Infrastructures for Collaborative Enterprise (WETICE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/pc/2009/03/mpc2009030042", "title": "Through-Walls Collaboration", "doi": null, "abstractUrl": "/magazine/pc/2009/03/mpc2009030042/13rRUEgs2z4", "parentPublication": { "id": "mags/pc", "title": "IEEE Pervasive Computing", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a250", "title": "Using Speech to Visualise Shared Gaze Cues in MR Remote Collaboration", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a250/1CJcnpSVomk", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a431", "title": "Adaptive Visual Cues for Guiding a Bimanual Unordered Task in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a431/1JrRcvF2Yko", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a787", "title": "VRDoc: Gaze-based Interactions for VR Reading Experience", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a787/1JrRgFp6G2s", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798080", "title": "Characterizing Asymmetric Collaborative Interactions in Virtual and Augmented Realities", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798080/1cJ0Ph3yn7O", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/pdp/2020/6582/0/09092352", "title": "Augmented reality for visualizing security data for cybernetic and cyberphysical systems", "doi": null, 
"abstractUrl": "/proceedings-article/pdp/2020/09092352/1jPaZfYhVPG", "parentPublication": { "id": "proceedings/pdp/2020/6582/0", "title": "2020 28th Euromicro International Conference on Parallel, Distributed and Network-Based Processing (PDP)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a435", "title": "Multi-scale Mixed Reality Collaboration for Digital Twin", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a435/1yeQLyb4LpC", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNqHItAn", "title": "2017 IEEE Third International Conference on Multimedia Big Data (BigMM)", "acronym": "bigmm", "groupId": "1808144", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNzcPADw", "doi": "10.1109/BigMM.2017.83", "title": "RGB-D Camera Network Calibration and Streaming for 3D Telepresence in Large Environment", "normalizedTitle": "RGB-D Camera Network Calibration and Streaming for 3D Telepresence in Large Environment", "abstract": "With the advance of multimedia technology, traditional remote communication applications (e.g. video chat) may not satisfy the increasing demand for effective interactivity due to the lack of realistic experience of being physically present. Telepresence is a new form of communication that aims to strengthen the connection between people by creating more immersive and interactive environments. However existing telepresence systems often suffer from limited viewing space or failing to render realistic 3D contents on the display. In this paper, we present a novel system that can improve the interactive experience among remote users with dynamically generated virtual 3D environments. To support wide view rendering, an automated calibration system is developed for RGB-D camera network that is scalable to capture and reconstruct large environments. Our proposed client-server architecture is capable of transmitting and processing large volume of RGB-D data across the the network in real-time. Virtual 3D views are automatically synthesized by seamlessly fusing multiple video streams acquired by different cameras. Our experimental results demonstrate better visual effects that can be applied to existing telepresence systems for more motivating and engaging remote applications.", "abstracts": [ { "abstractType": "Regular", "content": "With the advance of multimedia technology, traditional remote communication applications (e.g. 
video chat) may not satisfy the increasing demand for effective interactivity due to the lack of realistic experience of being physically present. Telepresence is a new form of communication that aims to strengthen the connection between people by creating more immersive and interactive environments. However existing telepresence systems often suffer from limited viewing space or failing to render realistic 3D contents on the display. In this paper, we present a novel system that can improve the interactive experience among remote users with dynamically generated virtual 3D environments. To support wide view rendering, an automated calibration system is developed for RGB-D camera network that is scalable to capture and reconstruct large environments. Our proposed client-server architecture is capable of transmitting and processing large volume of RGB-D data across the network in real-time. Virtual 3D views are automatically synthesized by seamlessly fusing multiple video streams acquired by different cameras. Our experimental results demonstrate better visual effects that can be applied to existing telepresence systems for more motivating and engaging remote applications.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "With the advance of multimedia technology, traditional remote communication applications (e.g. video chat) may not satisfy the increasing demand for effective interactivity due to the lack of realistic experience of being physically present. Telepresence is a new form of communication that aims to strengthen the connection between people by creating more immersive and interactive environments. However existing telepresence systems often suffer from limited viewing space or failing to render realistic 3D contents on the display. In this paper, we present a novel system that can improve the interactive experience among remote users with dynamically generated virtual 3D environments. 
To support wide view rendering, an automated calibration system is developed for RGB-D camera network that is scalable to capture and reconstruct large environments. Our proposed client-server architecture is capable of transmitting and processing large volume of RGB-D data across the network in real-time. Virtual 3D views are automatically synthesized by seamlessly fusing multiple video streams acquired by different cameras. Our experimental results demonstrate better visual effects that can be applied to existing telepresence systems for more motivating and engaging remote applications.", "fno": "07966773", "keywords": [ "Calibration", "Cameras", "Client Server Systems", "Image Capture", "Image Colour Analysis", "Image Fusion", "Image Reconstruction", "Interactive Systems", "Realistic Images", "Real Time Systems", "Rendering Computer Graphics", "Video Streaming", "Virtual Reality", "RGB D Camera Network Calibration", "RGB D Camera Network Streaming", "3 D Telepresence", "Multimedia Technology", "Immersive Environments", "Interactive Environments", "Realistic 3 D Content Rendering", "Interactive Experience", "Remote Users", "Dynamically Generated Virtual 3 D Environments", "Automated Calibration System", "Large Environment Reconstruction", "Large Environment Capture", "Client Server Architecture", "RGB D Data Processing", "Real Time Data Transmission", "Virtual 3 D View Synthesis", "Multiple Video Streams Fusion", "Visual Effects", "Cameras", "Three Dimensional Displays", "Calibration", "Streaming Media", "Rendering Computer Graphics", "Real Time Systems", "Virtual Environments", "RGB D Camera Networks", "3 D Telepresence", "Immersive Environment", "Real Time Rendering", "Remote Communication" ], "authors": [ { "affiliation": null, "fullName": "Po-Chang Su", "givenName": "Po-Chang", "surname": "Su", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Ju Shen", "givenName": "Ju", "surname": "Shen", "__typename": "ArticleAuthorType" }, { 
"affiliation": null, "fullName": "Muhammad Usman Rafique", "givenName": "Muhammad Usman", "surname": "Rafique", "__typename": "ArticleAuthorType" } ], "idPrefix": "bigmm", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-04-01T00:00:00", "pubType": "proceedings", "pages": "362-369", "year": "2017", "issn": null, "isbn": "978-1-5090-6549-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07966772", "articleId": "12OmNx7G5Sr", "__typename": "AdjacentArticleType" }, "next": { "fno": "07966774", "articleId": "12OmNroijnl", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ismar/2013/2869/0/06671778", "title": "Single-shot extrinsic calibration of a generically configured RGB-D camera rig from scene constraints", "doi": null, "abstractUrl": "/proceedings-article/ismar/2013/06671778/12OmNAle6AS", "parentPublication": { "id": "proceedings/ismar/2013/2869/0", "title": "2013 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/percom/2016/8779/0/07456508", "title": "PanoVC: Pervasive telepresence using mobile phones", "doi": null, "abstractUrl": "/proceedings-article/percom/2016/07456508/12OmNwJybSQ", "parentPublication": { "id": "proceedings/percom/2016/8779/0", "title": "2016 IEEE International Conference on Pervasive Computing and Communications (PerCom)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/03/07792698", "title": "JackIn Head: Immersive Visual Telepresence System with Omnidirectional Wearable Camera", "doi": null, "abstractUrl": "/journal/tg/2017/03/07792698/13rRUx0geq0", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2015/03/07165667", "title": "Telerobotic Haptic Exploration in Art Galleries and Museums for Individuals with Visual Impairments", "doi": null, "abstractUrl": "/journal/th/2015/03/07165667/13rRUxAAT7N", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2005/06/v0637", "title": "Data Streaming in Telepresence Environments", "doi": null, "abstractUrl": "/journal/tg/2005/06/v0637/13rRUyfKIHA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040616", "title": "Immersive Group-to-Group Telepresence", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040616/13rRUzp02ok", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2005/2372/2/01467605", "title": "RGB-Z: mapping a sparse depth map to a high resolution RGB camera image", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2005/01467605/1htC5iEJs1W", "parentPublication": { "id": "proceedings/cvpr/2005/2372/2", "title": "2005 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'05)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090468", "title": "3D Human Reconstruction from an Image for Mobile Telepresence Systems", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090468/1jIxlaoW1UY", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090433", "title": "Virtual Tour: An Immersive Low Cost Telepresence System", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090433/1jIxrSY8cZa", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/07/09257094", "title": "Output-Sensitive Avatar Representations for Immersive Telepresence", "doi": null, "abstractUrl": "/journal/tg/2022/07/09257094/1oFCABrJUmA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxQOjzV", "title": "2014 IEEE International Symposium on Multimedia (ISM)", "acronym": "ism", "groupId": "1001094", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNzn38Pl", "doi": "10.1109/ISM.2014.62", "title": "An Immersive Telepresence System Using a Real-Time Omnidirectional Camera and a Virtual Reality Head-Mounted Display", "normalizedTitle": "An Immersive Telepresence System Using a Real-Time Omnidirectional Camera and a Virtual Reality Head-Mounted Display", "abstract": "Current telepresence systems are limited by the use of standard, narrow angle of view cameras. By using an Omni directional camera, an improved visual telepresence system is achieved. This work presents a more immersive visual experience using the Omni directional Panoptic camera in combination with a modern, low cost virtual reality head-mounted display. The camera system and its video data format are briefly described. The presented system is capable of broadcasting the Omni directional live stream via network connection. The system consists of the Panoptic camera as the acquisition device, a personal computer providing server support, and a head-mounted display connected to a client computer. The server and the client application that create a virtual environment from the video data are presented and the test setup is shown. Finally, a video example of the system's real-time operation is provided.", "abstracts": [ { "abstractType": "Regular", "content": "Current telepresence systems are limited by the use of standard, narrow angle of view cameras. By using an Omni directional camera, an improved visual telepresence system is achieved. This work presents a more immersive visual experience using the Omni directional Panoptic camera in combination with a modern, low cost virtual reality head-mounted display. The camera system and its video data format are briefly described. 
The presented system is capable of broadcasting the Omni directional live stream via network connection. The system consists of the Panoptic camera as the acquisition device, a personal computer providing server support, and a head-mounted display connected to a client computer. The server and the client application that create a virtual environment from the video data are presented and the test setup is shown. Finally, a video example of the system's real-time operation is provided.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Current telepresence systems are limited by the use of standard, narrow angle of view cameras. By using an Omni directional camera, an improved visual telepresence system is achieved. This work presents a more immersive visual experience using the Omni directional Panoptic camera in combination with a modern, low cost virtual reality head-mounted display. The camera system and its video data format are briefly described. The presented system is capable of broadcasting the Omni directional live stream via network connection. The system consists of the Panoptic camera as the acquisition device, a personal computer providing server support, and a head-mounted display connected to a client computer. The server and the client application that create a virtual environment from the video data are presented and the test setup is shown. 
Finally, a video example of the system's real-time operation is provided.", "fno": "4311a175", "keywords": [ "Cameras", "Streaming Media", "Real Time Systems", "Servers", "Image Resolution", "Visualization", "Virtual Reality", "Telepresence", "Omnidirectional Video", "Real Time", "Head Mounted Display", "Streaming" ], "authors": [ { "affiliation": null, "fullName": "Luis Gaemperle", "givenName": "Luis", "surname": "Gaemperle", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Kerem Seyid", "givenName": "Kerem", "surname": "Seyid", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Vladan Popovic", "givenName": "Vladan", "surname": "Popovic", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Yusuf Leblebici", "givenName": "Yusuf", "surname": "Leblebici", "__typename": "ArticleAuthorType" } ], "idPrefix": "ism", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-12-01T00:00:00", "pubType": "proceedings", "pages": "175-178", "year": "2014", "issn": null, "isbn": "978-1-4799-4311-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "4311a167", "articleId": "12OmNzsrwnJ", "__typename": "AdjacentArticleType" }, "next": { "fno": "4311a179", "articleId": "12OmNvDqsUz", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ismar/2010/9343/0/05643596", "title": "Augmented telepresence using autopilot airship and omni-directional camera", "doi": null, "abstractUrl": "/proceedings-article/ismar/2010/05643596/12OmNqFJhG9", "parentPublication": { "id": "proceedings/ismar/2010/9343/0", "title": "2010 IEEE International Symposium on Mixed and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/5209a918", "title": "Local Image 
Feature Matching Improvements for Omnidirectional Camera Systems", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/5209a918/12OmNyoAAbJ", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2010/4109/0/4109d171", "title": "An Augmented Reality Setup with an Omnidirectional Camera Based on Multiple Object Detection", "doi": null, "abstractUrl": "/proceedings-article/icpr/2010/4109d171/12OmNyyeWuA", "parentPublication": { "id": "proceedings/icpr/2010/4109/0", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446471", "title": "Towards Mobile 3D Telepresence Using Head-Worn Devices and Dual-Purpose Screens", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446471/13bd1AITn9Y", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/03/07792698", "title": "JackIn Head: Immersive Visual Telepresence System with Omnidirectional Wearable Camera", "doi": null, "abstractUrl": "/journal/tg/2017/03/07792698/13rRUx0geq0", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040616", "title": "Immersive Group-to-Group Telepresence", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040616/13rRUzp02ok", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { 
"id": "proceedings/bwcca/2010/4236/0/05633252", "title": "Multihopped Wireless Disaster Information Network by Automatic Directional Antenna Control Method", "doi": null, "abstractUrl": "/proceedings-article/bwcca/2010/05633252/183rAd6PL6H", "parentPublication": { "id": "proceedings/bwcca/2010/4236/0", "title": "2010 International Conference on Broadband, Wireless Computing, Communication and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090433", "title": "Virtual Tour: An Immersive Low Cost Telepresence System", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090433/1jIxrSY8cZa", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/07/09257094", "title": "Output-Sensitive Avatar Representations for Immersive Telepresence", "doi": null, "abstractUrl": "/journal/tg/2022/07/09257094/1oFCABrJUmA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iscipt/2021/4137/0/413700a798", "title": "Research on Omnidirectional SLAM based on Vehicle-mounted Multi-Camera System", "doi": null, "abstractUrl": "/proceedings-article/iscipt/2021/413700a798/1zzpM9SsNqg", "parentPublication": { "id": "proceedings/iscipt/2021/4137/0", "title": "2021 6th International Symposium on Computer and Information Processing Technology (ISCIPT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1J7W6LmbCw0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "acronym": "ismar-adjunct", "groupId": "9973799", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1J7WaFB7xNC", "doi": "10.1109/ISMAR-Adjunct57072.2022.00111", "title": "Synthesizing Novel Spaces for Remote Telepresence Experiences", "normalizedTitle": "Synthesizing Novel Spaces for Remote Telepresence Experiences", "abstract": "The emerging field of remote telepresence via spatial computing has opened many exciting opportunities for next-generation computer-mediated-communication platforms. Such techniques enable users to mutually engage in a wide spectrum of applications, previously not possible in 2D screen-based communication methods. Yet, cali-brating and finding a mutual environment compatible with all remote participant&#x0027;s physical environment is considered a challenging task. In this paper, we elaborate on the mutual space finding problem and provide a high-level introduction of our proposed novel Mutual Scene Synthesis (MSS) system. The MSS system takes the partici-pants&#x0027; surrounding environment as input, and synthesizes a virtual scene that corresponds to the functional features of all participants&#x0027; physical spaces. By combining a function optimization module with a deep-learning conditional scene augmentation process, the MSS can generate a scene compatible to all participants of a remote telepresence scenario. 
By performing early comparative user studies via the MatterPort3D dataset, we evaluate the effectiveness of our system and show our proposed MSS approach can be a promising research direction for facilitating contextualized telepresence systems for next-generation spatial computing platforms.", "abstracts": [ { "abstractType": "Regular", "content": "The emerging field of remote telepresence via spatial computing has opened many exciting opportunities for next-generation computer-mediated-communication platforms. Such techniques enable users to mutually engage in a wide spectrum of applications, previously not possible in 2D screen-based communication methods. Yet, calibrating and finding a mutual environment compatible with all remote participant&#x0027;s physical environment is considered a challenging task. In this paper, we elaborate on the mutual space finding problem and provide a high-level introduction of our proposed novel Mutual Scene Synthesis (MSS) system. The MSS system takes the participants&#x0027; surrounding environment as input, and synthesizes a virtual scene that corresponds to the functional features of all participants&#x0027; physical spaces. By combining a function optimization module with a deep-learning conditional scene augmentation process, the MSS can generate a scene compatible to all participants of a remote telepresence scenario. By performing early comparative user studies via the MatterPort3D dataset, we evaluate the effectiveness of our system and show our proposed MSS approach can be a promising research direction for facilitating contextualized telepresence systems for next-generation spatial computing platforms.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The emerging field of remote telepresence via spatial computing has opened many exciting opportunities for next-generation computer-mediated-communication platforms. 
Such techniques enable users to mutually engage in a wide spectrum of applications, previously not possible in 2D screen-based communication methods. Yet, calibrating and finding a mutual environment compatible with all remote participant's physical environment is considered a challenging task. In this paper, we elaborate on the mutual space finding problem and provide a high-level introduction of our proposed novel Mutual Scene Synthesis (MSS) system. The MSS system takes the participants' surrounding environment as input, and synthesizes a virtual scene that corresponds to the functional features of all participants' physical spaces. By combining a function optimization module with a deep-learning conditional scene augmentation process, the MSS can generate a scene compatible to all participants of a remote telepresence scenario. By performing early comparative user studies via the MatterPort3D dataset, we evaluate the effectiveness of our system and show our proposed MSS approach can be a promising research direction for facilitating contextualized telepresence systems for next-generation spatial computing platforms.", "fno": "536500a524", "keywords": [ "Augmented Reality", "Groupware", "Learning Artificial Intelligence", "Mobile Computing", "Software Tools", "Telecontrol", "Telerobotics", "User Interfaces", "Virtual Reality", "Web Services", "Cali Brating", "Conditional Scene Augmentation Process", "Contextualized Telepresence Systems", "Early Comparative User Studies", "Function Optimization Module", "Functional Features", "High Level Introduction", "MSS Approach", "MSS System", "Mutual Environment", "Mutual Space Finding Problem", "Next Generation Computer Mediated Communication Platforms", "Next Generation Spatial Computing Platforms", "Novel Mutual Scene Synthesis System", "Novel Spaces", "Partici Pants", "Participants", "Remote Participant", "Remote Telepresence Experiences", "Remote Telepresence Scenario", "Screen Based Communication Methods", 
"Synthesizes", "Virtual Scene", "Privacy", "Telepresence", "Layout", "Mixed Reality", "Manuals", "Stairs", "Space Exploration", "Mixed Reality", "Scene Graphs", "Scene Synthesis", "Telepresence", "Generative Modeling", "Spatial Computing" ], "authors": [ { "affiliation": "Reality Labs Research Meta", "fullName": "Mohammad Keshavarzi", "givenName": "Mohammad", "surname": "Keshavarzi", "__typename": "ArticleAuthorType" }, { "affiliation": "Reality Labs Research Meta", "fullName": "Michael Zollhoefer", "givenName": "Michael", "surname": "Zollhoefer", "__typename": "ArticleAuthorType" }, { "affiliation": "University of California Berkeley", "fullName": "Allen Y. Yang", "givenName": "Allen Y.", "surname": "Yang", "__typename": "ArticleAuthorType" }, { "affiliation": "Reality Labs Research Meta", "fullName": "Patrick Peluse", "givenName": "Patrick", "surname": "Peluse", "__typename": "ArticleAuthorType" }, { "affiliation": "University of California Berkeley", "fullName": "Luisa Caldas", "givenName": "Luisa", "surname": "Caldas", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar-adjunct", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-10-01T00:00:00", "pubType": "proceedings", "pages": "524-529", "year": "2022", "issn": null, "isbn": "978-1-6654-5365-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "536500a518", "articleId": "1J7WnZV8Mo0", "__typename": "AdjacentArticleType" }, "next": { "fno": "536500a530", "articleId": "1J7WtHqguHu", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "trans/tg/2018/04/08314105", "title": "Detection Thresholds for Rotation and Translation Gains in 360&#x00B0; Video-Based Telepresence Systems", "doi": null, "abstractUrl": "/journal/tg/2018/04/08314105/13rRUxASubD", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions 
on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2019/05/08642375", "title": "Immersive Telepresence and Remote Collaboration using Mobile and Wearable Devices", "doi": null, "abstractUrl": "/journal/tg/2019/05/08642375/17PYEk3WIil", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714051", "title": "Augmenting Immersive Telepresence Experience with a Virtual Body", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714051/1B0Y0I5xWyk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a666", "title": "If I Share with you my Perspective, Would you Share your Data with me?", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a666/1CJcFhW6P6M", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a856", "title": "Creating 3D Personal Avatars with High Quality Facial Expressions for Telecommunication and Telepresence", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a856/1CJezdpzDMI", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/11/09873991", "title": "Predict-and-Drive: Avatar Motion Adaption in Room-Scale Augmented Reality Telepresence with Heterogeneous Spaces", 
"doi": null, "abstractUrl": "/journal/tg/2022/11/09873991/1GjwGcGrRmg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icdcsw/2022/8879/0/887900a260", "title": "FlyLISL: Traffic Balance Awared Routing for Large-scale Mixed-Reality Telepresence over Reconfigurable Mega-Constellation", "doi": null, "abstractUrl": "/proceedings-article/icdcsw/2022/887900a260/1IFJGjJmjgk", "parentPublication": { "id": "proceedings/icdcsw/2022/8879/0", "title": "2022 IEEE 42nd International Conference on Distributed Computing Systems Workshops (ICDCSW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aqtr/2020/7164/0/09129994", "title": "Solutions for the design and control of telepresence robots that climb obstacles", "doi": null, "abstractUrl": "/proceedings-article/aqtr/2020/09129994/1l6SGBqYCbu", "parentPublication": { "id": "proceedings/aqtr/2020/7164/0", "title": "2020 IEEE International Conference on Automation, Quality and Testing, Robotics (AQTR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a451", "title": "The Owl: Immersive Telepresence Communication for Hybrid Conferences", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a451/1yeQG4fi6Dm", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI61KPdPAQ", "title": "2019 IEEE 5th Workshop on Everyday Virtual Reality (WEVR)", "acronym": "wevr", "groupId": "1807824", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cI62dVXsB2", "doi": "10.1109/WEVR.2019.8809591", "title": "Immersive Gastronomic Experience with Distributed Reality", "normalizedTitle": "Immersive Gastronomic Experience with Distributed Reality", "abstract": "We have developed an immersive gastronomic experience as a proof of concept of Distributed Reality, a type of Augmented Virtuality which combines a reality transmitted from a remote place, using 360&#x00B0; video, with a local reality, using video see-through. In order to reach fully immersive experience, local objects of interest such as hands and local food are segmented using red chrominance keying. Only those segmented objects are merged with the remote reality, enabling this way to increase self-presence and to allow user interaction. More concretely, the gastronomic experience consists of tasting small pieces of food, while being immersed in a remote place designed to pair with the food, thus creating an innovative concept with potential impact in hospitality and tourism industries. An evaluation performed with 66 users shows that it provides good levels of immersion, local interactivity, and general user satisfaction. The application achieves real time performance and has been developed for a smartphone mounted on a consumer headset, thus being easy to deploy and to reuse in other use cases.", "abstracts": [ { "abstractType": "Regular", "content": "We have developed an immersive gastronomic experience as a proof of concept of Distributed Reality, a type of Augmented Virtuality which combines a reality transmitted from a remote place, using 360&#x00B0; video, with a local reality, using video see-through. 
In order to reach fully immersive experience, local objects of interest such as hands and local food are segmented using red chrominance keying. Only those segmented objects are merged with the remote reality, enabling this way to increase self-presence and to allow user interaction. More concretely, the gastronomic experience consists of tasting small pieces of food, while being immersed in a remote place designed to pair with the food, thus creating an innovative concept with potential impact in hospitality and tourism industries. An evaluation performed with 66 users shows that it provides good levels of immersion, local interactivity, and general user satisfaction. The application achieves real time performance and has been developed for a smartphone mounted on a consumer headset, thus being easy to deploy and to reuse in other use cases.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We have developed an immersive gastronomic experience as a proof of concept of Distributed Reality, a type of Augmented Virtuality which combines a reality transmitted from a remote place, using 360° video, with a local reality, using video see-through. In order to reach fully immersive experience, local objects of interest such as hands and local food are segmented using red chrominance keying. Only those segmented objects are merged with the remote reality, enabling this way to increase self-presence and to allow user interaction. More concretely, the gastronomic experience consists of tasting small pieces of food, while being immersed in a remote place designed to pair with the food, thus creating an innovative concept with potential impact in hospitality and tourism industries. An evaluation performed with 66 users shows that it provides good levels of immersion, local interactivity, and general user satisfaction. 
The application achieves real time performance and has been developed for a smartphone mounted on a consumer headset, thus being easy to deploy and to reuse in other use cases.", "fno": "08809591", "keywords": [ "Augmented Reality", "Computer Graphics", "Image Colour Analysis", "Image Segmentation", "Interactive Devices", "Smart Phones", "Immersive Gastronomic Experience", "Distributed Reality", "Fully Immersive Experience", "Local Food", "Red Chrominance Keying", "Segmented Objects", "Remote Reality", "Local Interactivity", "Augmented Virtuality", "User Interaction", "Smart Phone", "Consumer Headset", "Cameras", "Streaming Media", "Virtual Reality", "Distortion", "Real Time Systems", "Resists", "Image Segmentation", "Human Centered Computing", "Human Computer Interaction HCI", "Interaction Paradigms" ], "authors": [ { "affiliation": "Nokia Bell Labs", "fullName": "Pablo Perez", "givenName": "Pablo", "surname": "Perez", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Ester Gonzalez-Sosa", "givenName": "Ester", "surname": "Gonzalez-Sosa", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Redouane Kachach", "givenName": "Redouane", "surname": "Kachach", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Jaime Ruiz", "givenName": "Jaime", "surname": "Ruiz", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Ignacio Benito", "givenName": "Ignacio", "surname": "Benito", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Francisco Pereira", "givenName": "Francisco", "surname": "Pereira", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Alvaro Villegas", "givenName": "Alvaro", "surname": "Villegas", "__typename": "ArticleAuthorType" } ], "idPrefix": "wevr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": 
"2019-03-01T00:00:00", "pubType": "proceedings", "pages": "1-6", "year": "2019", "issn": null, "isbn": "978-1-7281-4050-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08809593", "articleId": "1cI6221nc9a", "__typename": "AdjacentArticleType" }, "next": { "fno": "08809590", "articleId": "1cI62tizh7y", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icmu/2017/31/0/08330112", "title": "Clash tanks: An investigation of virtual and augmented reality gaming experience", "doi": null, "abstractUrl": "/proceedings-article/icmu/2017/08330112/12OmNB8TU7d", "parentPublication": { "id": "proceedings/icmu/2017/31/0", "title": "2017 Tenth International Conference on Mobile Computing and Ubiquitous Network (ICMU)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2010/9343/0/05643591", "title": "An immersive e-learning system providing virtual experience", "doi": null, "abstractUrl": "/proceedings-article/ismar/2010/05643591/12OmNwcUjSs", "parentPublication": { "id": "proceedings/ismar/2010/9343/0", "title": "2010 IEEE International Symposium on Mixed and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dcve/2016/2138/0/07563559", "title": "Vishnu: virtual immersive support for HelpiNg users an interaction paradigm for collaborative remote guiding in mixed reality", "doi": null, "abstractUrl": "/proceedings-article/3dcve/2016/07563559/12OmNxG1yHz", "parentPublication": { "id": "proceedings/3dcve/2016/2138/0", "title": "2016 IEEE Third VR International Workshop on Collaborative Virtual Environments (3DCVE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892320", "title": "A mixed reality tele-presence platform to exchange 
emotion and sensory information based on MPEG-V standard", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892320/12OmNxUdv7D", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/03/07792698", "title": "JackIn Head: Immersive Visual Telepresence System with Omnidirectional Wearable Camera", "doi": null, "abstractUrl": "/journal/tg/2017/03/07792698/13rRUx0geq0", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aivr/2018/9269/0/926900a204", "title": "Comparative Reality: Measuring User Experience and Emotion in Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/aivr/2018/926900a204/17D45Xi9rXe", "parentPublication": { "id": "proceedings/aivr/2018/9269/0", "title": "2018 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699260", "title": "Comparing Different Augmented Reality Support Applications for Cooperative Repair of an Industrial Robot", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699260/19F1M8A6RHO", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049700", "title": "Using Virtual Replicas to Improve Mixed Reality Remote Collaboration", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049700/1KYoAxyw5c4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & 
Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ism/2022/7172/0/717200a206", "title": "Hand Tracking vs Motion Controllers: The effects on Immersive Virtual Reality Game Experience", "doi": null, "abstractUrl": "/proceedings-article/ism/2022/717200a206/1KaHMe1XT0I", "parentPublication": { "id": "proceedings/ism/2022/7172/0", "title": "2022 IEEE International Symposium on Multimedia (ISM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797966", "title": "A Mixed Presence Collaborative Mixed Reality System", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797966/1cJ19fldjVu", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIxhEnA8IE", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIxrSY8cZa", "doi": "10.1109/VRW50115.2020.00104", "title": "Virtual Tour: An Immersive Low Cost Telepresence System", "normalizedTitle": "Virtual Tour: An Immersive Low Cost Telepresence System", "abstract": "In this work we present Virtual Tour, a low cost telepresence system that provides the possibility of visiting a remote place almost reaching the feeling of being physically there. This is accomplished by a fully mobile, consumer setup composed of a VR Headset, 360&#x00B0; video camera and a full-duplex audio system. On one hand, the remote environment is captured, stitched and encoded by the 360&#x00B0; camera. Resulting video and audio are streamed using a custom low latency streaming solution. On the other hand, user wears an untethered VR HMD to experiment a full immersion and close interaction with the remote space. In addition, conference capabilities of the proposed system boosts the telepresence feeling since the user can have a real-time conversation with people on the remote scenario.", "abstracts": [ { "abstractType": "Regular", "content": "In this work we present Virtual Tour, a low cost telepresence system that provides the possibility of visiting a remote place almost reaching the feeling of being physically there. This is accomplished by a fully mobile, consumer setup composed of a VR Headset, 360&#x00B0; video camera and a full-duplex audio system. On one hand, the remote environment is captured, stitched and encoded by the 360&#x00B0; camera. Resulting video and audio are streamed using a custom low latency streaming solution. On the other hand, user wears an untethered VR HMD to experiment a full immersion and close interaction with the remote space. 
In addition, conference capabilities of the proposed system boosts the telepresence feeling since the user can have a real-time conversation with people on the remote scenario.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this work we present Virtual Tour, a low cost telepresence system that provides the possibility of visiting a remote place almost reaching the feeling of being physically there. This is accomplished by a fully mobile, consumer setup composed of a VR Headset, 360° video camera and a full-duplex audio system. On one hand, the remote environment is captured, stitched and encoded by the 360° camera. Resulting video and audio are streamed using a custom low latency streaming solution. On the other hand, user wears an untethered VR HMD to experiment a full immersion and close interaction with the remote space. In addition, conference capabilities of the proposed system boosts the telepresence feeling since the user can have a real-time conversation with people on the remote scenario.", "fno": "09090433", "keywords": [ "Head Mounted Displays", "Streaming Media", "Telepresence", "Real Time Systems", "Virtual Reality", "Human Computer Interaction", "Human Centered Computing", "Collaborative Interaction", "Human Centered Computing", "Virtual Reality" ], "authors": [ { "affiliation": "Nokia Bell Labs", "fullName": "Redouane Kachach", "givenName": "Redouane", "surname": "Kachach", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Pablo Perez", "givenName": "Pablo", "surname": "Perez", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Alvaro Villegas", "givenName": "Alvaro", "surname": "Villegas", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Ester Gonzalez-Sosa", "givenName": "Ester", "surname": "Gonzalez-Sosa", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, 
"showBuyMe": true, "hasPdf": true, "pubDate": "2020-03-01T00:00:00", "pubType": "proceedings", "pages": "504-506", "year": "2020", "issn": null, "isbn": "978-1-7281-6532-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09090603", "articleId": "1jIxuXfkVWw", "__typename": "AdjacentArticleType" }, "next": { "fno": "09090658", "articleId": "1jIxiXtKyPu", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/wacv/2018/4886/0/488601c010", "title": "Delay Compensation for Actuated Stereoscopic 360 Degree Telepresence Systems with Probabilistic Head Motion Prediction", "doi": null, "abstractUrl": "/proceedings-article/wacv/2018/488601c010/12OmNy68EOY", "parentPublication": { "id": "proceedings/wacv/2018/4886/0", "title": "2018 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446521", "title": "Extended Abstract: Natural Human-Robot Interaction in Virtual Reality Telepresence Systems", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446521/13bd1ftOBCZ", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08314105", "title": "Detection Thresholds for Rotation and Translation Gains in 360&#x00B0; Video-Based Telepresence Systems", "doi": null, "abstractUrl": "/journal/tg/2018/04/08314105/13rRUxASubD", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2019/05/08642375", "title": "Immersive Telepresence and Remote Collaboration using Mobile 
and Wearable Devices", "doi": null, "abstractUrl": "/journal/tg/2019/05/08642375/17PYEk3WIil", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714051", "title": "Augmenting Immersive Telepresence Experience with a Virtual Body", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714051/1B0Y0I5xWyk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a001", "title": "Bullet Comments for 360&#x00B0;Video", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a001/1CJcgerbwNa", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a524", "title": "Synthesizing Novel Spaces for Remote Telepresence Experiences", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a524/1J7WaFB7xNC", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a576", "title": "Leaning-Based Control of an Immersive-Telepresence Robot", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a576/1JrR64XrANW", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vr/2019/1377/0/08797819", "title": "Localizing Teleoperator Gaze in 360&#x00B0; Hosted Telepresence", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797819/1cJ1d3MdShi", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a346", "title": "Tactile Telepresence for Isolated Patients", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a346/1yeQGRM0HLi", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1yfxDjRGMmc", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "acronym": "ismar-adjunct", "groupId": "1810084", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1yeQG4fi6Dm", "doi": "10.1109/ISMAR-Adjunct54149.2021.00104", "title": "The Owl: Immersive Telepresence Communication for Hybrid Conferences", "normalizedTitle": "The Owl: Immersive Telepresence Communication for Hybrid Conferences", "abstract": "The Owl is an immersive telepresence communication system in which remote attendees feel teleported to a physical location where a capture/communication device is placed. During the session they can interact naturally with the people present in that location and also with the other remote participants. Remote attendees get the full immersive experience using commercial VR goggles or Android phones with cardboard-like HMDs. It is also possible to engage in the experience using Android phones or tablets without HMD; in this case the gaze in the remote scenario is controlled using the user&#x2019;s fingers on the screen. Remote attendees using HMD are represented through avatars in the scene, while participants using plain phones or tablets will get their real time video captured by the device and rendered in overlayed windows. The latter can also be used to provide the local attendees (with) a real-time view of both the remote participants and their local surroundings. Client Software can be downloaded and installed by any user on standard devices.", "abstracts": [ { "abstractType": "Regular", "content": "The Owl is an immersive telepresence communication system in which remote attendees feel teleported to a physical location where a capture/communication device is placed. During the session they can interact naturally with the people present in that location and also with the other remote participants. 
Remote attendees get the full immersive experience using commercial VR goggles or Android phones with cardboard-like HMDs. It is also possible to engage in the experience using Android phones or tablets without HMD; in this case the gaze in the remote scenario is controlled using the user&#x2019;s fingers on the screen. Remote attendees using HMD are represented through avatars in the scene, while participants using plain phones or tablets will get their real time video captured by the device and rendered in overlayed windows. The latter can also be used to provide the local attendees (with) a real-time view of both the remote participants and their local surroundings. Client Software can be downloaded and installed by any user on standard devices.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The Owl is an immersive telepresence communication system in which remote attendees feel teleported to a physical location where a capture/communication device is placed. During the session they can interact naturally with the people present in that location and also with the other remote participants. Remote attendees get the full immersive experience using commercial VR goggles or Android phones with cardboard-like HMDs. It is also possible to engage in the experience using Android phones or tablets without HMD; in this case the gaze in the remote scenario is controlled using the user’s fingers on the screen. Remote attendees using HMD are represented through avatars in the scene, while participants using plain phones or tablets will get their real time video captured by the device and rendered in overlayed windows. The latter can also be used to provide the local attendees (with) a real-time view of both the remote participants and their local surroundings. 
Client Software can be downloaded and installed by any user on standard devices.", "fno": "129800a451", "keywords": [ "Avatars", "Helmet Mounted Displays", "Smart Phones", "Telerobotics", "Virtual Reality", "Immersive Telepresence Communication System", "Remote Attendees", "Physical Location", "Remote Participants", "Immersive Experience", "Commercial VR Goggles", "Android Phones", "HMD", "Remote Scenario", "Plain Phones", "Local Attendees", "Owl", "Hybrid Conferences", "Telepresence", "Fingers", "Resists", "Immersive Experience", "Streaming Media", "Real Time Systems", "Software", "Human Centered Computing", "Collaborative Interaction", "Virtual Reality" ], "authors": [ { "affiliation": "Nokia Bell Labs", "fullName": "Redouane Kachach", "givenName": "Redouane", "surname": "Kachach", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Sandra Morcuende", "givenName": "Sandra", "surname": "Morcuende", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Diego Gonzalez-Morin", "givenName": "Diego", "surname": "Gonzalez-Morin", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Pablo Perez-Garcia", "givenName": "Pablo", "surname": "Perez-Garcia", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Ester Gonzalez-Sosa", "givenName": "Ester", "surname": "Gonzalez-Sosa", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Francisco Pereira", "givenName": "Francisco", "surname": "Pereira", "__typename": "ArticleAuthorType" }, { "affiliation": "Nokia Bell Labs", "fullName": "Alvaro Villegas", "givenName": "Alvaro", "surname": "Villegas", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar-adjunct", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-10-01T00:00:00", "pubType": "proceedings", "pages": "451-452", "year": "2021", "issn": null, 
"isbn": "978-1-6654-1298-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [ { "id": "1yeQERH24wM", "name": "pismar-adjunct202112980-09585789s1-mm_129800a451.zip", "size": "236 MB", "location": "https://www.computer.org/csdl/api/v1/extra/pismar-adjunct202112980-09585789s1-mm_129800a451.zip", "__typename": "WebExtraType" } ], "adjacentArticles": { "previous": { "fno": "129800a449", "articleId": "1yeQKVJz2bS", "__typename": "AdjacentArticleType" }, "next": { "fno": "129800a453", "articleId": "1yfxJ6xhCww", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "trans/tg/2019/05/08642375", "title": "Immersive Telepresence and Remote Collaboration using Mobile and Wearable Devices", "doi": null, "abstractUrl": "/journal/tg/2019/05/08642375/17PYEk3WIil", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714051", "title": "Augmenting Immersive Telepresence Experience with a Virtual Body", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714051/1B0Y0I5xWyk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/crv/2022/9774/0/977400a167", "title": "Attention based Occlusion Removal for Hybrid Telepresence Systems", "doi": null, "abstractUrl": "/proceedings-article/crv/2022/977400a167/1GeCvG8dPna", "parentPublication": { "id": "proceedings/crv/2022/9774/0", "title": "2022 19th Conference on Robots and Vision (CRV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798011", "title": "Hybrid Camera System for Telepresence with Foveated Imaging", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2019/08798011/1cJ0KGEU288", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797819", "title": "Localizing Teleoperator Gaze in 360&#x00B0; Hosted Telepresence", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797819/1cJ1d3MdShi", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090433", "title": "Virtual Tour: An Immersive Low Cost Telepresence System", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090433/1jIxrSY8cZa", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/12/09199575", "title": "Eyes-free Target Acquisition During Walking in Immersive Mixed Reality", "doi": null, "abstractUrl": "/journal/tg/2020/12/09199575/1ncgpmtzdn2", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2020/6497/0/649700a055", "title": "Intermediation Family: Workspace for Sharing Spatial Design among Multiple Users", "doi": null, "abstractUrl": "/proceedings-article/cw/2020/649700a055/1olHzY0C61q", "parentPublication": { "id": "proceedings/cw/2020/6497/0", "title": "2020 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a447", 
"title": "Conect - An application for hybrid conferences", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a447/1yeQCWPE9zO", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAsTgX7", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "acronym": "3dui", "groupId": "1001623", "volume": "0", "displayVolume": "0", "year": "2010", "__typename": "ProceedingType" }, "article": { "id": "12OmNvH7fjb", "doi": "10.1109/3DUI.2010.5444703", "title": "Redirected touching: Warping space to remap passive haptics", "normalizedTitle": "Redirected touching: Warping space to remap passive haptics", "abstract": "There is an increasing interest in deployable virtual military training systems. Haptic feedback for these training systems can enable users to interact more naturally with the training environment, but is difficult to deploy. Passive haptic feedback is very compelling, but it is also inflexible. Changes made to virtual objects can require time-consuming changes to their physical passive-haptic counterparts. This poster explores the possibility of mapping many differently shaped virtual objects onto one physical object by warping virtual space and exploiting the dominance of the visual system. A first implementation that maps different virtual objects onto dynamically captured physical geometry is presented, and potential applications to deployable military trainers are discussed.", "abstracts": [ { "abstractType": "Regular", "content": "There is an increasing interest in deployable virtual military training systems. Haptic feedback for these training systems can enable users to interact more naturally with the training environment, but is difficult to deploy. Passive haptic feedback is very compelling, but it is also inflexible. Changes made to virtual objects can require time-consuming changes to their physical passive-haptic counterparts. This poster explores the possibility of mapping many differently shaped virtual objects onto one physical object by warping virtual space and exploiting the dominance of the visual system. 
A first implementation that maps different virtual objects onto dynamically captured physical geometry is presented, and potential applications to deployable military trainers are discussed.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "There is an increasing interest in deployable virtual military training systems. Haptic feedback for these training systems can enable users to interact more naturally with the training environment, but is difficult to deploy. Passive haptic feedback is very compelling, but it is also inflexible. Changes made to virtual objects can require time-consuming changes to their physical passive-haptic counterparts. This poster explores the possibility of mapping many differently shaped virtual objects onto one physical object by warping virtual space and exploiting the dominance of the visual system. A first implementation that maps different virtual objects onto dynamically captured physical geometry is presented, and potential applications to deployable military trainers are discussed.", "fno": "05444703", "keywords": [ "Dynamically Captured Physical Geometry", "Redirected Touching", "Warping Virtual Space", "Passive Haptic Feedback", "Virtual Military Training Systems", "Visual System" ], "authors": [ { "affiliation": "Univ. 
of North Carolina at Chapel Hill, Chapel Hill, NC, USA", "fullName": "Luv Kohli", "givenName": "Luv", "surname": "Kohli", "__typename": "ArticleAuthorType" } ], "idPrefix": "3dui", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2010-03-01T00:00:00", "pubType": "proceedings", "pages": "129-130", "year": "2010", "issn": null, "isbn": "978-1-4244-6846-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "05444706", "articleId": "12OmNsd6vhN", "__typename": "AdjacentArticleType" }, "next": { "fno": "05444704", "articleId": "12OmNyRPgCl", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2017/6716/0/07893325", "title": "FlexiFingers: Multi-finger interaction in VR combining passive haptics and pseudo-haptics", "doi": null, "abstractUrl": "/proceedings-article/3dui/2017/07893325/12OmNBBzoeV", "parentPublication": { "id": "proceedings/3dui/2017/6716/0", "title": "2017 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2012/1204/0/06184193", "title": "Redirected touching: The effect of warping space on task performance", "doi": null, "abstractUrl": "/proceedings-article/3dui/2012/06184193/12OmNCbU37j", "parentPublication": { "id": "proceedings/3dui/2012/1204/0", "title": "2012 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2017/6716/0/07893363", "title": "Redirected reach in virtual reality: Enabling natural hand interaction at multiple virtual locations with passive haptics", "doi": null, "abstractUrl": "/proceedings-article/3dui/2017/07893363/12OmNvIxeZj", "parentPublication": { "id": "proceedings/3dui/2017/6716/0", "title": "2017 IEEE Symposium on 3D User 
Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2013/6097/0/06550201", "title": "Redirected Touching: Training and adaptation in warped virtual spaces", "doi": null, "abstractUrl": "/proceedings-article/3dui/2013/06550201/12OmNzDeh9p", "parentPublication": { "id": "proceedings/3dui/2013/6097/0", "title": "2013 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759462", "title": "Grasping virtual objects with multi-point haptics", "doi": null, "abstractUrl": "/proceedings-article/vr/2011/05759462/12OmNzcPAzj", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2008/2047/0/04476610", "title": "Poster: Generic Redirected Walking &#x00026; Dynamic Passive Haptics: Evaluation and Implications for Virtual Locomotion Interfaces", "doi": null, "abstractUrl": "/proceedings-article/3dui/2008/04476610/12OmNzlD9i9", "parentPublication": { "id": "proceedings/3dui/2008/2047/0", "title": "2008 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09961901", "title": "Transferable Virtual-Physical Environmental Alignment with Redirected Walking", "doi": null, "abstractUrl": "/journal/tg/5555/01/09961901/1IxvZ4KZbri", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090415", "title": "Enhancing Proxy-Based Haptics in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090415/1jIxtWMak6c", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", 
"title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/05/09382898", "title": "Combining Dynamic Passive Haptics and Haptic Retargeting for Enhanced Haptic Feedback in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2021/05/09382898/1saZv7Dd9Ty", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800a184", "title": "A Reinforcement Learning Approach to Redirected Walking with Passive Haptic Feedback", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800a184/1yeCXhKVTXy", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNyYm2vK", "title": "Computer Graphics International Conference", "acronym": "cgi", "groupId": "1000132", "volume": "0", "displayVolume": "0", "year": "2000", "__typename": "ProceedingType" }, "article": { "id": "12OmNyQYt7r", "doi": "10.1109/CGI.2000.852345", "title": "Haptics Issues in Virtual Environments", "normalizedTitle": "Haptics Issues in Virtual Environments", "abstract": "Haptics is a recent enhancement to virtual environments allowing users to “touch” and feel the simulated objects they interact with. Current commercial products allow tactile feedback through desktop interfaces (such as the FeelIt mouse(tm) or the PHANToM arm(tm)) and dextrous tactile and force feedback at the fingertips through haptic gloves (such as the CyberTouch(tm) and the CyberGrasp(tm)).Haptics VR programming requires good physical modeling of user interactions, primarily through collision detection, and of object response, such as surface deformation, hard-contact simulation, slippage, etc. It is at present difficult to simulate complex virtual environments that have a realistic behavior. This task is added by the recent introduction of haptics toolkits (such as Ghost(tm) or VPS(tm)).Current technology suffers from a number of limitations, which go beyond the higher product cost of haptic interfaces. These technical drawbacks include the limited workspace of desktop interfaces, the large weight of force feedback gloves, the lack of force feedback to the body, safety concerns, etc. Not to be neglected is the high bandwidth requirement of haptics, which is not met by current Internet technology. As a result, it is not possible at present to have a large number of remote participants interacting hapticly in a shared virtual space.", "abstracts": [ { "abstractType": "Regular", "content": "Haptics is a recent enhancement to virtual environments allowing users to “touch” and feel the simulated objects they interact with. 
Current commercial products allow tactile feedback through desktop interfaces (such as the FeelIt mouse(tm) or the PHANToM arm(tm)) and dextrous tactile and force feedback at the fingertips through haptic gloves (such as the CyberTouch(tm) and the CyberGrasp(tm)).Haptics VR programming requires good physical modeling of user interactions, primarily through collision detection, and of object response, such as surface deformation, hard-contact simulation, slippage, etc. It is at present difficult to simulate complex virtual environments that have a realistic behavior. This task is added by the recent introduction of haptics toolkits (such as Ghost(tm) or VPS(tm)).Current technology suffers from a number of limitations, which go beyond the higher product cost of haptic interfaces. These technical drawbacks include the limited workspace of desktop interfaces, the large weight of force feedback gloves, the lack of force feedback to the body, safety concerns, etc. Not to be neglected is the high bandwidth requirement of haptics, which is not met by current Internet technology. As a result, it is not possible at present to have a large number of remote participants interacting hapticly in a shared virtual space.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Haptics is a recent enhancement to virtual environments allowing users to “touch” and feel the simulated objects they interact with. Current commercial products allow tactile feedback through desktop interfaces (such as the FeelIt mouse(tm) or the PHANToM arm(tm)) and dextrous tactile and force feedback at the fingertips through haptic gloves (such as the CyberTouch(tm) and the CyberGrasp(tm)).Haptics VR programming requires good physical modeling of user interactions, primarily through collision detection, and of object response, such as surface deformation, hard-contact simulation, slippage, etc. It is at present difficult to simulate complex virtual environments that have a realistic behavior. 
This task is added by the recent introduction of haptics toolkits (such as Ghost(tm) or VPS(tm)).Current technology suffers from a number of limitations, which go beyond the higher product cost of haptic interfaces. These technical drawbacks include the limited workspace of desktop interfaces, the large weight of force feedback gloves, the lack of force feedback to the body, safety concerns, etc. Not to be neglected is the high bandwidth requirement of haptics, which is not met by current Internet technology. As a result, it is not possible at present to have a large number of remote participants interacting hapticly in a shared virtual space.", "fno": "06430295", "keywords": [ "Haptic Feedback", "Dextrous Glove", "Virtual Environments", "VR Programming", "Physical Modeling", "Collision Detection" ], "authors": [ { "affiliation": "Rutgers University", "fullName": "Grigore C. Burdea", "givenName": "Grigore C.", "surname": "Burdea", "__typename": "ArticleAuthorType" } ], "idPrefix": "cgi", "isOpenAccess": false, "showRecommendedArticles": false, "showBuyMe": true, "hasPdf": true, "pubDate": "2000-06-01T00:00:00", "pubType": "proceedings", "pages": "295", "year": "2000", "issn": "1530-1052", "isbn": "0-7695-0643-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06430287", "articleId": "12OmNwtWfP9", "__typename": "AdjacentArticleType" }, "next": { "fno": "06430303", "articleId": "12OmNApcuxR", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ0HqCLp96", "doi": "10.1109/VR.2019.8797785", "title": "Occurrence of Pseudo-Haptics by Swimming in a Virtual Reality Environment", "normalizedTitle": "Occurrence of Pseudo-Haptics by Swimming in a Virtual Reality Environment", "abstract": "This study focuses on the investigation of pseudo-haptics while swimming in a highly immersive virtual reality environment created using a head-mounted display to investigate the conditions under which pseudo-haptics occur while performing considerable physical exercises. When the users perform the outward sweep motion of the breaststroke, the spheres floating in the virtual reality space move toward the users. The experimental results confirm that pseudo-haptics can be controlled using this setup by adjusting the ratio of the amount of movement of users' hands and the amount of movement of the virtual reality spheres.", "abstracts": [ { "abstractType": "Regular", "content": "This study focuses on the investigation of pseudo-haptics while swimming in a highly immersive virtual reality environment created using a head-mounted display to investigate the conditions under which pseudo-haptics occur while performing considerable physical exercises. When the users perform the outward sweep motion of the breaststroke, the spheres floating in the virtual reality space move toward the users. 
The experimental results confirm that pseudo-haptics can be controlled using this setup by adjusting the ratio of the amount of movement of users' hands and the amount of movement of the virtual reality spheres.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This study focuses on the investigation of pseudo-haptics while swimming in a highly immersive virtual reality environment created using a head-mounted display to investigate the conditions under which pseudo-haptics occur while performing considerable physical exercises. When the users perform the outward sweep motion of the breaststroke, the spheres floating in the virtual reality space move toward the users. The experimental results confirm that pseudo-haptics can be controlled using this setup by adjusting the ratio of the amount of movement of users' hands and the amount of movement of the virtual reality spheres.", "fno": "08797785", "keywords": [ "Gait Analysis", "Haptic Interfaces", "Helmet Mounted Displays", "Virtual Reality", "Pseudohaptics", "Highly Immersive Virtual Reality Environment", "Virtual Reality Space Move", "Virtual Reality Spheres", "Head Mounted Display", "Physical Exercises", "Breaststroke", "Outward Sweep Motion", "Swimming", "Virtual Reality", "Sports", "Aerospace Electronics", "Visualization", "Head Mounted Displays", "Haptic Interfaces", "Mice", "Pseudo Haptics", "Virtual Reality Space", "Swimming Motion" ], "authors": [ { "affiliation": "Chitose Institute of Science and Technolgoy", "fullName": "Hirooki Aoki", "givenName": "Hirooki", "surname": "Aoki", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "842-843", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": 
"08797755", "articleId": "1cJ0FJEiRHO", "__typename": "AdjacentArticleType" }, "next": { "fno": "08797899", "articleId": "1cJ12bET8XK", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2017/6716/0/07893325", "title": "FlexiFingers: Multi-finger interaction in VR combining passive haptics and pseudo-haptics", "doi": null, "abstractUrl": "/proceedings-article/3dui/2017/07893325/12OmNBBzoeV", "parentPublication": { "id": "proceedings/3dui/2017/6716/0", "title": "2017 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2008/2047/0/04476593", "title": "Hemp-hand-displacement-based pseudo-haptics: a study of a force field application", "doi": null, "abstractUrl": "/proceedings-article/3dui/2008/04476593/12OmNvDI3OQ", "parentPublication": { "id": "proceedings/3dui/2008/2047/0", "title": "2008 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmew/2015/7079/0/07169762", "title": "Quantitative analysys of pseudo-haptics based on three types of hand form and two phases of perception", "doi": null, "abstractUrl": "/proceedings-article/icmew/2015/07169762/12OmNxR5ULY", "parentPublication": { "id": "proceedings/icmew/2015/7079/0", "title": "2015 IEEE International Conference on Multimedia & Expo Workshops (ICMEW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmcs/1999/0253/1/02539195", "title": "Haptics in Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/icmcs/1999/02539195/12OmNyQ7G3s", "parentPublication": { "id": "proceedings/icmcs/1999/0253/1", "title": "Multimedia Computing and Systems, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vr/2022/9617/0/961700a703", "title": "Exploring Pseudo-Weight in Augmented Reality Extended Displays", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a703/1CJbGtoliuY", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090415", "title": "Enhancing Proxy-Based Haptics in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090415/1jIxtWMak6c", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090644", "title": "Evoking Pseudo-Haptics of Resistance Force by Viewpoint Displacement", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090644/1jIxuxGS1So", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aivr/2020/7463/0/746300a333", "title": "Study on Pseudo-haptics during Swimming Motion in a Virtual Reality Space", "doi": null, "abstractUrl": "/proceedings-article/aivr/2020/746300a333/1qpzCt4VUOI", "parentPublication": { "id": "proceedings/aivr/2020/7463/0", "title": "2020 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800z018", "title": "Keynote Speaker: Wearable Haptics for Virtual and Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800z018/1yeD29pZAsw", 
"parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIxhEnA8IE", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIxtWMak6c", "doi": "10.1109/VRW50115.2020.00126", "title": "Enhancing Proxy-Based Haptics in Virtual Reality", "normalizedTitle": "Enhancing Proxy-Based Haptics in Virtual Reality", "abstract": "Rich haptic sensations in interactive virtual reality (VR) applications support immersive experiences. This position paper outlines my research efforts aiming to bring enhanced haptic interactions to VR users. Leveraging the highly realistic haptic feedback provided by real, physical proxy objects, I present two orthogonal research directions attempting to overcome the central drawbacks of conventional passive haptics. The first research direction leverages physical manipulations to enhance scalability through reusable yet low-complexity dynamic passive haptic proxy objects. Orthogonal to this, I explore hand redirection techniques in a second, more software-focused research direction based on virtual manipulations. In a concluding section, this position paper outlines how both approaches could be combined to further enhance haptics in VR.", "abstracts": [ { "abstractType": "Regular", "content": "Rich haptic sensations in interactive virtual reality (VR) applications support immersive experiences. This position paper outlines my research efforts aiming to bring enhanced haptic interactions to VR users. Leveraging the highly realistic haptic feedback provided by real, physical proxy objects, I present two orthogonal research directions attempting to overcome the central drawbacks of conventional passive haptics. The first research direction leverages physical manipulations to enhance scalability through reusable yet low-complexity dynamic passive haptic proxy objects. 
Orthogonal to this, I explore hand redirection techniques in a second, more software-focused research direction based on virtual manipulations. In a concluding section, this position paper outlines how both approaches could be combined to further enhance haptics in VR.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Rich haptic sensations in interactive virtual reality (VR) applications support immersive experiences. This position paper outlines my research efforts aiming to bring enhanced haptic interactions to VR users. Leveraging the highly realistic haptic feedback provided by real, physical proxy objects, I present two orthogonal research directions attempting to overcome the central drawbacks of conventional passive haptics. The first research direction leverages physical manipulations to enhance scalability through reusable yet low-complexity dynamic passive haptic proxy objects. Orthogonal to this, I explore hand redirection techniques in a second, more software-focused research direction based on virtual manipulations. 
In a concluding section, this position paper outlines how both approaches could be combined to further enhance haptics in VR.", "fno": "09090415", "keywords": [ "Haptic Interfaces", "Virtual Reality", "Manipulator Dynamics", "Scalability", "Visualization", "Shape", "Conferences", "Human Centered Computing", "Virtual Reality", "Human Centered Computing", "Haptic Devices", "Human Centered Computing", "Interaction Techniques" ], "authors": [ { "affiliation": "German Research Center for Artificial Intelligence (DFKI) Saarland Informatics Campus,Saarbrücken,Germany", "fullName": "André Zenner", "givenName": "André", "surname": "Zenner", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2020-03-01T00:00:00", "pubType": "proceedings", "pages": "549-550", "year": "2020", "issn": null, "isbn": "978-1-7281-6532-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09090547", "articleId": "1jIxw8zwtbO", "__typename": "AdjacentArticleType" }, "next": { "fno": "09090546", "articleId": "1jIxrquhCNO", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cgi/2000/0643/0/06430295", "title": "Haptics Issues in Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/cgi/2000/06430295/12OmNyQYt7r", "parentPublication": { "id": "proceedings/cgi/2000/0643/0", "title": "Computer Graphics International Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446280", "title": "Enhancing the Stiffness Perception of Tangible Objects in Mixed Reality Using Wearable Haptics", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446280/13bd1AIBM2a", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on 
Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07833030", "title": "Shifty: A Weight-Shifting Dynamic Passive Haptic Proxy to Enhance Object Perception in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2017/04/07833030/13rRUwgQpqL", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08260962", "title": "Ascending and Descending in Virtual Reality: Simple and Safe System Using Passive Haptics", "doi": null, "abstractUrl": "/journal/tg/2018/04/08260962/13rRUwjGoLM", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a905", "title": "Haptics in VR Using Origami-Augmented Drones", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a905/1J7WrPcWIVO", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a748", "title": "Wormholes in VR: Teleporting Hands for Flexible Passive Haptics", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a748/1JrR93EDicE", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wevr/2019/4050/0/08809589", "title": "Passive Haptic Menus for Desk-Based and HMD-Projected Virtual Reality", "doi": null, "abstractUrl": 
"/proceedings-article/wevr/2019/08809589/1cI61Rx4b9m", "parentPublication": { "id": "proceedings/wevr/2019/4050/0", "title": "2019 IEEE 5th Workshop on Everyday Virtual Reality (WEVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/svr/2020/9231/0/923100a134", "title": "Proxy Haptics for Surgical Training", "doi": null, "abstractUrl": "/proceedings-article/svr/2020/923100a134/1oZBAEAmMBW", "parentPublication": { "id": "proceedings/svr/2020/9231/0", "title": "2020 22nd Symposium on Virtual and Augmented Reality (SVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a377", "title": "Multisensory Teleportation in Virtual Reality Applications", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a377/1tnXGQKSUPm", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800z018", "title": "Keynote Speaker: Wearable Haptics for Virtual and Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800z018/1yeD29pZAsw", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1oZBzHKi4UM", "title": "2020 22nd Symposium on Virtual and Augmented Reality (SVR)", "acronym": "svr", "groupId": "1800426", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1oZBAEAmMBW", "doi": "10.1109/SVR51698.2020.00033", "title": "Proxy Haptics for Surgical Training", "normalizedTitle": "Proxy Haptics for Surgical Training", "abstract": "This paper propose a solution to the realism of haptic perception in VR by using the concept called &#x201C;proxy haptics.&#x201D; In proxy haptics, real physical props are placed around the real environment to match their virtual counterparts. If the physical props are co-registered with the virtual world, a compelling sense of tactile sensation can be achieved. A prototype proxy haptic system was developed and a pilot study was performed to determine the effects of our system for simple surgical training tasks. Our results show that from a user perspective, our system is more believable and closer to the real world than the standard VR interface.", "abstracts": [ { "abstractType": "Regular", "content": "This paper propose a solution to the realism of haptic perception in VR by using the concept called &#x201C;proxy haptics.&#x201D; In proxy haptics, real physical props are placed around the real environment to match their virtual counterparts. If the physical props are co-registered with the virtual world, a compelling sense of tactile sensation can be achieved. A prototype proxy haptic system was developed and a pilot study was performed to determine the effects of our system for simple surgical training tasks. 
Our results show that from a user perspective, our system is more believable and closer to the real world than the standard VR interface.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper propose a solution to the realism of haptic perception in VR by using the concept called “proxy haptics.” In proxy haptics, real physical props are placed around the real environment to match their virtual counterparts. If the physical props are co-registered with the virtual world, a compelling sense of tactile sensation can be achieved. A prototype proxy haptic system was developed and a pilot study was performed to determine the effects of our system for simple surgical training tasks. Our results show that from a user perspective, our system is more believable and closer to the real world than the standard VR interface.", "fno": "923100a134", "keywords": [ "Computer Based Training", "Haptic Interfaces", "Surgery", "Virtual Reality", "Physical Props", "Prototype Proxy Haptic System", "Simple Surgical Training Tasks", "Proxy Haptics", "Haptic Perception", "Haptic Interfaces", "Training", "Task Analysis", "Robots", "Surgery", "Kinematics", "Tracking", "Mixed Reality", "Haptic", "Proxy Haptic", "Medical Training", "Multi Modal" ], "authors": [ { "affiliation": "University of Alberta,Dept. Computing Science,Edmonton,Canada", "fullName": "Mahdi Rahmani Hanzaki", "givenName": "Mahdi Rahmani", "surname": "Hanzaki", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Alberta,Dept. 
Computing Science,Edmonton,Canada", "fullName": "Pierre Boulanger", "givenName": "Pierre", "surname": "Boulanger", "__typename": "ArticleAuthorType" } ], "idPrefix": "svr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2020-11-01T00:00:00", "pubType": "proceedings", "pages": "134-143", "year": "2020", "issn": null, "isbn": "978-1-7281-9231-4", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "923100a129", "articleId": "1oZBDvexFle", "__typename": "AdjacentArticleType" }, "next": { "fno": "923100a144", "articleId": "1oZBCqdvnkQ", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2018/3365/0/08446128", "title": "Rendering of Pressure and Textures Using Wearable Haptics in Immersive VR Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446128/13bd1eSlyt0", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2016/03/07452616", "title": "Haptics for Product Design and Manufacturing Simulation", "doi": null, "abstractUrl": "/journal/th/2016/03/07452616/13rRUNvyats", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2011/01/tth2011010051", "title": "The Role of Haptics in Medical Training Simulators: A Survey of the State of the Art", "doi": null, "abstractUrl": "/journal/th/2011/01/tth2011010051/13rRUwI5TR8", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2004/02/mcg2004020056", "title": "Haptics in 
Minimally Invasive Surgical Simulation and Training", "doi": null, "abstractUrl": "/magazine/cg/2004/02/mcg2004020056/13rRUygBw20", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a905", "title": "Haptics in VR Using Origami-Augmented Drones", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a905/1J7WrPcWIVO", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a748", "title": "Wormholes in VR: Teleporting Hands for Flexible Passive Haptics", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a748/1JrR93EDicE", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090415", "title": "Enhancing Proxy-Based Haptics in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090415/1jIxtWMak6c", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/05/09382898", "title": "Combining Dynamic Passive Haptics and Haptic Retargeting for Enhanced Haptic Feedback in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2021/05/09382898/1saZv7Dd9Ty", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800z018", "title": "Keynote Speaker: Wearable Haptics for Virtual and Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800z018/1yeD29pZAsw", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ1htJ7ArK", "doi": "10.1109/VR.2019.8797818", "title": "Effects of Tracking Area Shape and Size on Artificial Potential Field Redirected Walking", "normalizedTitle": "Effects of Tracking Area Shape and Size on Artificial Potential Field Redirected Walking", "abstract": "Immersive Virtual Environment systems that utilize Head Mounted Displays and a large tracking area have the advantage of being able to use natural walking as a locomotion interface. In such systems, difficulties arise when the virtual world is larger than the tracking area and users approach area boundaries. Redirected walking (RDW) is a technique that distorts the correspondence between physical and virtual world motion to steer users away from boundaries and obstacles, including other co-immersed users. Recently, a RDW algorithm was proposed based on the use of artificial potential fields (APF), in which walls and obstacles repel the user. APF-RDW effectively supports multiple simultaneous users and, unlike other RDW algorithms, can easily account for tracking area dimensions and room shape when generating steering instructions. This work investigates the performance of a refined APF-RDW algorithm in different sized tracking areas and in irregularly shaped rooms, as compared to a Steer-to-Center (STC) algorithm and an un-steered control condition. Data was generated in simulation using logged paths of prior live users, and is presented for both single-user and multi-user scenarios. 
Results show the ability of APF-RDW to steer effectively in irregular concave shaped tracking areas such as L-shaped rooms or crosses, along with scalable multi-user support, and better performance than STC algorithms in almost all conditions.", "abstracts": [ { "abstractType": "Regular", "content": "Immersive Virtual Environment systems that utilize Head Mounted Displays and a large tracking area have the advantage of being able to use natural walking as a locomotion interface. In such systems, difficulties arise when the virtual world is larger than the tracking area and users approach area boundaries. Redirected walking (RDW) is a technique that distorts the correspondence between physical and virtual world motion to steer users away from boundaries and obstacles, including other co-immersed users. Recently, a RDW algorithm was proposed based on the use of artificial potential fields (APF), in which walls and obstacles repel the user. APF-RDW effectively supports multiple simultaneous users and, unlike other RDW algorithms, can easily account for tracking area dimensions and room shape when generating steering instructions. This work investigates the performance of a refined APF-RDW algorithm in different sized tracking areas and in irregularly shaped rooms, as compared to a Steer-to-Center (STC) algorithm and an un-steered control condition. Data was generated in simulation using logged paths of prior live users, and is presented for both single-user and multi-user scenarios. Results show the ability of APF-RDW to steer effectively in irregular concave shaped tracking areas such as L-shaped rooms or crosses, along with scalable multi-user support, and better performance than STC algorithms in almost all conditions.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Immersive Virtual Environment systems that utilize Head Mounted Displays and a large tracking area have the advantage of being able to use natural walking as a locomotion interface. 
In such systems, difficulties arise when the virtual world is larger than the tracking area and users approach area boundaries. Redirected walking (RDW) is a technique that distorts the correspondence between physical and virtual world motion to steer users away from boundaries and obstacles, including other co-immersed users. Recently, a RDW algorithm was proposed based on the use of artificial potential fields (APF), in which walls and obstacles repel the user. APF-RDW effectively supports multiple simultaneous users and, unlike other RDW algorithms, can easily account for tracking area dimensions and room shape when generating steering instructions. This work investigates the performance of a refined APF-RDW algorithm in different sized tracking areas and in irregularly shaped rooms, as compared to a Steer-to-Center (STC) algorithm and an un-steered control condition. Data was generated in simulation using logged paths of prior live users, and is presented for both single-user and multi-user scenarios. 
Results show the ability of APF-RDW to steer effectively in irregular concave shaped tracking areas such as L-shaped rooms or crosses, along with scalable multi-user support, and better performance than STC algorithms in almost all conditions.", "fno": "08797818", "keywords": [ "Helmet Mounted Displays", "User Interfaces", "Virtual Reality", "Natural Walking", "Locomotion Interface", "Virtual World", "Artificial Potential Fields", "Refined APF RDW Algorithm", "Irregularly Shaped Rooms", "STC Algorithms", "Artificial Potential Field Redirected Walking", "Head Mounted Displays", "Immersive Virtual Environment Systems", "Steer To Center Algorithm", "Legged Locomotion", "Force", "Shape", "Task Analysis", "Target Tracking", "Navigation", "Redirected Walking", "Virtual Environments", "Navigation", "Simulation" ], "authors": [ { "affiliation": "Miami University, Oxford, PA, USA", "fullName": "Justin Messinger", "givenName": "Justin", "surname": "Messinger", "__typename": "ArticleAuthorType" }, { "affiliation": "Miami University, Oxford, PA, USA", "fullName": "Eric Hodgson", "givenName": "Eric", "surname": "Hodgson", "__typename": "ArticleAuthorType" }, { "affiliation": "Miami University, Oxford, PA, USA", "fullName": "Eric R. 
Bachmann", "givenName": "Eric R.", "surname": "Bachmann", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "72-80", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08798121", "articleId": "1cJ17Y60ruM", "__typename": "AdjacentArticleType" }, "next": { "fno": "08797990", "articleId": "1cJ1b7Rydpu", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2016/0836/0/07504742", "title": "Simultaneous mapping and redirected walking for ad hoc free walking in virtual environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504742/12OmNyUFg0I", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/11/ttg2013111872", "title": "Optimizing Constrained-Environment Redirected Walking Instructions Using Search Techniques", "doi": null, "abstractUrl": "/journal/tg/2013/11/ttg2013111872/13rRUIM2VBH", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404579", "title": "Performance of Redirected Walking Algorithms in a Constrained Virtual World", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404579/13rRUwjoNx4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2019/05/08645818", "title": "Multi-User Redirected Walking 
and Resetting Using Artificial Potential Fields", "doi": null, "abstractUrl": "/journal/tg/2019/05/08645818/17PYEiVyc2v", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/10049511", "title": "Redirected Walking On Omnidirectional Treadmill", "doi": null, "abstractUrl": "/journal/tg/5555/01/10049511/1KYoAYFd0m4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049692", "title": "FREE-RDW: A Multi-user Redirected Walking Method for Supporting Non-forward Steps", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049692/1KYopXwY5Vu", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797983", "title": "A General Reactive Algorithm for Redirected Walking Using Artificial Potential Functions", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797983/1cJ12ULGPzq", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2020/05/08998570", "title": "A Steering Algorithm for Redirected Walking Using Reinforcement Learning", "doi": null, "abstractUrl": "/journal/tg/2020/05/08998570/1hx2DxYanDy", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089532", "title": "Optimal Planning for Redirected Walking Based on 
Reinforcement Learning in Multi-user Environment with Irregularly Shaped Physical Space", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089532/1jIx7m6wYKc", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089569", "title": "Dynamic Artificial Potential Fields for Multi-User Redirected Walking", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089569/1jIxfFs8qgo", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNs4S8wv", "title": "2014 IEEE Symposium on 3D User Interfaces (3DUI)", "acronym": "3dui", "groupId": "1001623", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNBp52Dj", "doi": "10.1109/3DUI.2014.6798837", "title": "An evaluation of a smart-phone-based menu system for immersive virtual environments", "normalizedTitle": "An evaluation of a smart-phone-based menu system for immersive virtual environments", "abstract": "System control is a crucial task for many virtual reality applications and can be realized in a broad variety of ways, whereat the most common way is the use of graphical menus. These are often implemented as part of the virtual environment, but can also be displayed on mobile devices. Until now, many systems and studies have been published on using mobile devices such as personal digital assistants (PDAs) to realize such menu systems. However, most of these systems have been proposed way before smartphones existed and evolved to everyday companions for many people. Thus, it is worthwhile to evaluate the applicability of modern smartphones as carrier of menu systems for immersive virtual environments. To do so, we implemented a platform-independent menu system for smartphones and evaluated it in two different ways. First, we performed an expert review in order to identify potential design flaws and to test the applicability of the approach for demonstrations of VR applications from a demonstrator's point of view. Second, we conducted a user study with 21 participants to test user acceptance of the menu system. The results of the two studies were contradictory: while experts appreciated the system very much, user acceptance was lower than expected. 
From these results we could draw conclusions on how smartphones should be used to realize system control in virtual environments and we could identify connecting factors for future research on the topic.", "abstracts": [ { "abstractType": "Regular", "content": "System control is a crucial task for many virtual reality applications and can be realized in a broad variety of ways, whereat the most common way is the use of graphical menus. These are often implemented as part of the virtual environment, but can also be displayed on mobile devices. Until now, many systems and studies have been published on using mobile devices such as personal digital assistants (PDAs) to realize such menu systems. However, most of these systems have been proposed way before smartphones existed and evolved to everyday companions for many people. Thus, it is worthwhile to evaluate the applicability of modern smartphones as carrier of menu systems for immersive virtual environments. To do so, we implemented a platform-independent menu system for smartphones and evaluated it in two different ways. First, we performed an expert review in order to identify potential design flaws and to test the applicability of the approach for demonstrations of VR applications from a demonstrator's point of view. Second, we conducted a user study with 21 participants to test user acceptance of the menu system. The results of the two studies were contradictory: while experts appreciated the system very much, user acceptance was lower than expected. From these results we could draw conclusions on how smartphones should be used to realize system control in virtual environments and we could identify connecting factors for future research on the topic.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "System control is a crucial task for many virtual reality applications and can be realized in a broad variety of ways, whereat the most common way is the use of graphical menus. 
These are often implemented as part of the virtual environment, but can also be displayed on mobile devices. Until now, many systems and studies have been published on using mobile devices such as personal digital assistants (PDAs) to realize such menu systems. However, most of these systems have been proposed way before smartphones existed and evolved to everyday companions for many people. Thus, it is worthwhile to evaluate the applicability of modern smartphones as carrier of menu systems for immersive virtual environments. To do so, we implemented a platform-independent menu system for smartphones and evaluated it in two different ways. First, we performed an expert review in order to identify potential design flaws and to test the applicability of the approach for demonstrations of VR applications from a demonstrator's point of view. Second, we conducted a user study with 21 participants to test user acceptance of the menu system. The results of the two studies were contradictory: while experts appreciated the system very much, user acceptance was lower than expected. 
From these results we could draw conclusions on how smartphones should be used to realize system control in virtual environments and we could identify connecting factors for future research on the topic.", "fno": "06798837", "keywords": [ "Mobile Communication", "Smart Phones", "Usability", "Control Systems", "Virtual Environments", "Navigation" ], "authors": [ { "affiliation": "Virtual Reality Group, RWTH Aachen University, Germany", "fullName": "Sascha Gebhardt", "givenName": "Sascha", "surname": "Gebhardt", "__typename": "ArticleAuthorType" }, { "affiliation": "Virtual Reality Group, RWTH Aachen University, Germany", "fullName": "Sebastian Pick", "givenName": "Sebastian", "surname": "Pick", "__typename": "ArticleAuthorType" }, { "affiliation": "Virtual Reality Group, RWTH Aachen University, Germany", "fullName": "Thomas Oster", "givenName": "Thomas", "surname": "Oster", "__typename": "ArticleAuthorType" }, { "affiliation": "Virtual Reality Group, RWTH Aachen University, Germany", "fullName": "Bernd Hentschel", "givenName": "Bernd", "surname": "Hentschel", "__typename": "ArticleAuthorType" }, { "affiliation": "Virtual Reality Group, RWTH Aachen University, Germany", "fullName": "Torsten Kuhlen", "givenName": "Torsten", "surname": "Kuhlen", "__typename": "ArticleAuthorType" } ], "idPrefix": "3dui", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-03-01T00:00:00", "pubType": "proceedings", "pages": "31-34", "year": "2014", "issn": null, "isbn": "978-1-4799-3624-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06798836", "articleId": "12OmNxWcH3Q", "__typename": "AdjacentArticleType" }, "next": { "fno": "06798838", "articleId": "12OmNBOUxsb", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/svr/2013/5001/0/06655766", "title": "An Interaction Tool for 
Immersive Environments Using Mobile Devices", "doi": null, "abstractUrl": "/proceedings-article/svr/2013/06655766/12OmNBVrjiW", "parentPublication": { "id": "proceedings/svr/2013/5001/0", "title": "2013 XV Symposium on Virtual and Augmented Reality (SVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2014/2871/0/06802066", "title": "Tablet-based interaction panels for immersive environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2014/06802066/12OmNqN6R3O", "parentPublication": { "id": "proceedings/vr/2014/2871/0", "title": "2014 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mdm/2015/9972/1/9972a287", "title": "Scalable Mockup Experiments on Smartphones Using Smart Lab", "doi": null, "abstractUrl": "/proceedings-article/mdm/2015/9972a287/12OmNvA1hh4", "parentPublication": { "id": "proceedings/mdm/2015/9972/2", "title": "2015 16th IEEE International Conference on Mobile Data Management (MDM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icci-cc/2012/2795/0/06311146", "title": "A smart phone information appliance for improved usability", "doi": null, "abstractUrl": "/proceedings-article/icci-cc/2012/06311146/12OmNwt5so9", "parentPublication": { "id": "proceedings/icci-cc/2012/2795/0", "title": "2012 11th IEEE International Conference on Cognitive Informatics & Cognitive Computing (ICCI*CC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504707", "title": "Depth-based 3D gesture multi-level radial menu for virtual object manipulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504707/12OmNx3HI96", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" 
}, { "id": "proceedings/vr/2001/0948/0/09480149", "title": "Design and Evaluation of Menu Systems for Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2001/09480149/12OmNy3Agvx", "parentPublication": { "id": "proceedings/vr/2001/0948/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2000/0478/0/04780281", "title": "Multimodal Menu Presentation and Selection in Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2000/04780281/12OmNzDehfH", "parentPublication": { "id": "proceedings/vr/2000/0478/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223376", "title": "I'm There! The influence of virtual reality and mixed reality environments combined with two different navigation methods on presence", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223376/12OmNzYwcc3", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444773", "title": "Comparative study of the performances of several haptic modalities for a 3D menu", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444773/12OmNzxPTPG", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, 
"__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNyugyPC", "title": "Haptic Interfaces for Virtual Environment and Teleoperator Systems, International Symposium on", "acronym": "haptics", "groupId": "1000312", "volume": "0", "displayVolume": "0", "year": "2004", "__typename": "ProceedingType" }, "article": { "id": "12OmNqyUUH0", "doi": "10.1109/HAPTIC.2004.1287200", "title": "A Study of Haptic Linear and Pie Menus in a 3D Fish Tank VR Environment", "normalizedTitle": "A Study of Haptic Linear and Pie Menus in a 3D Fish Tank VR Environment", "abstract": "New pop-up menu styles such as pie menus and marking menus have proven to be measurably faster and more accurate in mouse and pen-based interfaces. These characteristics suggest that they may also be useful for 3D haptically enhanced environments. This paper reports on our development and evaluation of a pie menu style and two types of linear menu styles. All utilize force to aid the user in option selection and activation. Our evaluation results show that selection using the pie menu is considerably faster and more accurate than both types of linear menu. Selection using push-through or exceed border methods was found to be superior to conventional button selection. We conclude that pop-up pie menus, with the right combination of selection method and assistive forces, can provide an excellent solution to providing menu choices in 3D haptic environments and that considering speed accuracy tradeoffs is important in making design decisions.", "abstracts": [ { "abstractType": "Regular", "content": "New pop-up menu styles such as pie menus and marking menus have proven to be measurably faster and more accurate in mouse and pen-based interfaces. These characteristics suggest that they may also be useful for 3D haptically enhanced environments. This paper reports on our development and evaluation of a pie menu style and two types of linear menu styles. All utilize force to aid the user in option selection and activation. 
Our evaluation results show that selection using the pie menu is considerably faster and more accurate than both types of linear menu. Selection using push-through or exceed border methods was found to be superior to conventional button selection. We conclude that pop-up pie menus, with the right combination of selection method and assistive forces, can provide an excellent solution to providing menu choices in 3D haptic environments and that considering speed accuracy tradeoffs is important in making design decisions.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "New pop-up menu styles such as pie menus and marking menus have proven to be measurably faster and more accurate in mouse and pen-based interfaces. These characteristics suggest that they may also be useful for 3D haptically enhanced environments. This paper reports on our development and evaluation of a pie menu style and two types of linear menu styles. All utilize force to aid the user in option selection and activation. Our evaluation results show that selection using the pie menu is considerably faster and more accurate than both types of linear menu. Selection using push-through or exceed border methods was found to be superior to conventional button selection. 
We conclude that pop-up pie menus, with the right combination of selection method and assistive forces, can provide an excellent solution to providing menu choices in 3D haptic environments and that considering speed accuracy tradeoffs is important in making design decisions.", "fno": "21120224", "keywords": [], "authors": [ { "affiliation": "University of New Hampshire", "fullName": "Rick Komerska", "givenName": "Rick", "surname": "Komerska", "__typename": "ArticleAuthorType" }, { "affiliation": "University of New Hampshire", "fullName": "Colin Ware", "givenName": "Colin", "surname": "Ware", "__typename": "ArticleAuthorType" } ], "idPrefix": "haptics", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2004-03-01T00:00:00", "pubType": "proceedings", "pages": "224-231", "year": "2004", "issn": "Pending", "isbn": "0-7695-2112-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "21120216", "articleId": "12OmNwE9OUQ", "__typename": "AdjacentArticleType" }, "next": { "fno": "21120234", "articleId": "12OmNvxbhJ5", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/iswc/2009/3779/0/3779a063", "title": "A Comparison of Menu Configurations and Pointing Devices for Use with Wearable Computers while Mobile and Stationary", "doi": null, "abstractUrl": "/proceedings-article/iswc/2009/3779a063/12OmNB7LvG6", "parentPublication": { "id": "proceedings/iswc/2009/3779/0", "title": "2009 International Symposium on Wearable Computers", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2009/3791/0/3791a292", "title": "Comparison of Adaptive, Adaptable and Mixed-Initiative Menus", "doi": null, "abstractUrl": "/proceedings-article/cw/2009/3791a292/12OmNBBzoem", "parentPublication": { "id": "proceedings/cw/2009/3791/0", "title": "2009 
International Conference on CyberWorlds", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2010/6846/0/05444721", "title": "An evaluation of menu properties and pointing techniques in a projection-based VR environment", "doi": null, "abstractUrl": "/proceedings-article/3dui/2010/05444721/12OmNCdBDQK", "parentPublication": { "id": "proceedings/3dui/2010/6846/0", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2015/6886/0/07131737", "title": "Handymenu: Integrating menu selection into a multifunction smartphone-based VR controller", "doi": null, "abstractUrl": "/proceedings-article/3dui/2015/07131737/12OmNx19jZG", "parentPublication": { "id": "proceedings/3dui/2015/6886/0", "title": "2015 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2009/3965/0/04811220", "title": "Poster: Shake menus: Towards activation and placement techniques for prop-based 3D graphical menus", "doi": null, "abstractUrl": "/proceedings-article/3dui/2009/04811220/12OmNy1SFIp", "parentPublication": { "id": "proceedings/3dui/2009/3965/0", "title": "2009 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/tabletop/2007/3013/0/30130121", "title": "Improving Menu Interaction for Cluttered Tabletop Setups with User-Drawn Path Menus", "doi": null, "abstractUrl": "/proceedings-article/tabletop/2007/30130121/12OmNyVes0N", "parentPublication": { "id": "proceedings/tabletop/2007/3013/0", "title": "Horizontal Interactive Human-Computer Systems, International Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isuvr/2011/4420/0/4420b055", "title": "Graphical Menus Using a Mobile Phone for 
Wearable AR Systems", "doi": null, "abstractUrl": "/proceedings-article/isuvr/2011/4420b055/12OmNz2C1y4", "parentPublication": { "id": "proceedings/isuvr/2011/4420/0", "title": "International Symposium on Ubiquitous Virtual Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icicic/2008/3161/0/31610364", "title": "Enhancing Pie-Menu Selection with Pen Pressure", "doi": null, "abstractUrl": "/proceedings-article/icicic/2008/31610364/12OmNzn38SO", "parentPublication": { "id": "proceedings/icicic/2008/3161/0", "title": "Innovative Computing ,Information and Control, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wevr/2019/4050/0/08809589", "title": "Passive Haptic Menus for Desk-Based and HMD-Projected Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/wevr/2019/08809589/1cI61Rx4b9m", "parentPublication": { "id": "proceedings/wevr/2019/4050/0", "title": "2019 IEEE 5th Workshop on Everyday Virtual Reality (WEVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAXxWQw", "title": "2017 IEEE Symposium on Computers and Communications (ISCC)", "acronym": "iscc", "groupId": "1000156", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNwkR5vU", "doi": "10.1109/ISCC.2017.8024653", "title": "PIE: A personalized incentive for location-aware mobile crowd sensing", "normalizedTitle": "PIE: A personalized incentive for location-aware mobile crowd sensing", "abstract": "Mobile crowd sensing has the potential to acquire massive data from places and address large-scale societal problems. However, most currently existing crowd sensing systems suffer from insufficient participants. Therefore, incentive design for crowd sensing is essential and urgent. In this paper, different from the auction-based and server-dominant incentives, we design a personalized incentive, PIE, with partiality for neither the server nor the participants with budget constraint. The total payment for all the participants accords to their collective participation level, and the individual reward for each participant depends on individual contribution. We measure the individual contribution and participation level based on Voronoi diagram and Shannon entropy. Both offline and online incentives are proposed with budget constraint. Experimental study shows that our incentives are participation-aware and contribution-dependent, which encourages participants' active join, balanced distribution and flexible reward.", "abstracts": [ { "abstractType": "Regular", "content": "Mobile crowd sensing has the potential to acquire massive data from places and address large-scale societal problems. However, most currently existing crowd sensing systems suffer from insufficient participants. Therefore, incentive design for crowd sensing is essential and urgent. 
In this paper, different from the auction-based and server-dominant incentives, we design a personalized incentive, PIE, with partiality for neither the server nor the participants with budget constraint. The total payment for all the participants accords to their collective participation level, and the individual reward for each participant depends on individual contribution. We measure the individual contribution and participation level based on Voronoi diagram and Shannon entropy. Both offline and online incentives are proposed with budget constraint. Experimental study shows that our incentives are participation-aware and contribution-dependent, which encourages participants' active join, balanced distribution and flexible reward.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Mobile crowd sensing has the potential to acquire massive data from places and address large-scale societal problems. However, most currently existing crowd sensing systems suffer from insufficient participants. Therefore, incentive design for crowd sensing is essential and urgent. In this paper, different from the auction-based and server-dominant incentives, we design a personalized incentive, PIE, with partiality for neither the server nor the participants with budget constraint. The total payment for all the participants accords to their collective participation level, and the individual reward for each participant depends on individual contribution. We measure the individual contribution and participation level based on Voronoi diagram and Shannon entropy. Both offline and online incentives are proposed with budget constraint. 
Experimental study shows that our incentives are participation-aware and contribution-dependent, which encourages participants' active join, balanced distribution and flexible reward.", "fno": "08024653", "keywords": [ "Sensors", "Servers", "Atmospheric Measurements", "Particle Measurements", "Entropy", "Mobile Communication", "Computational Modeling" ], "authors": [ { "affiliation": "Key Laboratory of Data Engineering and Knowledge Engineering of Ministry of Education, Beijing, China", "fullName": "Yao Wu", "givenName": null, "surname": "Yao Wu", "__typename": "ArticleAuthorType" }, { "affiliation": "Key Laboratory of Data Engineering and Knowledge Engineering of Ministry of Education, Beijing, China", "fullName": "Yuncheng Wu", "givenName": null, "surname": "Yuncheng Wu", "__typename": "ArticleAuthorType" }, { "affiliation": "Key Laboratory of Data Engineering and Knowledge Engineering of Ministry of Education, Beijing, China", "fullName": "Juru Zeng", "givenName": null, "surname": "Juru Zeng", "__typename": "ArticleAuthorType" }, { "affiliation": "Key Laboratory of Data Engineering and Knowledge Engineering of Ministry of Education, Beijing, China", "fullName": "Hong Chen", "givenName": null, "surname": "Hong Chen", "__typename": "ArticleAuthorType" }, { "affiliation": "Key Laboratory of Data Engineering and Knowledge Engineering of Ministry of Education, Beijing, China", "fullName": "Cuiping Li", "givenName": null, "surname": "Cuiping Li", "__typename": "ArticleAuthorType" } ], "idPrefix": "iscc", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-07-01T00:00:00", "pubType": "proceedings", "pages": "981-986", "year": "2017", "issn": null, "isbn": "978-1-5386-1629-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08024652", "articleId": "12OmNwNeYAD", "__typename": "AdjacentArticleType" }, "next": { "fno": "08024654", 
"articleId": "12OmNC9lEGb", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/percomw/2013/5075/0/06529518", "title": "Crowd-sensing: Why context matters", "doi": null, "abstractUrl": "/proceedings-article/percomw/2013/06529518/12OmNBzAcj4", "parentPublication": { "id": "proceedings/percomw/2013/5075/0", "title": "2013 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops 2013)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/uic-atc-scalcom/2014/7646/0/7646a179", "title": "A Cross-Space, Multi-interaction-Based Dynamic Incentive Mechanism for Mobile Crowd Sensing", "doi": null, "abstractUrl": "/proceedings-article/uic-atc-scalcom/2014/7646a179/12OmNrHjqHW", "parentPublication": { "id": "proceedings/uic-atc-scalcom/2014/7646/0", "title": "2014 IEEE 11th Intl Conf on Ubiquitous Intelligence & Computing and 2014 IEEE 11th Intl Conf on Autonomic & Trusted Computing and 2014 IEEE 14th Intl Conf on Scalable Computing and Communications and Its Associated Workshops (UIC-ATC-ScalCom)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mdm/2014/5705/1/5705a073", "title": "Spatial Task Assignment for Crowd Sensing with Cloaked Locations", "doi": null, "abstractUrl": "/proceedings-article/mdm/2014/5705a073/12OmNxwWoJ0", "parentPublication": { "id": "proceedings/mdm/2014/5705/2", "title": "2014 15th IEEE International Conference on Mobile Data Management (MDM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iiki/2015/8637/0/8637a250", "title": "A Data-centric Cooperative Sensing Scheme in Crowdsourcing Systems", "doi": null, "abstractUrl": "/proceedings-article/iiki/2015/8637a250/12OmNyOq55z", "parentPublication": { "id": "proceedings/iiki/2015/8637/0", "title": "2015 International Conference on 
Identification, Information, and Knowledge in the Internet of Things (IIKI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hpcc-euc/2013/5088/0/06832188", "title": "Participation-Aware Incentive for Active Crowd Sensing", "doi": null, "abstractUrl": "/proceedings-article/hpcc-euc/2013/06832188/12OmNzBOhXI", "parentPublication": { "id": "proceedings/hpcc-euc/2013/5088/0", "title": "2013 IEEE International Conference on High Performance Computing and Communications (HPCC) & 2013 IEEE International Conference on Embedded and Ubiquitous Computing (EUC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/scc/2015/7281/0/7281a293", "title": "Effective Opportunistic Crowd Sensing IoT System for Restoring Missing Objects", "doi": null, "abstractUrl": "/proceedings-article/scc/2015/7281a293/12OmNzt0ILj", "parentPublication": { "id": "proceedings/scc/2015/7281/0", "title": "2015 IEEE International Conference on Services Computing (SCC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mass/2014/6036/0/6036a001", "title": "Finding Nemo: Finding Your Lost Child in Crowds via Mobile Crowd Sensing", "doi": null, "abstractUrl": "/proceedings-article/mass/2014/6036a001/12OmNzxgHBj", "parentPublication": { "id": "proceedings/mass/2014/6036/0", "title": "2014 IEEE 11th International Conference on Mobile Ad Hoc and Sensor Systems (MASS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartiot/2018/8543/0/854300a222", "title": "Incentive Mechanism Design Based on Stochastic Game for Multi-modality Crowd Sensing", "doi": null, "abstractUrl": "/proceedings-article/smartiot/2018/854300a222/13HFzbBCFm9", "parentPublication": { "id": "proceedings/smartiot/2018/8543/0", "title": "2018 IEEE International Conference on Smart Internet of Things (SmartIoT)", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tc/2014/01/ttc2014010115", "title": "Incentive Mechanisms for Community Sensing", "doi": null, "abstractUrl": "/journal/tc/2014/01/ttc2014010115/13rRUx0xPhr", "parentPublication": { "id": "trans/tc", "title": "IEEE Transactions on Computers", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/percomw/2018/3227/0/08480185", "title": "Enabling Crowd Sensing for Non-Experts", "doi": null, "abstractUrl": "/proceedings-article/percomw/2018/08480185/17D45We0UDM", "parentPublication": { "id": "proceedings/percomw/2018/3227/0", "title": "2018 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNzVXNJh", "title": "2015 IEEE Symposium on 3D User Interfaces (3DUI)", "acronym": "3dui", "groupId": "1001623", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNx19jZG", "doi": "10.1109/3DUI.2015.7131737", "title": "Handymenu: Integrating menu selection into a multifunction smartphone-based VR controller", "normalizedTitle": "Handymenu: Integrating menu selection into a multifunction smartphone-based VR controller", "abstract": "We integrated touch menus into a cohesive smartphone-based VR controller. Smartphone touch surfaces offer new interaction styles and also aid VR interaction when tracking is absent or imprecise or when users have limited arm mobility or fatigue. In Handymenu, a touch surface is split into two areas: one for menu interaction and the other for spatial interactions such as VR object selection, manipulation, navigation, or parameter adjustment. Users in our studies transitioned between the two areas and performed nested, repeated selections. A formal experiment included VR object selection (ray and touch), menu selection (ray and touch), menu layout (pie and grid), as well as touch and visual feedback sizes in some cases (two levels each).", "abstracts": [ { "abstractType": "Regular", "content": "We integrated touch menus into a cohesive smartphone-based VR controller. Smartphone touch surfaces offer new interaction styles and also aid VR interaction when tracking is absent or imprecise or when users have limited arm mobility or fatigue. In Handymenu, a touch surface is split into two areas: one for menu interaction and the other for spatial interactions such as VR object selection, manipulation, navigation, or parameter adjustment. Users in our studies transitioned between the two areas and performed nested, repeated selections. 
A formal experiment included VR object selection (ray and touch), menu selection (ray and touch), menu layout (pie and grid), as well as touch and visual feedback sizes in some cases (two levels each).", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We integrated touch menus into a cohesive smartphone-based VR controller. Smartphone touch surfaces offer new interaction styles and also aid VR interaction when tracking is absent or imprecise or when users have limited arm mobility or fatigue. In Handymenu, a touch surface is split into two areas: one for menu interaction and the other for spatial interactions such as VR object selection, manipulation, navigation, or parameter adjustment. Users in our studies transitioned between the two areas and performed nested, repeated selections. A formal experiment included VR object selection (ray and touch), menu selection (ray and touch), menu layout (pie and grid), as well as touch and visual feedback sizes in some cases (two levels each).", "fno": "07131737", "keywords": [ "Visualization", "Layout", "Three Dimensional Displays", "Navigation", "TV", "Thumb", "Portable Media Players", "Smartphone", "Touch", "Menus", "Virtual Reality", "3 DTV" ], "authors": [ { "affiliation": "University of Louisiana at Lafayette, USA", "fullName": "Nicholas G. Lipari", "givenName": "Nicholas G.", "surname": "Lipari", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Louisiana at Lafayette, USA", "fullName": "Christoph W. 
Borst", "givenName": "Christoph W.", "surname": "Borst", "__typename": "ArticleAuthorType" } ], "idPrefix": "3dui", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "129-132", "year": "2015", "issn": null, "isbn": "978-1-4673-6886-5", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07131736", "articleId": "12OmNxRnvND", "__typename": "AdjacentArticleType" }, "next": { "fno": "07131738", "articleId": "12OmNxFaLuV", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2008/2047/0/04476592", "title": "Tech-note: rapMenu: Remote Menu Selection Using Freehand Gestural Input", "doi": null, "abstractUrl": "/proceedings-article/3dui/2008/04476592/12OmNAS9zL1", "parentPublication": { "id": "proceedings/3dui/2008/2047/0", "title": "2008 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/uic-atc-scalcom/2014/7646/0/7646a362", "title": "Defining and Analyzing a Gesture Set for Interactive TV Remote on Touchscreen Phones", "doi": null, "abstractUrl": "/proceedings-article/uic-atc-scalcom/2014/7646a362/12OmNBOllrx", "parentPublication": { "id": "proceedings/uic-atc-scalcom/2014/7646/0", "title": "2014 IEEE 11th Intl Conf on Ubiquitous Intelligence & Computing and 2014 IEEE 11th Intl Conf on Autonomic & Trusted Computing and 2014 IEEE 14th Intl Conf on Scalable Computing and Communications and Its Associated Workshops (UIC-ATC-ScalCom)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2010/6846/0/05444721", "title": "An evaluation of menu properties and pointing techniques in a projection-based VR environment", "doi": null, "abstractUrl": 
"/proceedings-article/3dui/2010/05444721/12OmNCdBDQK", "parentPublication": { "id": "proceedings/3dui/2010/6846/0", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811045", "title": "iPhone/iPod Touch as Input Devices for Navigation in Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811045/12OmNqGitWe", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504707", "title": "Depth-based 3D gesture multi-level radial menu for virtual object manipulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504707/12OmNx3HI96", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2000/0478/0/04780281", "title": "Multimodal Menu Presentation and Selection in Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2000/04780281/12OmNzDehfH", "parentPublication": { "id": "proceedings/vr/2000/0478/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a832", "title": "GazeDock: Gaze-Only Menu Selection in Virtual Reality using Auto-Triggering Peripheral Menu", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a832/1CJbR6qnKdW", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a582", "title": "Multi-Touch 
Smartphone-Based Progressive Refinement VR Selection", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a582/1CJcBfmyX5K", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a304", "title": "IMPReSS: Improved Multi-Touch Progressive Refinement Selection Strategy", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a304/1CJetSxfyi4", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2021/1298/0/129800a362", "title": "VRSmartphoneSketch: Augmenting VR Controller With A Smartphone For Mid-air Sketching", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2021/129800a362/1yeQLhUPzos", "parentPublication": { "id": "proceedings/ismar-adjunct/2021/1298/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNyfdOId", "title": "2009 33rd Annual IEEE International Computer Software and Applications Conference", "acronym": "compsac", "groupId": "1000143", "volume": "1", "displayVolume": "2", "year": "2009", "__typename": "ProceedingType" }, "article": { "id": "12OmNxIzWOh", "doi": "10.1109/COMPSAC.2009.22", "title": "A Study of Applying Extended PIE Technique to Software Testability Analysis", "normalizedTitle": "A Study of Applying Extended PIE Technique to Software Testability Analysis", "abstract": "During the software development process, data that has been gained from the testing phase can help developers to predict software reliability more precisely. But the testing stage usually takes more and more effort due to the growing complexity of software. How to build software that can be tested efficiently has become an important topic in addition to enhancing and developing new testing methods. Thus, research on software testability has been developed variously. In the past, a dynamic technique for estimating program testability was proposed and called propagation, infection, and execution (PIE) analysis. Previous research studies show that PIE analysis can complement software testing. However, this technique requires a lot of computational overhead in estimating the testability of software components. In this paper, we propose an Extended PIE (EPIE) technique to accelerate the traditional PIE analysis, based on generating group testability as a substitute for location testability. This technique can be separated into three steps: breaking a program into blocks, dividing blocks into groups, and marking target statements. We developed a tool called ePAT (extended PIE Analysis Tool) to help us identify the locations which will be analyzed. 
The experimental results show that the number of analyzed locations can be effectively decreased and that the estimated value of testability remains acceptable and useful.", "abstracts": [ { "abstractType": "Regular", "content": "During the software development process, data that has been gained from the testing phase can help developers to predict software reliability more precisely. But the testing stage usually takes more and more effort due to the growing complexity of software. How to build software that can be tested efficiently has become an important topic in addition to enhancing and developing new testing methods. Thus, research on software testability has been developed variously. In the past, a dynamic technique for estimating program testability was proposed and called propagation, infection, and execution (PIE) analysis. Previous research studies show that PIE analysis can complement software testing. However, this technique requires a lot of computational overhead in estimating the testability of software components. In this paper, we propose an Extended PIE (EPIE) technique to accelerate the traditional PIE analysis, based on generating group testability as a substitute for location testability. This technique can be separated into three steps: breaking a program into blocks, dividing blocks into groups, and marking target statements. We developed a tool called ePAT (extended PIE Analysis Tool) to help us identify the locations which will be analyzed. The experimental results show that the number of analyzed locations can be effectively decreased and that the estimated value of testability remains acceptable and useful.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "During the software development process, data that has been gained from the testing phase can help developers to predict software reliability more precisely. But the testing stage usually takes more and more effort due to the growing complexity of software. 
How to build software that can be tested efficiently has become an important topic in addition to enhancing and developing new testing methods. Thus, research on software testability has been developed variously. In the past, a dynamic technique for estimating program testability was proposed and called propagation, infection, and execution (PIE) analysis. Previous research studies show that PIE analysis can complement software testing. However, this technique requires a lot of computational overhead in estimating the testability of software components. In this paper, we propose an Extended PIE (EPIE) technique to accelerate the traditional PIE analysis, based on generating group testability as a substitute for location testability. This technique can be separated into three steps: breaking a program into blocks, dividing blocks into groups, and marking target statements. We developed a tool called ePAT (extended PIE Analysis Tool) to help us identify the locations which will be analyzed. 
The experimental results show that the number of analyzed locations can be effectively decreased and that the estimated value of testability remains acceptable and useful.", "fno": "3726a089", "keywords": [ "Testability Software Testing" ], "authors": [ { "affiliation": null, "fullName": "Tsung-Han Tsai", "givenName": "Tsung-Han", "surname": "Tsai", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Chin-Yu Huang", "givenName": "Chin-Yu", "surname": "Huang", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Jun-Ru Chang", "givenName": "Jun-Ru", "surname": "Chang", "__typename": "ArticleAuthorType" } ], "idPrefix": "compsac", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2009-07-01T00:00:00", "pubType": "proceedings", "pages": "89-98", "year": "2009", "issn": null, "isbn": null, "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "3726a081", "articleId": "12OmNvjyy2a", "__typename": "AdjacentArticleType" }, "next": { "fno": "3726a099", "articleId": "12OmNx8wTuH", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icsm/2002/1819/0/18190422", "title": "Testability Analysis for Software Components", "doi": null, "abstractUrl": "/proceedings-article/icsm/2002/18190422/12OmNBOlln3", "parentPublication": { "id": "proceedings/icsm/2002/1819/0", "title": "International Conference on Software Maintenance, 2002. 
Proceedings.", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/step/1997/7840/0/78400209", "title": "Promoting the Software Design for Testability Towards a Partial Test Oracle", "doi": null, "abstractUrl": "/proceedings-article/step/1997/78400209/12OmNBTJIIg", "parentPublication": { "id": "proceedings/step/1997/7840/0", "title": "Software Technology and Engineering Practice, International Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ic3/2014/5172/0/06897198", "title": "An enhanced marking target statement strategy of E-PIE for testability estimation", "doi": null, "abstractUrl": "/proceedings-article/ic3/2014/06897198/12OmNrYCXI8", "parentPublication": { "id": "proceedings/ic3/2014/5172/0", "title": "2014 Seventh International Conference on Contemporary Computing (IC3)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/qsic/2003/2015/0/20150351", "title": "Testability Analysis Applied to Embedded Data-flow Software", "doi": null, "abstractUrl": "/proceedings-article/qsic/2003/20150351/12OmNrkT7wW", "parentPublication": { "id": "proceedings/qsic/2003/2015/0", "title": "Quality Software, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/issre/1998/8991/0/89910090", "title": "Applying Testability to Reliability Estimation", "doi": null, "abstractUrl": "/proceedings-article/issre/1998/89910090/12OmNwoPtmY", "parentPublication": { "id": "proceedings/issre/1998/8991/0", "title": "Proceedings Ninth International Symposium on Software Reliability Engineering (Cat. 
No.98TB100257)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icstw/2011/4345/0/4345a414", "title": "Refactoring as Testability Transformation", "doi": null, "abstractUrl": "/proceedings-article/icstw/2011/4345a414/12OmNxGAL6h", "parentPublication": { "id": "proceedings/icstw/2011/4345/0", "title": "Software Testing Verification and Validation Workshop, IEEE International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/compsac/2005/2413/2/241320211", "title": "A Component Testability Model for Verification and Measurement", "doi": null, "abstractUrl": "/proceedings-article/compsac/2005/241320211/12OmNxaw59E", "parentPublication": { "id": "proceedings/compsac/2005/2413/2", "title": "29th Annual International Computer Software and Applications Conference (COMPSAC'05)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/csmr/1998/8421/0/84210096", "title": "Software Testability Measurements Derived from Data Flow Analysis", "doi": null, "abstractUrl": "/proceedings-article/csmr/1998/84210096/12OmNzBOimJ", "parentPublication": { "id": "proceedings/csmr/1998/8421/0", "title": "Proceedings of the Second Euromicro Conference on Software Maintenance and Reengineering", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icse/2006/2832/0/28320985", "title": "A new approach for software testability analysis", "doi": null, "abstractUrl": "/proceedings-article/icse/2006/28320985/12OmNzIUg1a", "parentPublication": { "id": "proceedings/icse/2006/2832/0", "title": "Software Engineering, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ts/1992/08/e0717", "title": "PIE: A Dynamic Failure-Based Technique", "doi": null, "abstractUrl": "/journal/ts/1992/08/e0717/13rRUxDqSa2", 
"parentPublication": { "id": "trans/ts", "title": "IEEE Transactions on Software Engineering", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNrIJqwn", "title": "Virtual Reality Conference, IEEE", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2001", "__typename": "ProceedingType" }, "article": { "id": "12OmNy3Agvx", "doi": "10.1109/VR.2001.913781", "title": "Design and Evaluation of Menu Systems for Immersive Virtual Environments", "normalizedTitle": "Design and Evaluation of Menu Systems for Immersive Virtual Environments", "abstract": "Interfaces for system control tasks in virtual environments (VEs) have not been extensively studied. This paper focuses on various types of menu systems to be used in such environments. We describe the design of the TULIP menu, a menu system using Pinch Gloves(tm), and compare it to two common alternatives: floating menus and pen and tablet menus. These three menus were compared in an empirical evaluation. The pen and tablet menu was found to be significantly faster, while users had a preference for TULIP. Subjective discomfort levels were also higher with the floating menus and pen and tablet.", "abstracts": [ { "abstractType": "Regular", "content": "Interfaces for system control tasks in virtual environments (VEs) have not been extensively studied. This paper focuses on various types of menu systems to be used in such environments. We describe the design of the TULIP menu, a menu system using Pinch Gloves(tm), and compare it to two common alternatives: floating menus and pen and tablet menus. These three menus were compared in an empirical evaluation. The pen and tablet menu was found to be significantly faster, while users had a preference for TULIP. Subjective discomfort levels were also higher with the floating menus and pen and tablet.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Interfaces for system control tasks in virtual environments (VEs) have not been extensively studied. This paper focuses on various types of menu systems to be used in such environments. 
We describe the design of the TULIP menu, a menu system using Pinch Gloves(tm), and compare it to two common alternatives: floating menus and pen and tablet menus. These three menus were compared in an empirical evaluation. The pen and tablet menu was found to be significantly faster, while users had a preference for TULIP. Subjective discomfort levels were also higher with the floating menus and pen and tablet.", "fno": "09480149", "keywords": [], "authors": [ { "affiliation": "Virginia Tech", "fullName": "Doug A. Bowman", "givenName": "Doug A.", "surname": "Bowman", "__typename": "ArticleAuthorType" }, { "affiliation": "Virginia Tech", "fullName": "Chadwick A. Wingrave", "givenName": "Chadwick A.", "surname": "Wingrave", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2001-03-01T00:00:00", "pubType": "proceedings", "pages": "149", "year": "2001", "issn": "1087-8270", "isbn": "0-7695-0948-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09480141", "articleId": "12OmNroijna", "__typename": "AdjacentArticleType" }, "next": { "fno": "09480157", "articleId": "12OmNvAAtJd", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2014/3624/0/06798837", "title": "An evaluation of a smart-phone-based menu system for immersive virtual environments", "doi": null, "abstractUrl": "/proceedings-article/3dui/2014/06798837/12OmNBp52Dj", "parentPublication": { "id": "proceedings/3dui/2014/3624/0", "title": "2014 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2010/6846/0/05444721", "title": "An evaluation of menu properties and pointing techniques in a projection-based VR environment", "doi": null, "abstractUrl": 
"/proceedings-article/3dui/2010/05444721/12OmNCdBDQK", "parentPublication": { "id": "proceedings/3dui/2010/6846/0", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/haptics/2004/2112/0/21120224", "title": "A Study of Haptic Linear and Pie Menus in a 3D Fish Tank VR Environment", "doi": null, "abstractUrl": "/proceedings-article/haptics/2004/21120224/12OmNqyUUH0", "parentPublication": { "id": "proceedings/haptics/2004/2112/0", "title": "Haptic Interfaces for Virtual Environment and Teleoperator Systems, International Symposium on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2014/3624/0/06798858", "title": "Poster: Amplitude test for input devices for system control in immersive virtual environment", "doi": null, "abstractUrl": "/proceedings-article/3dui/2014/06798858/12OmNrkT7O0", "parentPublication": { "id": "proceedings/3dui/2014/3624/0", "title": "2014 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2009/3965/0/04811225", "title": "Poster: Evaluation of menu techniques using a 3D game input device", "doi": null, "abstractUrl": "/proceedings-article/3dui/2009/04811225/12OmNwudQOk", "parentPublication": { "id": "proceedings/3dui/2009/3965/0", "title": "2009 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504707", "title": "Depth-based 3D gesture multi-level radial menu for virtual object manipulation", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504707/12OmNx3HI96", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/tabletop/2007/3013/0/30130121", "title": "Improving Menu Interaction for Cluttered Tabletop Setups with User-Drawn Path Menus", "doi": null, "abstractUrl": "/proceedings-article/tabletop/2007/30130121/12OmNyVes0N", "parentPublication": { "id": "proceedings/tabletop/2007/3013/0", "title": "Horizontal Interactive Human-Computer Systems, International Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2000/0478/0/04780281", "title": "Multimodal Menu Presentation and Selection in Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2000/04780281/12OmNzDehfH", "parentPublication": { "id": "proceedings/vr/2000/0478/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icicic/2008/3161/0/31610364", "title": "Enhancing Pie-Menu Selection with Pen Pressure", "doi": null, "abstractUrl": "/proceedings-article/icicic/2008/31610364/12OmNzn38SO", "parentPublication": { "id": "proceedings/icicic/2008/3161/0", "title": "Innovative Computing ,Information and Control, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBqMDBB", "title": "Horizontal Interactive Human-Computer Systems, International Workshop on", "acronym": "tabletop", "groupId": "1001441", "volume": "0", "displayVolume": "0", "year": "2007", "__typename": "ProceedingType" }, "article": { "id": "12OmNyVes0N", "doi": "10.1109/TABLETOP.2007.24", "title": "Improving Menu Interaction for Cluttered Tabletop Setups with User-Drawn Path Menus", "normalizedTitle": "Improving Menu Interaction for Cluttered Tabletop Setups with User-Drawn Path Menus", "abstract": "Many digital tabletop systems have a graphical user interface (GUI) that features context (or pop-up) menus. While linear and pie menus are commonly used for direct pen and touch interaction, their appearance can be problematic on a digital tabletop display, where physical objects might occlude menu items. We propose a user-drawn path menu, that appears along a custom path to avoid such occlusions. This paper introduces four different metaphors for user-drawn context menus: the Fan Out Menu, the Card Deck Menu, the Pearl String Menu, and the Trail Menu. It also presents the results we acquired from a user study, where participants were able to work faster when using our user-drawn menus, on cluttered tabletop setups.", "abstracts": [ { "abstractType": "Regular", "content": "Many digital tabletop systems have a graphical user interface (GUI) that features context (or pop-up) menus. While linear and pie menus are commonly used for direct pen and touch interaction, their appearance can be problematic on a digital tabletop display, where physical objects might occlude menu items. We propose a user-drawn path menu, that appears along a custom path to avoid such occlusions. This paper introduces four different metaphors for user-drawn context menus: the Fan Out Menu, the Card Deck Menu, the Pearl String Menu, and the Trail Menu. 
It also presents the results we acquired from a user study, where participants were able to work faster when using our user-drawn menus, on cluttered tabletop setups.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Many digital tabletop systems have a graphical user interface (GUI) that features context (or pop-up) menus. While linear and pie menus are commonly used for direct pen and touch interaction, their appearance can be problematic on a digital tabletop display, where physical objects might occlude menu items. We propose a user-drawn path menu, that appears along a custom path to avoid such occlusions. This paper introduces four different metaphors for user-drawn context menus: the Fan Out Menu, the Card Deck Menu, the Pearl String Menu, and the Trail Menu. It also presents the results we acquired from a user study, where participants were able to work faster when using our user-drawn menus, on cluttered tabletop setups.", "fno": "30130121", "keywords": [], "authors": [ { "affiliation": null, "fullName": "Daniel Leithinger", "givenName": "Daniel", "surname": "Leithinger", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Michael Haller", "givenName": "Michael", "surname": "Haller", "__typename": "ArticleAuthorType" } ], "idPrefix": "tabletop", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2007-11-01T00:00:00", "pubType": "proceedings", "pages": "121-128", "year": "2007", "issn": null, "isbn": "0-7695-3013-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "30130113", "articleId": "12OmNAY799b", "__typename": "AdjacentArticleType" }, "next": { "fno": "30130137", "articleId": "12OmNxWuiv6", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cw/2009/3791/0/3791a292", "title": "Comparison of Adaptive, Adaptable and 
Mixed-Initiative Menus", "doi": null, "abstractUrl": "/proceedings-article/cw/2009/3791a292/12OmNBBzoem", "parentPublication": { "id": "proceedings/cw/2009/3791/0", "title": "2009 International Conference on CyberWorlds", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/haptics/2004/2112/0/21120224", "title": "A Study of Haptic Linear and Pie Menus in a 3D Fish Tank VR Environment", "doi": null, "abstractUrl": "/proceedings-article/haptics/2004/21120224/12OmNqyUUH0", "parentPublication": { "id": "proceedings/haptics/2004/2112/0", "title": "Haptic Interfaces for Virtual Environment and Teleoperator Systems, International Symposium on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2009/5390/0/05336500", "title": "Interaction and presentation techniques for shake menus in tangible augmented reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2009/05336500/12OmNwc3wt2", "parentPublication": { "id": "proceedings/ismar/2009/5390/0", "title": "2009 8th IEEE International Symposium on Mixed and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2009/3965/0/04811225", "title": "Poster: Evaluation of menu techniques using a 3D game input device", "doi": null, "abstractUrl": "/proceedings-article/3dui/2009/04811225/12OmNwudQOk", "parentPublication": { "id": "proceedings/3dui/2009/3965/0", "title": "2009 IEEE Symposium on 3D User Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2009/3965/0/04811220", "title": "Poster: Shake menus: Towards activation and placement techniques for prop-based 3D graphical menus", "doi": null, "abstractUrl": "/proceedings-article/3dui/2009/04811220/12OmNy1SFIp", "parentPublication": { "id": "proceedings/3dui/2009/3965/0", "title": "2009 IEEE Symposium on 3D User Interfaces", 
"__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2001/0948/0/09480149", "title": "Design and Evaluation of Menu Systems for Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2001/09480149/12OmNy3Agvx", "parentPublication": { "id": "proceedings/vr/2001/0948/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isuvr/2011/4420/0/4420b055", "title": "Graphical Menus Using a Mobile Phone for Wearable AR Systems", "doi": null, "abstractUrl": "/proceedings-article/isuvr/2011/4420b055/12OmNz2C1y4", "parentPublication": { "id": "proceedings/isuvr/2011/4420/0", "title": "International Symposium on Ubiquitous Virtual Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/taai/2021/0825/0/082500a162", "title": "Data Collection Framework on Menus satisfying both Preferences and Nutritional Balance", "doi": null, "abstractUrl": "/proceedings-article/taai/2021/082500a162/1DBZAhoqcnK", "parentPublication": { "id": "proceedings/taai/2021/0825/0", "title": "2021 International Conference on Technologies and Applications of Artificial Intelligence (TAAI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797900", "title": "Menus on the Desk? 
System Control in DeskVR", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797900/1cJ18TJZQf6", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBUAvUm", "title": "Innovative Computing ,Information and Control, International Conference on", "acronym": "icicic", "groupId": "1001501", "volume": "0", "displayVolume": "0", "year": "2008", "__typename": "ProceedingType" }, "article": { "id": "12OmNzn38SO", "doi": "10.1109/ICICIC.2008.252", "title": "Enhancing Pie-Menu Selection with Pen Pressure", "normalizedTitle": "Enhancing Pie-Menu Selection with Pen Pressure", "abstract": "Pie-menu, a very effective item-selection widget, has been proposed to enhance item-selection tasks in pen-based devices such as PDAs (personal digital assistants) and Tablet PCs. The significant advantage of Pie-menu is that it enables users to select items within a menu with only a short distance for the pen-tip to move. However, as the number of items to be selected increases, the architecture of Pie-menu becomes more complex, and this makes such tasks more difficult. Therefore, with a view to overcoming this problem, we developed a novel menu widget called Layer-Pie-Menu which is designed to reduce the architectural complexity by layering numbers of Pie-menus and using pressure as the switch mode to differentiate between the layers. An experiment was conducted to examine the functional efficiency of Layer-Pie-Menu. Experimental results showed that the most suitable number of layers that can be controlled by pressure is two.", "abstracts": [ { "abstractType": "Regular", "content": "Pie-menu, a very effective item-selection widget, has been proposed to enhance item-selection tasks in pen-based devices such as PDAs (personal digital assistants) and Tablet PCs. The significant advantage of Pie-menu is that it enables users to select items within a menu with only a short distance for the pen-tip to move. However, as the number of items to be selected increases, the architecture of Pie-menu becomes more complex, and this makes such tasks more difficult. 
Therefore, with a view to overcoming this problem, we developed a novel menu widget called Layer-Pie-Menu which is designed to reduce the architectural complexity by layering numbers of Pie-menus and using pressure as the switch mode to differentiate between the layers. An experiment was conducted to examine the functional efficiency of Layer-Pie-Menu. Experimental results showed that the most suitable number of layers that can be controlled by pressure is two.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Pie-menu, a very effective item-selection widget, has been proposed to enhance item-selection tasks in pen-based devices such as PDAs (personal digital assistants) and Tablet PCs. The significant advantage of Pie-menu is that it enables users to select items within a menu with only a short distance for the pen-tip to move. However, as the number of items to be selected increases, the architecture of Pie-menu becomes more complex, and this makes such tasks more difficult. Therefore, with a view to overcoming this problem, we developed a novel menu widget called Layer-Pie-Menu which is designed to reduce the architectural complexity by layering numbers of Pie-menus and using pressure as the switch mode to differentiate between the layers. An experiment was conducted to examine the functional efficiency of Layer-Pie-Menu. 
Experimental results showed that the most suitable number of layers that can be controlled by pressure is two.", "fno": "31610364", "keywords": [], "authors": [ { "affiliation": null, "fullName": "Xiangshi Ren", "givenName": "Xiangshi", "surname": "Ren", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Jibin Yin", "givenName": "Jibin", "surname": "Yin", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Tomoki Oya", "givenName": "Tomoki", "surname": "Oya", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Yuanning Liu", "givenName": "Yuanning", "surname": "Liu", "__typename": "ArticleAuthorType" } ], "idPrefix": "icicic", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2008-12-01T00:00:00", "pubType": "proceedings", "pages": "364", "year": "2008", "issn": null, "isbn": "978-0-7695-3161-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "31610363", "articleId": "12OmNwwuDTx", "__typename": "AdjacentArticleType" }, "next": { "fno": "31610365", "articleId": "12OmNz2C1zk", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icm/2011/4522/1/4522a202", "title": "The Dynamic Retrieval Tree Menu Based on Dojo", "doi": null, "abstractUrl": "/proceedings-article/icm/2011/4522a202/12OmNA14A4S", "parentPublication": { "id": "icm/2011/4522/1", "title": "Information Technology, Computer Engineering and Management Sciences, International Conference of", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2000/0750/4/07504547", "title": "On-Line Signature Verification Using Pen-Position, Pen-Pressure and Pen-Inclination Trajectories", "doi": null, "abstractUrl": "/proceedings-article/icpr/2000/07504547/12OmNBKW9vr", "parentPublication": { "id": 
"proceedings/icpr/2000/0750/4", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ozchi/1996/7525/0/75250314", "title": "GIGA: A Pen-Based Constraint Drawing System", "doi": null, "abstractUrl": "/proceedings-article/ozchi/1996/75250314/12OmNBSBk6C", "parentPublication": { "id": "proceedings/ozchi/1996/7525/0", "title": "Proceedings Sixth Australian Conference on Computer-Human Interaction", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2010/6846/0/05444721", "title": "An evaluation of menu properties and pointing techniques in a projection-based VR environment", "doi": null, "abstractUrl": "/proceedings-article/3dui/2010/05444721/12OmNCdBDQK", "parentPublication": { "id": "proceedings/3dui/2010/6846/0", "title": "2010 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/haptics/2004/2112/0/21120224", "title": "A Study of Haptic Linear and Pie Menus in a 3D Fish Tank VR Environment", "doi": null, "abstractUrl": "/proceedings-article/haptics/2004/21120224/12OmNqyUUH0", "parentPublication": { "id": "proceedings/haptics/2004/2112/0", "title": "Haptic Interfaces for Virtual Environment and Teleoperator Systems, International Symposium on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ozchi/1996/7525/0/75250252", "title": "The Oval Menu - Evolution and Evaluation of a Widget", "doi": null, "abstractUrl": "/proceedings-article/ozchi/1996/75250252/12OmNxX3urn", "parentPublication": { "id": "proceedings/ozchi/1996/7525/0", "title": "Proceedings Sixth Australian Conference on Computer-Human Interaction", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2001/0948/0/09480149", "title": "Design and Evaluation of 
Menu Systems for Immersive Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2001/09480149/12OmNy3Agvx", "parentPublication": { "id": "proceedings/vr/2001/0948/0", "title": "Virtual Reality Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892336", "title": "Comparison of a speech-based and a pie-menu-based interaction metaphor for application control", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892336/12OmNyU63tX", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/tabletop/2007/3013/0/30130121", "title": "Improving Menu Interaction for Cluttered Tabletop Setups with User-Drawn Path Menus", "doi": null, "abstractUrl": "/proceedings-article/tabletop/2007/30130121/12OmNyVes0N", "parentPublication": { "id": "proceedings/tabletop/2007/3013/0", "title": "Horizontal Interactive Human-Computer Systems, International Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ18TJZQf6", "doi": "10.1109/VR.2019.8797900", "title": "Menus on the Desk? System Control in DeskVR", "normalizedTitle": "Menus on the Desk? System Control in DeskVR", "abstract": "In this work, we evaluate the impact of passive haptic feedback on touch-based menus, given the constraints and possibilities of a seated, desk-based scenario in VR. Therefore, we compare a menu that once is placed on the surface of a desk and once mid-air on a surface in front of the user. The study design is completed by two conditions without passive haptic feedback. In the conducted user study (n=33), we found effects of passive haptics (present vs-non-present) and menu alignment (desk vs. mid-air) on the task performance and subjective look &amp; feel. However, the race between the conditions was close. An overall winner was the mid-air menu with passive haptic feedback, which however raises hardware requirements.", "abstracts": [ { "abstractType": "Regular", "content": "In this work, we evaluate the impact of passive haptic feedback on touch-based menus, given the constraints and possibilities of a seated, desk-based scenario in VR. Therefore, we compare a menu that once is placed on the surface of a desk and once mid-air on a surface in front of the user. The study design is completed by two conditions without passive haptic feedback. In the conducted user study (n=33), we found effects of passive haptics (present vs-non-present) and menu alignment (desk vs. mid-air) on the task performance and subjective look &amp; feel. However, the race between the conditions was close. 
An overall winner was the mid-air menu with passive haptic feedback, which however raises hardware requirements.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this work, we evaluate the impact of passive haptic feedback on touch-based menus, given the constraints and possibilities of a seated, desk-based scenario in VR. Therefore, we compare a menu that once is placed on the surface of a desk and once mid-air on a surface in front of the user. The study design is completed by two conditions without passive haptic feedback. In the conducted user study (n=33), we found effects of passive haptics (present vs-non-present) and menu alignment (desk vs. mid-air) on the task performance and subjective look & feel. However, the race between the conditions was close. An overall winner was the mid-air menu with passive haptic feedback, which however raises hardware requirements.", "fno": "08797900", "keywords": [ "Haptic Interfaces", "Virtual Reality", "System Control", "Passive Haptic Feedback", "Touch Based Menus", "Seated Desk Based Scenario", "Mid Air Menu", "Desk VR", "Haptic Interfaces", "Task Analysis", "Virtual Reality", "Standards", "Cloud Computing", "Atmospheric Measurements", "Particle Measurements", "Human Centered Concepts Human Computer Interaction HCI Interaction Paradigms", "Virtual Reality", "Human Centered Concepts Human Computer Interaction HCI Visualization", "Empirical Studies In Visualization" ], "authors": [ { "affiliation": "Germany JARA-HPC, Visual Computing Institute, RWTH Aachen University, Aachen, Germany", "fullName": "Daniel Zielasko", "givenName": "Daniel", "surname": "Zielasko", "__typename": "ArticleAuthorType" }, { "affiliation": "Germany JARA-HPC, Visual Computing Institute, RWTH Aachen University, Aachen, Germany", "fullName": "Marcel Krüger", "givenName": "Marcel", "surname": "Krüger", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Trier, Germany", "fullName": "Benjamin Weyers", "givenName": 
"Benjamin", "surname": "Weyers", "__typename": "ArticleAuthorType" }, { "affiliation": "Germany JARA-HPC, Visual Computing Institute, RWTH Aachen University, Aachen, Germany", "fullName": "Torsten W. Kuhlen", "givenName": "Torsten W.", "surname": "Kuhlen", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "1287-1288", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08798068", "articleId": "1cJ0VOT7caI", "__typename": "AdjacentArticleType" }, "next": { "fno": "08798041", "articleId": "1cJ1dohSP0k", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2003/1882/0/18820217", "title": "The Proactive Desk : A New Force Display System for a Digital Desk Using a 2-DOF Linear Induction Motor", "doi": null, "abstractUrl": "/proceedings-article/vr/2003/18820217/12OmNApu5p3", "parentPublication": { "id": "proceedings/vr/2003/1882/0", "title": "Proceedings IEEE Virtual Reality 2003", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2014/3624/0/06798850", "title": "A comparison of different methods for reducing the unintended positional drift accompanying walking-in-place locomotion", "doi": null, "abstractUrl": "/proceedings-article/3dui/2014/06798850/12OmNvCzFbu", "parentPublication": { "id": "proceedings/3dui/2014/3624/0", "title": "2014 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2011/0039/0/05759467", "title": "Effects of sensory feedback while interacting with graphical menus in virtual environments", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2011/05759467/12OmNvoFjQv", "parentPublication": { "id": "proceedings/vr/2011/0039/0", "title": "2011 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446235", "title": "Influences on the Elicitation of Interpersonal Space with Virtual Humans", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446235/13bd1eW2l9F", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040644", "title": "Extended Pie Menus for Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040644/13rRUxC0SEg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2013/03/tth2013030268", "title": "Application of Psychophysical Techniques to Haptic Research", "doi": null, "abstractUrl": "/journal/th/2013/03/tth2013030268/13rRUxcbnHq", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2014/02/06714450", "title": "Planar Hand Motion Guidance Using Fingertip Skin-Stretch Feedback", "doi": null, "abstractUrl": "/journal/th/2014/02/06714450/13rRUzp02oB", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a538", "title": "CardsVR: A Two-Person VR Experience with Passive Haptic Feedback from a Deck of Playing Cards", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a538/1JrRaySJ7So", 
"parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wevr/2019/4050/0/08809589", "title": "Passive Haptic Menus for Desk-Based and HMD-Projected Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/wevr/2019/08809589/1cI61Rx4b9m", "parentPublication": { "id": "proceedings/wevr/2019/4050/0", "title": "2019 IEEE 5th Workshop on Everyday Virtual Reality (WEVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ai4i/2019/4087/0/408700a071", "title": "Haptic Data Acquisition for Perceived Roughness and Hardness of Texture", "doi": null, "abstractUrl": "/proceedings-article/ai4i/2019/408700a071/1i2ojoOMRIA", "parentPublication": { "id": "proceedings/ai4i/2019/4087/0", "title": "2019 Second International Conference on Artificial Intelligence for Industries (AI4I)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNCbU3aW", "title": "2015 IEEE International Conference on Multimedia and Expo (ICME)", "acronym": "icme", "groupId": "1000477", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNro0IfC", "doi": "10.1109/ICME.2015.7177481", "title": "Temporal spotting of human actions from videos containing actor's unintentional motions", "normalizedTitle": "Temporal spotting of human actions from videos containing actor's unintentional motions", "abstract": "This paper proposes a method for temporal action spotting: the temporal segmentation and classification of human actions in videos. Naturally performed human actions often involve actor's unintentional motions. These unintentional motions yield false visual evidences in the videos, which are not related to the performed actions and degrade the performance of temporal action spotting. To deal with this problem, our proposed method empolys a voting-based approach in which the temporal relation between each action and its visual evidence is probabilistically modeled as a voting score function. Due to the approach, our method can robustly spot the target actions even when the actions involve several unintentional motions, because the effect of the false visual evidences yielded by the unintentional motions can be canceled by other visual evidences observed with the target actions. Experimental results showed that the proposed method is highly robust to the unintentional motions.", "abstracts": [ { "abstractType": "Regular", "content": "This paper proposes a method for temporal action spotting: the temporal segmentation and classification of human actions in videos. Naturally performed human actions often involve actor's unintentional motions. These unintentional motions yield false visual evidences in the videos, which are not related to the performed actions and degrade the performance of temporal action spotting. 
To deal with this problem, our proposed method empolys a voting-based approach in which the temporal relation between each action and its visual evidence is probabilistically modeled as a voting score function. Due to the approach, our method can robustly spot the target actions even when the actions involve several unintentional motions, because the effect of the false visual evidences yielded by the unintentional motions can be canceled by other visual evidences observed with the target actions. Experimental results showed that the proposed method is highly robust to the unintentional motions.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper proposes a method for temporal action spotting: the temporal segmentation and classification of human actions in videos. Naturally performed human actions often involve actor's unintentional motions. These unintentional motions yield false visual evidences in the videos, which are not related to the performed actions and degrade the performance of temporal action spotting. To deal with this problem, our proposed method empolys a voting-based approach in which the temporal relation between each action and its visual evidence is probabilistically modeled as a voting score function. Due to the approach, our method can robustly spot the target actions even when the actions involve several unintentional motions, because the effect of the false visual evidences yielded by the unintentional motions can be canceled by other visual evidences observed with the target actions. 
Experimental results showed that the proposed method is highly robust to the unintentional motions.", "fno": "07177481", "keywords": [ "Legged Locomotion", "Voting Based Approach", "Action Recognition", "Temporal Action Spotting", "Temporal Action Segmentation", "Unintentional Motions" ], "authors": [ { "affiliation": "Graduate School of Engineering, Osaka University, Japan", "fullName": "Keita Hara", "givenName": "Keita", "surname": "Hara", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate School of Engineering, Osaka University, Japan", "fullName": "Kazuaki Nakamura", "givenName": "Kazuaki", "surname": "Nakamura", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate School of Engineering, Osaka University, Japan", "fullName": "Noboru Babaguchi", "givenName": "Noboru", "surname": "Babaguchi", "__typename": "ArticleAuthorType" } ], "idPrefix": "icme", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-06-01T00:00:00", "pubType": "proceedings", "pages": "1-6", "year": "2015", "issn": null, "isbn": "978-1-4799-7082-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07177480", "articleId": "12OmNzlUKE9", "__typename": "AdjacentArticleType" }, "next": { "fno": "07177482", "articleId": "12OmNwCsdLa", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/isspit/2012/5606/0/06621298", "title": "Robust classification of human actions from 3D data", "doi": null, "abstractUrl": "/proceedings-article/isspit/2012/06621298/12OmNwD1q1d", "parentPublication": { "id": "proceedings/isspit/2012/5606/0", "title": "2012 IEEE International Symposium on Signal Processing and Information Technology (ISSPIT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2009/4420/0/05459368", "title": "Learning 
actions from the Web", "doi": null, "abstractUrl": "/proceedings-article/iccv/2009/05459368/12OmNwGIcxb", "parentPublication": { "id": "proceedings/iccv/2009/4420/0", "title": "2009 IEEE 12th International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccps/2011/640/0/05945417", "title": "Automatic Generation of Balletic Motions", "doi": null, "abstractUrl": "/proceedings-article/iccps/2011/05945417/12OmNwHhoLQ", "parentPublication": { "id": "proceedings/iccps/2011/640/0", "title": "2011 IEEE/ACM International Conference on Cyber-Physical Systems (ICCPS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/simultech/2014/060/0/07094997", "title": "An effective implementation of agent's complex actions by reusing primitive motions", "doi": null, "abstractUrl": "/proceedings-article/simultech/2014/07094997/12OmNxdm4rR", "parentPublication": { "id": "proceedings/simultech/2014/060/0", "title": "2014 International Conference on Simulation and Modeling Methodologies, Technologies and Applications (SIMULTECH)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hase/2014/3466/0/3466a073", "title": "Modeling and Verification of Humanoid Robot Task Coordination", "doi": null, "abstractUrl": "/proceedings-article/hase/2014/3466a073/12OmNyRxFwS", "parentPublication": { "id": "proceedings/hase/2014/3466/0", "title": "2014 IEEE 15th International Symposium on High-Assurance Systems Engineering", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iacc/2017/1560/0/07976794", "title": "A Step towards Textual Instructions to Virtual Actions", "doi": null, "abstractUrl": "/proceedings-article/iacc/2017/07976794/12OmNzRZpYf", "parentPublication": { "id": "proceedings/iacc/2017/1560/0", "title": "2017 IEEE 7th International Advance Computing 
Conference (IACC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isms/2012/4668/0/4668a312", "title": "Video Stabilization by Detecting Intentional and Unintentional Camera Motions", "doi": null, "abstractUrl": "/proceedings-article/isms/2012/4668a312/12OmNzVGcMk", "parentPublication": { "id": "proceedings/isms/2012/4668/0", "title": "Intelligent Systems, Modelling and Simulation, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040652", "title": "Personified and Multistate Camera Motions for First-Person Navigation in Desktop Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040652/13rRUygT7mX", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bwcca/2010/4236/0/05632369", "title": "Frequency Domain Analysis of Human Motions in Surveillance Video", "doi": null, "abstractUrl": "/proceedings-article/bwcca/2010/05632369/183rAdNYMSs", "parentPublication": { "id": "proceedings/bwcca/2010/4236/0", "title": "2010 International Conference on Broadband, Wireless Computing, Communication and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2019/9552/0/955200b036", "title": "Recognizing Micro Actions in Videos: Learning Motion Details via Segment-Level Temporal Pyramid", "doi": null, "abstractUrl": "/proceedings-article/icme/2019/955200b036/1cdOUiFHyes", "parentPublication": { "id": "proceedings/icme/2019/9552/0", "title": "2019 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxV4itF", "title": "2017 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNx7ouWn", "doi": "10.1109/VR.2017.7892303", "title": "Object location memory error in virtual and real environments", "normalizedTitle": "Object location memory error in virtual and real environments", "abstract": "We aim to further explore the transfer of spatial knowledge from virtual to real spaces. Based on previous research on spatial memory in immersive virtual reality (VR) we ran a study that looked at the effect of three locomotion techniques (joystick, pointing-and-teleporting and walking-in-place) on object location learning and recall. Participants were asked to learn the location of a virtual object in a virtual environment (VE). After a short period of time they were asked to recall the location by placing a real version of the object in the real-world equivalent environment. Results indicate that the average placement error, or distance between original and recalled object location, is approximately 20cm for all locomotion technique conditions. This result is similar to the outcome of a previous study on spatial memory in VEs that used real walking. We report this unexpected finding and suggest further work on spatial memory in VR by recommending the replication of this study in different environments and using objects with a wider diversity of properties, including varying sizes and shapes.", "abstracts": [ { "abstractType": "Regular", "content": "We aim to further explore the transfer of spatial knowledge from virtual to real spaces. Based on previous research on spatial memory in immersive virtual reality (VR) we ran a study that looked at the effect of three locomotion techniques (joystick, pointing-and-teleporting and walking-in-place) on object location learning and recall. 
Participants were asked to learn the location of a virtual object in a virtual environment (VE). After a short period of time they were asked to recall the location by placing a real version of the object in the real-world equivalent environment. Results indicate that the average placement error, or distance between original and recalled object location, is approximately 20cm for all locomotion technique conditions. This result is similar to the outcome of a previous study on spatial memory in VEs that used real walking. We report this unexpected finding and suggest further work on spatial memory in VR by recommending the replication of this study in different environments and using objects with a wider diversity of properties, including varying sizes and shapes.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We aim to further explore the transfer of spatial knowledge from virtual to real spaces. Based on previous research on spatial memory in immersive virtual reality (VR) we ran a study that looked at the effect of three locomotion techniques (joystick, pointing-and-teleporting and walking-in-place) on object location learning and recall. Participants were asked to learn the location of a virtual object in a virtual environment (VE). After a short period of time they were asked to recall the location by placing a real version of the object in the real-world equivalent environment. Results indicate that the average placement error, or distance between original and recalled object location, is approximately 20cm for all locomotion technique conditions. This result is similar to the outcome of a previous study on spatial memory in VEs that used real walking. 
We report this unexpected finding and suggest further work on spatial memory in VR by recommending the replication of this study in different environments and using objects with a wider diversity of properties, including varying sizes and shapes.", "fno": "07892303", "keywords": [ "Legged Locomotion", "Virtual Environments", "Navigation", "Visualization", "Standards", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial Augmented And Virtual Realities", "I 3 7 Computer Graphics Three Dimensional Graphics And Realism Virtual Reality" ], "authors": [ { "affiliation": "University College London, UK", "fullName": "Mengxin Xu", "givenName": "Mengxin", "surname": "Xu", "__typename": "ArticleAuthorType" }, { "affiliation": "University College London, UK", "fullName": "María Murcia-López", "givenName": "María", "surname": "Murcia-López", "__typename": "ArticleAuthorType" }, { "affiliation": "University College London, UK", "fullName": "Anthony Steed", "givenName": "Anthony", "surname": "Steed", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-01-01T00:00:00", "pubType": "proceedings", "pages": "315-316", "year": "2017", "issn": "2375-5334", "isbn": "978-1-5090-6647-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07892302", "articleId": "12OmNznkKb4", "__typename": "AdjacentArticleType" }, "next": { "fno": "07892304", "articleId": "12OmNqzcvSl", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2017/6647/0/07892276", "title": "A comparison of methods for navigation and wayfinding in large virtual environments using walking", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892276/12OmNAQJzMG", "parentPublication": { "id": "proceedings/vr/2017/6647/0", 
"title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2014/2871/0/06802053", "title": "An enhanced steering algorithm for redirected walking in virtual environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2014/06802053/12OmNCbU2Wt", "parentPublication": { "id": "proceedings/vr/2014/2871/0", "title": "2014 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2012/1204/0/06184180", "title": "From virtual to actual mobility: Assessing the benefits of active locomotion through an immersive virtual environment using a motorized wheelchair", "doi": null, "abstractUrl": "/proceedings-article/3dui/2012/06184180/12OmNxdDFLw", "parentPublication": { "id": "proceedings/3dui/2012/1204/0", "title": "2012 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2013/6097/0/06550194", "title": "Flexible spaces: Dynamic layout generation for infinite walking in virtual environments", "doi": null, "abstractUrl": "/proceedings-article/3dui/2013/06550194/12OmNyFU75b", "parentPublication": { "id": "proceedings/3dui/2013/6097/0", "title": "2013 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446383", "title": "Simulated Reference Frame: A Cost-Effective Solution to Improve Spatial Orientation in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446383/13bd1fHrlRE", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/07/07946183", "title": "Walking with Virtual People: Evaluation of Locomotion 
Interfaces in Dynamic Environments", "doi": null, "abstractUrl": "/journal/tg/2018/07/07946183/13rRUEgs2C2", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714054", "title": "Remote research on locomotion interfaces for virtual reality: Replication of a lab-based study on teleporting interfaces", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714054/1B0XZAXWaIg", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798286", "title": "Evaluating the Effectiveness of Redirected Walking with Auditory Distractors for Navigation in Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798286/1cJ0PIoIPV6", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a452", "title": "Studying the Inter-Relation Between Locomotion Techniques and Embodiment in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a452/1pysvNRUnD2", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a391", "title": "The Effectiveness of Locomotion Interfaces Depends on Self-Motion Cues, Environmental Cues, and the Individual", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a391/1tnXFgLAfSw", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 
IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1eSlyu1", "doi": "10.1109/VR.2018.8446177", "title": "You Shall Not Pass: Non-Intrusive Feedback for Virtual Walls in VR Environments with Room-Scale Mapping", "normalizedTitle": "You Shall Not Pass: Non-Intrusive Feedback for Virtual Walls in VR Environments with Room-Scale Mapping", "abstract": "Room-scale mapping facilitates natural locomotion in virtual reality (VR), but it creates a problem when encountering virtual walls. In traditional video games, player avatars can simply be prevented from moving through walls. This is not possible in VR with room-scale mapping due to the lack of physical boundaries. Game design is either limited by avoiding walls, or the players might ignore them, which endangers the immersion and the overall game experience. To prevent players from walking through walls, we propose a combination of auditory, visual, and vibrotactile feedback for wall collisions. This solution can be implemented with standard game engine features, does not require any additional hardware or sensors, and is independent of game concept and narrative. A between-group study with 46 participants showed that a large majority of players without the feedback did pass through virtual walls, while 87% of the participants with the feedback refrained from walking through walls. The study found no notable differences in game experience.", "abstracts": [ { "abstractType": "Regular", "content": "Room-scale mapping facilitates natural locomotion in virtual reality (VR), but it creates a problem when encountering virtual walls. In traditional video games, player avatars can simply be prevented from moving through walls. 
This is not possible in VR with room-scale mapping due to the lack of physical boundaries. Game design is either limited by avoiding walls, or the players might ignore them, which endangers the immersion and the overall game experience. To prevent players from walking through walls, we propose a combination of auditory, visual, and vibrotactile feedback for wall collisions. This solution can be implemented with standard game engine features, does not require any additional hardware or sensors, and is independent of game concept and narrative. A between-group study with 46 participants showed that a large majority of players without the feedback did pass through virtual walls, while 87% of the participants with the feedback refrained from walking through walls. The study found no notable differences in game experience.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Room-scale mapping facilitates natural locomotion in virtual reality (VR), but it creates a problem when encountering virtual walls. In traditional video games, player avatars can simply be prevented from moving through walls. This is not possible in VR with room-scale mapping due to the lack of physical boundaries. Game design is either limited by avoiding walls, or the players might ignore them, which endangers the immersion and the overall game experience. To prevent players from walking through walls, we propose a combination of auditory, visual, and vibrotactile feedback for wall collisions. This solution can be implemented with standard game engine features, does not require any additional hardware or sensors, and is independent of game concept and narrative. A between-group study with 46 participants showed that a large majority of players without the feedback did pass through virtual walls, while 87% of the participants with the feedback refrained from walking through walls. 
The study found no notable differences in game experience.", "fno": "08446177", "keywords": [ "Avatars", "Computer Games", "Virtual Reality", "Game Concept", "Narrative", "Players", "Virtual Walls", "Game Experience", "Nonintrusive Feedback", "VR Environments", "Virtual Reality", "Traditional Video Games", "Player Avatars", "Game Design", "Avoiding Walls", "Wall Collisions", "Standard Game Engine Features", "Room Scale Mapping", "Natural Locomotion", "Games", "Legged Locomotion", "Visualization", "Hardware", "Haptic Interfaces", "Resists", "Vibrations", "Virtual Reality", "Virtual Walls", "Tactile Feedback", "Haptic Feedback", "Visual Feedback", "Auditory Feedback", "Locomotion", "Game Design", "K 8 0 Personal Computing General Games", "H 5 2 Information Interfaces And Presentation User Interfaces Interaction Styles" ], "authors": [ { "affiliation": "University of the Arts Bremen, Germany", "fullName": "Mette Boldt", "givenName": "Mette", "surname": "Boldt", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Michael Bonfert", "givenName": "Michael", "surname": "Bonfert", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Inga Lehne", "givenName": "Inga", "surname": "Lehne", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Melina Cahnbley", "givenName": "Melina", "surname": "Cahnbley", "__typename": "ArticleAuthorType" }, { "affiliation": "Univ. 
of Bremen, Bremen, Germany", "fullName": "Kim Korsching", "givenName": "Kim", "surname": "Korsching", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Ioannis Bikas", "givenName": "Ioannis", "surname": "Bikas", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Stefan Finke", "givenName": "Stefan", "surname": "Finke", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Martin Hanci", "givenName": "Martin", "surname": "Hanci", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Valentin Kraft", "givenName": "Valentin", "surname": "Kraft", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Boxuan Liu", "givenName": "Boxuan", "surname": "Liu", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Tram Nguyen", "givenName": "Tram", "surname": "Nguyen", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Alina Panova", "givenName": "Alina", "surname": "Panova", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Ramneek Singh", "givenName": "Ramneek", "surname": "Singh", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Alexander Steenbergen", "givenName": "Alexander", "surname": "Steenbergen", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Rainer Malaka", "givenName": "Rainer", "surname": "Malaka", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Bremen, Germany", "fullName": "Jan Smeddinck", "givenName": "Jan", "surname": "Smeddinck", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, 
"hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "143-150", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446180", "articleId": "13bd1sv5NyE", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446250", "articleId": "13bd1eTtWYT", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2016/0842/0/07460037", "title": "An initial exploration of a multi-sensory design space: Tactile support for walking in immersive virtual environments", "doi": null, "abstractUrl": "/proceedings-article/3dui/2016/07460037/12OmNrYCXTx", "parentPublication": { "id": "proceedings/3dui/2016/0842/0", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223390", "title": "Third person's footsteps enhanced moving sensation of seated person", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223390/12OmNxFsmDI", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/percom/2018/3224/0/08444589", "title": "Crowd Counting Through Walls Using WiFi", "doi": null, "abstractUrl": "/proceedings-article/percom/2018/08444589/13bd1eSlysm", "parentPublication": { "id": "proceedings/percom/2018/3224/0", "title": "2018 IEEE International Conference on Pervasive Computing and Communications (PerCom)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446385", "title": "Game Room Map Integration in Virtual Environments for Free Walking", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2018/08446385/13bd1fZBGd6", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2017/03/07450621", "title": "Emotion Rendering in Plantar Vibro-Tactile Simulations of Imagined Walking Styles", "doi": null, "abstractUrl": "/journal/ta/2017/03/07450621/13rRUwIF6cq", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2016/03/07444168", "title": "Comparison of Walking and Traveling-Wave Piezoelectric Motors as Actuators in Kinesthetic Haptic Devices", "doi": null, "abstractUrl": "/journal/th/2016/03/07444168/13rRUxDqS8t", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09911682", "title": "Effect of Vibrations on Impression of Walking and Embodiment With First- and Third-Person Avatar", "doi": null, "abstractUrl": "/journal/tg/5555/01/09911682/1HeiWQWKlTG", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798231", "title": "The Effect of Hanger Reflex on Virtual Reality Redirected Walking", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798231/1cJ0KBrAUYE", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tm/2021/06/09003416", "title": "GaitWay: Monitoring and Recognizing Gait Speed Through the Walls", "doi": null, 
"abstractUrl": "/journal/tm/2021/06/09003416/1hy7Z6oVnkk", "parentPublication": { "id": "trans/tm", "title": "IEEE Transactions on Mobile Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2020/7675/0/767500a264", "title": "Towards an AR game for walking rehabilitation: Preliminary study of the impact of augmented feedback modalities on walking speed", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2020/767500a264/1pBMjJvQae4", "parentPublication": { "id": "proceedings/ismar-adjunct/2020/7675/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "19F1LC52tjO", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "acronym": "ismar-adjunct", "groupId": "1810084", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "19F1PlWtKJa", "doi": "10.1109/ISMAR-Adjunct.2018.00079", "title": "Walking-in-Place for VR Navigation Independent of Gaze Direction Using a Waist-Worn Inertial Measurement Unit", "normalizedTitle": "Walking-in-Place for VR Navigation Independent of Gaze Direction Using a Waist-Worn Inertial Measurement Unit", "abstract": "Many techniques have been proposed for navigation using head-mounted-displays (HMDs) in virtual reality (VR). A walking-in-place (WIP) interface for virtual locomotion provides a high presence and an immersive experience in a virtual environment (VE). However, most of the WIP techniques can only navigate users in the direction of their gaze. Other WIP methods considering virtual locomotion direction have complex configurations or are only feasible when the tracking spaces are not limited. This paper proposes a WIP interface independent of gaze direction based on the data analysis of waist-mounted inertial sensors. Our method can navigate in the locomotion direction by calculating the orientation of the pelvis. We experimentally compared two WIP methods using a navigation task that required participants to periodically observe the surrounding VE: (1) Conventional WIP (gaze-based direction) (2) Proposed WIP (pelvis-based direction). While there was no difference in learnability or cybersickness between the two methods, the proposed method had shorter task time and higher efficiency.", "abstracts": [ { "abstractType": "Regular", "content": "Many techniques have been proposed for navigation using head-mounted-displays (HMDs) in virtual reality (VR). 
A walking-in-place (WIP) interface for virtual locomotion provides a high presence and an immersive experience in a virtual environment (VE). However, most of the WIP techniques can only navigate users in the direction of their gaze. Other WIP methods considering virtual locomotion direction have complex configurations or are only feasible when the tracking spaces are not limited. This paper proposes a WIP interface independent of gaze direction based on the data analysis of waist-mounted inertial sensors. Our method can navigate in the locomotion direction by calculating the orientation of the pelvis. We experimentally compared two WIP methods using a navigation task that required participants to periodically observe the surrounding VE: (1) Conventional WIP (gaze-based direction) (2) Proposed WIP (pelvis-based direction). While there was no difference in learnability or cybersickness between the two methods, the proposed method had shorter task time and higher efficiency.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Many techniques have been proposed for navigation using head-mounted-displays (HMDs) in virtual reality (VR). A walking-in-place (WIP) interface for virtual locomotion provides a high presence and an immersive experience in a virtual environment (VE). However, most of the WIP techniques can only navigate users in the direction of their gaze. Other WIP methods considering virtual locomotion direction have complex configurations or are only feasible when the tracking spaces are not limited. This paper proposes a WIP interface independent of gaze direction based on the data analysis of waist-mounted inertial sensors. Our method can navigate in the locomotion direction by calculating the orientation of the pelvis. We experimentally compared two WIP methods using a navigation task that required participants to periodically observe the surrounding VE: (1) Conventional WIP (gaze-based direction) (2) Proposed WIP (pelvis-based direction). 
While there was no difference in learnability or cybersickness between the two methods, the proposed method had shorter task time and higher efficiency.", "fno": "08699289", "keywords": [ "Helmet Mounted Displays", "Inertial Navigation", "Sensors", "User Interfaces", "Virtual Reality", "Gaze Direction", "Waist Worn Inertial Measurement Unit", "Head Mounted Displays", "Virtual Reality", "Walking In Place Interface", "Immersive Experience", "Virtual Environment", "WIP Techniques", "WIP Methods", "Virtual Locomotion Direction", "Complex Configurations", "WIP Interface", "Waist Mounted Inertial Sensors", "Navigation Task", "Gaze Based Direction", "VR Navigation", "Data Analysis", "Pelvis Orientation", "Pelvis Based Direction", "Cybersickness", "Learnability", "Task Analysis", "Navigation", "Legged Locomotion", "Acceleration", "Sensors", "Tracking", "Foot", "Computing Methodologies X 2014 Computer Graphics X 2014 Graphics Systems And Interfaces X 2014 Virtual Reality", "Human Centered Computing X 2014 Human Computer Interaction HCI X 2014 Interaction Techniques" ], "authors": [ { "affiliation": "Electronics and Telecommunications Research Institute", "fullName": "Chanho Park", "givenName": "Chanho", "surname": "Park", "__typename": "ArticleAuthorType" }, { "affiliation": "Electronics and Telecommunications Research Institute", "fullName": "Kyungho Jang", "givenName": "Kyungho", "surname": "Jang", "__typename": "ArticleAuthorType" }, { "affiliation": "Electronics and Telecommunications Research Institute", "fullName": "Junsuk Lee", "givenName": "Junsuk", "surname": "Lee", "__typename": "ArticleAuthorType" } ], "idPrefix": "ismar-adjunct", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-10-01T00:00:00", "pubType": "proceedings", "pages": "254-257", "year": "2018", "issn": null, "isbn": "978-1-5386-7592-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { 
"previous": { "fno": "08699259", "articleId": "19F1SLp4sx2", "__typename": "AdjacentArticleType" }, "next": { "fno": "08699246", "articleId": "19F1OIk174Q", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2013/6097/0/06550193", "title": "Tapping-In-Place: Increasing the naturalness of immersive walking-in-place locomotion through novel gestural input", "doi": null, "abstractUrl": "/proceedings-article/3dui/2013/06550193/12OmNAnMuyq", "parentPublication": { "id": "proceedings/3dui/2013/6097/0", "title": "2013 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504772", "title": "Evaluating two alternative walking in place interfaces for virtual reality gaming", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504772/12OmNCf1Dnb", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2016/0842/0/07460030", "title": "Eye tracking for locomotion prediction in redirected walking", "doi": null, "abstractUrl": "/proceedings-article/3dui/2016/07460030/12OmNz4SOsF", "parentPublication": { "id": "proceedings/3dui/2016/0842/0", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446165", "title": "A Threefold Approach for Precise and Efficient Locomotion in Virtual Environments with Varying Accessibility", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446165/13bd1AIBM28", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { 
"id": "proceedings/vr/2018/3365/0/08446180", "title": "Effect of Virtual Human Gaze Behaviour During an Orthogonal Collision Avoidance Walking Task", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446180/13bd1sv5NyE", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404569", "title": "Establishing the Range of Perceptually Natural Visual Walking Speeds for Virtual Walking-In-Place Locomotion", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404569/13rRUxAASTb", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09978713", "title": "Revisiting Walking-in-Place by Introducing Step-Height Control, Elastic Input, and Pseudo-Haptic Feedback", "doi": null, "abstractUrl": "/journal/tg/5555/01/09978713/1IXUnnVaWoE", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049680", "title": "Assisted walking-in-place: Introducing assisted motion to walking-by-cycling in embodied virtual reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049680/1KYolEFtr6U", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798345", "title": "Investigation of Visual Self-Representation for a Walking-in-Place Navigation System in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798345/1cJ1hpkUgHS", "parentPublication": { "id": 
"proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a498", "title": "SHeF-WIP: Walking-in-Place based on Step Height and Frequency for Wider Range of Virtual Speed", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a498/1tnWFlvbESk", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1CJbEwHHqEg", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1CJbMX1TyoM", "doi": "10.1109/VR51125.2022.00104", "title": "Evaluating the Impact of Limited Physical Space on the Navigation Performance of Two Locomotion Methods in Immersive Virtual Environments", "normalizedTitle": "Evaluating the Impact of Limited Physical Space on the Navigation Performance of Two Locomotion Methods in Immersive Virtual Environments", "abstract": "Consumer level virtual experiences almost always occur when physical space is limited, either by the constraints of an indoor space or of a tracked area. This observation coupled with the need for movement through large virtual spaces has resulted in a proliferation of research into locomotion interfaces that decouples movement through the virtual environment from movement in the real world. While many locomotion interfaces support movement of some kind in the real world, some do not. This paper examines the effect of the amount of physical space used in the real world on one popular locomotion interface, resetting, when compared to a locomotion interface that requires minimal physical space, walking in place. The metric used to compare the two locomotion interfaces was navigation performance, specifically, the acquisition of survey knowledge. We find that, while there are trade-offs between the two methods, walking in place is preferable in small spaces.", "abstracts": [ { "abstractType": "Regular", "content": "Consumer level virtual experiences almost always occur when physical space is limited, either by the constraints of an indoor space or of a tracked area. 
This observation coupled with the need for movement through large virtual spaces has resulted in a proliferation of research into locomotion interfaces that decouples movement through the virtual environment from movement in the real world. While many locomotion interfaces support movement of some kind in the real world, some do not. This paper examines the effect of the amount of physical space used in the real world on one popular locomotion interface, resetting, when compared to a locomotion interface that requires minimal physical space, walking in place. The metric used to compare the two locomotion interfaces was navigation performance, specifically, the acquisition of survey knowledge. We find that, while there are trade-offs between the two methods, walking in place is preferable in small spaces.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Consumer level virtual experiences almost always occur when physical space is limited, either by the constraints of an indoor space or of a tracked area. This observation coupled with the need for movement through large virtual spaces has resulted in a proliferation of research into locomotion interfaces that decouples movement through the virtual environment from movement in the real world. While many locomotion interfaces support movement of some kind in the real world, some do not. This paper examines the effect of the amount of physical space used in the real world on one popular locomotion interface, resetting, when compared to a locomotion interface that requires minimal physical space, walking in place. The metric used to compare the two locomotion interfaces was navigation performance, specifically, the acquisition of survey knowledge. 
We find that, while there are trade-offs between the two methods, walking in place is preferable in small spaces.", "fno": "961700a821", "keywords": [ "Human Computer Interaction", "Virtual Reality", "Immersive Virtual Environments", "Consumer Level Virtual Experiences", "Indoor Space", "Virtual Spaces", "Locomotion Interface", "Navigation Performance", "Physical Space", "Walking In Place", "Legged Locomotion", "Measurement", "Costs", "Three Dimensional Displays", "Navigation", "Tracking", "Sociology", "Virtual Reality", "Locomotion Methods", "Walking In Place", "Resetting" ], "authors": [ { "affiliation": "Verizon Wireless", "fullName": "Richard A. Paris", "givenName": "Richard A.", "surname": "Paris", "__typename": "ArticleAuthorType" }, { "affiliation": "Trinity College Dublin", "fullName": "Lauren E. Buck", "givenName": "Lauren E.", "surname": "Buck", "__typename": "ArticleAuthorType" }, { "affiliation": "Vanderbilt University", "fullName": "Timothy P. McNamara", "givenName": "Timothy P.", "surname": "McNamara", "__typename": "ArticleAuthorType" }, { "affiliation": "Vanderbilt University", "fullName": "Bobby Bodenheimer", "givenName": "Bobby", "surname": "Bodenheimer", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-03-01T00:00:00", "pubType": "proceedings", "pages": "821-831", "year": "2022", "issn": null, "isbn": "978-1-6654-9617-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "961700a812", "articleId": "1CJczvrAl0Y", "__typename": "AdjacentArticleType" }, "next": { "fno": "961700a832", "articleId": "1CJbR6qnKdW", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cw/2015/9403/0/9403a229", "title": "Using Locomotion Models for Estimating Walking Targets in Immersive Virtual Environments", 
"doi": null, "abstractUrl": "/proceedings-article/cw/2015/9403a229/12OmNB7LvFe", "parentPublication": { "id": "proceedings/cw/2015/9403/0", "title": "2015 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892348", "title": "Steering locomotion by vestibular perturbation in room-scale VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892348/12OmNvrMUgU", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2012/1204/0/06184180", "title": "From virtual to actual mobility: Assessing the benefits of active locomotion through an immersive virtual environment using a motorized wheelchair", "doi": null, "abstractUrl": "/proceedings-article/3dui/2012/06184180/12OmNxdDFLw", "parentPublication": { "id": "proceedings/3dui/2012/1204/0", "title": "2012 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2016/0842/0/07460030", "title": "Eye tracking for locomotion prediction in redirected walking", "doi": null, "abstractUrl": "/proceedings-article/3dui/2016/07460030/12OmNz4SOsF", "parentPublication": { "id": "proceedings/3dui/2016/0842/0", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446165", "title": "A Threefold Approach for Precise and Efficient Locomotion in Virtual Environments with Varying Accessibility", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446165/13bd1AIBM28", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, 
"__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446130", "title": "Rapid, Continuous Movement Between Nodes as an Accessible Virtual Reality Locomotion Technique", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446130/13bd1f3HvEx", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2012/04/ttg2012040538", "title": "Redirecting Walking and Driving for Natural Navigation in Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2012/04/ttg2012040538/13rRUwgQpDs", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404569", "title": "Establishing the Range of Perceptually Natural Visual Walking Speeds for Virtual Walking-In-Place Locomotion", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404569/13rRUxAASTb", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798070", "title": "User-Centered Extension of a Locomotion Typology: Movement-Related Sensory Feedback and Spatial Learning", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798070/1cJ18ja0QXC", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a391", "title": "The Effectiveness of Locomotion Interfaces Depends on Self-Motion Cues, Environmental Cues, and the Individual", "doi": null, "abstractUrl": 
"/proceedings-article/vrw/2021/405700a391/1tnXFgLAfSw", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1jIx7fmpQ9a", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2020", "__typename": "ProceedingType" }, "article": { "id": "1jIx7XMm676", "doi": "10.1109/VR46266.2020.00032", "title": "Feature Guided Path Redirection for VR Navigation", "normalizedTitle": "Feature Guided Path Redirection for VR Navigation", "abstract": "Path redirection for virtual reality (VR) navigation allows the user to explore a large virtual environment (VE) while the VR application is hosted in a limited physical space. Static mapping redirection methods deform the virtual scene to fit the physical space. The challenge is to deform the virtual scene in a reasonable way, making the distortions friendly to the user&#x2019;s visual perception. In this paper we propose a feature-guided path redirection method that finds and takes into account the visual features of 3D virtual scenes. In a first offline step, a collection of view-independent and view-dependent visual features of the VE are extracted and stored in a visual feature map. Then, in a second offline step, the navigation path is deformed to fit in the confines of the available physical space through a mass-spring system optimization, according to distortion sensitive factors derived from the visual feature map. Finally, a novel detail preserving rendering algorithm is employed to preserve the original visual detail as the user navigates the VE on the redirected path. We tested our method on several scenes, where our method showed a reduced VE 3D mesh distortion, when compared to the path redirection methods without feature guidance.", "abstracts": [ { "abstractType": "Regular", "content": "Path redirection for virtual reality (VR) navigation allows the user to explore a large virtual environment (VE) while the VR application is hosted in a limited physical space. 
Static mapping redirection methods deform the virtual scene to fit the physical space. The challenge is to deform the virtual scene in a reasonable way, making the distortions friendly to the user&#x2019;s visual perception. In this paper we propose a feature-guided path redirection method that finds and takes into account the visual features of 3D virtual scenes. In a first offline step, a collection of view-independent and view-dependent visual features of the VE are extracted and stored in a visual feature map. Then, in a second offline step, the navigation path is deformed to fit in the confines of the available physical space through a mass-spring system optimization, according to distortion sensitive factors derived from the visual feature map. Finally, a novel detail preserving rendering algorithm is employed to preserve the original visual detail as the user navigates the VE on the redirected path. We tested our method on several scenes, where our method showed a reduced VE 3D mesh distortion, when compared to the path redirection methods without feature guidance.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Path redirection for virtual reality (VR) navigation allows the user to explore a large virtual environment (VE) while the VR application is hosted in a limited physical space. Static mapping redirection methods deform the virtual scene to fit the physical space. The challenge is to deform the virtual scene in a reasonable way, making the distortions friendly to the user’s visual perception. In this paper we propose a feature-guided path redirection method that finds and takes into account the visual features of 3D virtual scenes. In a first offline step, a collection of view-independent and view-dependent visual features of the VE are extracted and stored in a visual feature map. 
Then, in a second offline step, the navigation path is deformed to fit in the confines of the available physical space through a mass-spring system optimization, according to distortion sensitive factors derived from the visual feature map. Finally, a novel detail preserving rendering algorithm is employed to preserve the original visual detail as the user navigates the VE on the redirected path. We tested our method on several scenes, where our method showed a reduced VE 3D mesh distortion, when compared to the path redirection methods without feature guidance.", "fno": "09089579", "keywords": [ "Visualization", "Distortion", "Feature Extraction", "Geometry", "Navigation", "Three Dimensional Displays", "Legged Locomotion", "Virtual Reality", "Navigation", "Path Redirection" ], "authors": [ { "affiliation": "Beihang University,State Key Laboratory of Virtual Reality Technology and Systems, Beijing Advanced Innovation Center for Biomedical Engineering,Beijing,China", "fullName": "Antong Cao", "givenName": "Antong", "surname": "Cao", "__typename": "ArticleAuthorType" }, { "affiliation": "Beihang University,State Key Laboratory of Virtual Reality Technology and Systems, Beijing Advanced Innovation Center for Biomedical Engineering,Beijing,China", "fullName": "Lili Wang", "givenName": "Lili", "surname": "Wang", "__typename": "ArticleAuthorType" }, { "affiliation": "Beihang University,State Key Laboratory of Virtual Reality Technology and Systems, Beijing Advanced Innovation Center for Biomedical Engineering,Beijing,China", "fullName": "Yi Liu", "givenName": "Yi", "surname": "Liu", "__typename": "ArticleAuthorType" }, { "affiliation": "Purdue University,Department of Computer Science,United States", "fullName": "Voicu Popescu", "givenName": "Voicu", "surname": "Popescu", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2020-03-01T00:00:00", "pubType": 
"proceedings", "pages": "137-145", "year": "2020", "issn": null, "isbn": "978-1-7281-5608-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09089539", "articleId": "1jIx8nPLhmg", "__typename": "AdjacentArticleType" }, "next": { "fno": "09089569", "articleId": "1jIxfFs8qgo", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dui/2016/0842/0/07460032", "title": "Automated path prediction for redirected walking using navigation meshes", "doi": null, "abstractUrl": "/proceedings-article/3dui/2016/07460032/12OmNBKEymO", "parentPublication": { "id": "proceedings/3dui/2016/0842/0", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504752", "title": "Disguising rotational gain for redirected walking in virtual reality: Effect of visual density", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504752/12OmNyr8YkS", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/04/07833190", "title": "Bending the Curve: Sensitivity to Bending of Curved Paths and Application in Room-Scale VR", "doi": null, "abstractUrl": "/journal/tg/2017/04/07833190/13rRUIIVlcQ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699289", "title": "Walking-in-Place for VR Navigation Independent of Gaze Direction Using a Waist-Worn Inertial Measurement Unit", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699289/19F1PlWtKJa", "parentPublication": { "id": 
"proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a167", "title": "Foldable Spaces: An Overt Redirection Approach for Natural Walking in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a167/1CJc5J6RYYM", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a464", "title": "RedirectedDoors: Redirection While Opening Doors in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2022/961700a464/1CJc9xfqBSo", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09893374", "title": "A Segmented Redirection Mapping Method for Roadmaps of Large Constrained Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/5555/01/09893374/1GGLIh8KmSA", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090521", "title": "A Constrained Path Redirection for Passive Haptics", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090521/1jIxpAQuq8o", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/09/09332290", "title": "Quantifiable 
Fine-Grain Occlusion Removal Assistance for Efficient VR Exploration", "doi": null, "abstractUrl": "/journal/tg/2022/09/09332290/1qzsRxXpW4o", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a075", "title": "Blink-Suppressed Hand Redirection", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a075/1tuAtVjRLUc", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNC3XhhG", "title": "2014 7th International Conference on Advanced Software Engineering and Its Applications (ASEA)", "acronym": "asea", "groupId": "1002550", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNBrlPzE", "doi": "10.1109/ASEA.2014.19", "title": "A Design of Efficient Medical Information System to Enhance Health Behaviors After Radical Prostatectomy", "normalizedTitle": "A Design of Efficient Medical Information System to Enhance Health Behaviors After Radical Prostatectomy", "abstract": "This paper is to design an efficient medical information system to enhancehealth behaviors after radical prostatectomy. The subjects of this paper were a total of142 patients who had visited a general hospital located in Metropolitan area. Theresults of this study are as follows. First, for soybean intake, positive change of healthbehaviors diminished the progression rate of prostate cancer(31.24±0.35, t=-0.57,p=.000). Second, this paper found that the health promotion behavior in prostatecancer patients was increased by 69.2-73.6% compared with the previous status.Therefore, systematic adoption of the medical information system to minimize thedamage of prostate cancer will contribute effectively to the rapid disease recovery andprevention.", "abstracts": [ { "abstractType": "Regular", "content": "This paper is to design an efficient medical information system to enhancehealth behaviors after radical prostatectomy. The subjects of this paper were a total of142 patients who had visited a general hospital located in Metropolitan area. Theresults of this study are as follows. First, for soybean intake, positive change of healthbehaviors diminished the progression rate of prostate cancer(31.24±0.35, t=-0.57,p=.000). 
Second, this paper found that the health promotion behavior in prostatecancer patients was increased by 69.2-73.6% compared with the previous status.Therefore, systematic adoption of the medical information system to minimize thedamage of prostate cancer will contribute effectively to the rapid disease recovery andprevention.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper is to design an efficient medical information system to enhancehealth behaviors after radical prostatectomy. The subjects of this paper were a total of142 patients who had visited a general hospital located in Metropolitan area. Theresults of this study are as follows. First, for soybean intake, positive change of healthbehaviors diminished the progression rate of prostate cancer(31.24±0.35, t=-0.57,p=.000). Second, this paper found that the health promotion behavior in prostatecancer patients was increased by 69.2-73.6% compared with the previous status.Therefore, systematic adoption of the medical information system to minimize thedamage of prostate cancer will contribute effectively to the rapid disease recovery andprevention.", "fno": "07023890", "keywords": [ "Medical Information Systems", "Prostate Cancer", "Diseases", "Educational Institutions", "Systematics", "Tumors", "Efficiency", "Medical Information System", "Health Behaviors", "Radical Prostatectomy" ], "authors": [ { "affiliation": null, "fullName": "Seong-Ran Lee", "givenName": "Seong-Ran", "surname": "Lee", "__typename": "ArticleAuthorType" } ], "idPrefix": "asea", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-12-01T00:00:00", "pubType": "proceedings", "pages": "26-31", "year": "2014", "issn": null, "isbn": "978-1-4799-7760-4", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07023889", "articleId": "12OmNAOKnPp", "__typename": "AdjacentArticleType" }, "next": { 
"fno": "07023891", "articleId": "12OmNyo1nRx", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/niss/2009/3687/0/3687b063", "title": "Applying Data Mining for Prostate Cancer", "doi": null, "abstractUrl": "/proceedings-article/niss/2009/3687b063/12OmNzC5SNj", "parentPublication": { "id": "proceedings/niss/2009/3687/0", "title": "2009 International Conference on New Trends in Information and Service Science (NISS 2009)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ichi/2018/5377/0/537701a380", "title": "Mapping the Treatment Journey for Patients with Prostate Cancer", "doi": null, "abstractUrl": "/proceedings-article/ichi/2018/537701a380/12OmNzSQdji", "parentPublication": { "id": "proceedings/ichi/2018/5377/0", "title": "2018 IEEE International Conference on Healthcare Informatics (ICHI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/nicoint/2018/6909/0/690901a074", "title": "Development of Communication Tools for Informed Consent Using VR Technology", "doi": null, "abstractUrl": "/proceedings-article/nicoint/2018/690901a074/13bd1gzWkRB", "parentPublication": { "id": "proceedings/nicoint/2018/6909/0", "title": "2018 Nicograph International (NicoInt)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tb/2019/03/08329998", "title": "Differentiating Prostate Cancer from Benign Prostatic Hyperplasia Using PSAD Based on Machine Learning: Single-Center Retrospective Study in China", "doi": null, "abstractUrl": "/journal/tb/2019/03/08329998/13rRUIIVlaY", "parentPublication": { "id": "trans/tb", "title": "IEEE/ACM Transactions on Computational Biology and Bioinformatics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2017/01/07539638", "title": "PROACT: Iterative Design of a Patient-Centered 
Visualization for Effective Prostate Cancer Health Risk Communication", "doi": null, "abstractUrl": "/journal/tg/2017/01/07539638/13rRUxYINfk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2018/3788/0/08545754", "title": "Fully convolutional neural networks for prostate cancer detection using multi-parametric magnetic resonance images: an initial investigation", "doi": null, "abstractUrl": "/proceedings-article/icpr/2018/08545754/17D45W9KVGP", "parentPublication": { "id": "proceedings/icpr/2018/3788/0", "title": "2018 24th International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bibe/2018/6217/0/247100a155", "title": "Identification of the PCa28 Gene Signature as a Predictor in Prostate Cancer", "doi": null, "abstractUrl": "/proceedings-article/bibe/2018/247100a155/17D45WgziNP", "parentPublication": { "id": "proceedings/bibe/2018/6217/0", "title": "2018 IEEE 18th International Conference on Bioinformatics and Bioengineering (BIBE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cbms/2019/2286/0/228600a355", "title": "Early Radiomic Experiences in Classifying Prostate Cancer Aggressiveness using 3D Local Binary Patterns", "doi": null, "abstractUrl": "/proceedings-article/cbms/2019/228600a355/1cdO39adoQM", "parentPublication": { "id": "proceedings/cbms/2019/2286/0", "title": "2019 IEEE 32nd International Symposium on Computer-Based Medical Systems (CBMS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tb/2020/06/08836110", "title": "Computerized Classification of Prostate Cancer Gleason Scores from Whole Slide Images", "doi": null, "abstractUrl": "/journal/tb/2020/06/08836110/1di9To7Bcdy", 
"parentPublication": { "id": "trans/tb", "title": "IEEE/ACM Transactions on Computational Biology and Bioinformatics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartcloud/2020/6547/0/654700a094", "title": "The Risk Prediction of Prostate Cancer Based on A Improved Hybrid Algorithm", "doi": null, "abstractUrl": "/proceedings-article/smartcloud/2020/654700a094/1p6f5ZkqOje", "parentPublication": { "id": "proceedings/smartcloud/2020/6547/0", "title": "2020 IEEE International Conference on Smart Cloud (SmartCloud)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvk7JKD", "title": "IEEE Virtual Reality Conference (VR 2006)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2006", "__typename": "ProceedingType" }, "article": { "id": "12OmNwDACqI", "doi": "10.1109/VR.2006.91", "title": "Interpersonal Scenarios: Virtual \\approx Real?", "normalizedTitle": "Interpersonal Scenarios: Virtual \\approx Real?", "abstract": "This paper reports on a study to examine the similarities and differences in experiencing an interpersonal scenario with real and virtual humans. A system that allows medical students to interview a life-size virtual patient using natural speech and gestures was used as a platform for this comparison. Study participants interviewed either a virtual patient or a standardized patient, an actor trained to represent a medical condition. Subtle yet substantial differences were found in the participants? rapport with the patient and the flow of the conversation. The virtual patient?s limited expressiveness was a significant source of these differences. However, overall task performance was similar, as were perceptions of the educational value of the interaction.", "abstracts": [ { "abstractType": "Regular", "content": "This paper reports on a study to examine the similarities and differences in experiencing an interpersonal scenario with real and virtual humans. A system that allows medical students to interview a life-size virtual patient using natural speech and gestures was used as a platform for this comparison. Study participants interviewed either a virtual patient or a standardized patient, an actor trained to represent a medical condition. Subtle yet substantial differences were found in the participants? rapport with the patient and the flow of the conversation. The virtual patient?s limited expressiveness was a significant source of these differences. 
However, overall task performance was similar, as were perceptions of the educational value of the interaction.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper reports on a study to examine the similarities and differences in experiencing an interpersonal scenario with real and virtual humans. A system that allows medical students to interview a life-size virtual patient using natural speech and gestures was used as a platform for this comparison. Study participants interviewed either a virtual patient or a standardized patient, an actor trained to represent a medical condition. Subtle yet substantial differences were found in the participants? rapport with the patient and the flow of the conversation. The virtual patient?s limited expressiveness was a significant source of these differences. However, overall task performance was similar, as were perceptions of the educational value of the interaction.", "fno": "02240059", "keywords": [ "Virtual Characters", "Multimodal Interaction", "Human Computer Interaction", "Medical Education", "Immersive Virtual Environments" ], "authors": [ { "affiliation": "Science and Engineering,University of Florida", "fullName": "Andrew Raij", "givenName": "Andrew", "surname": "Raij", "__typename": "ArticleAuthorType" }, { "affiliation": "Science and Engineering,University of Florida", "fullName": "Kyle Johnsen", "givenName": "Kyle", "surname": "Johnsen", "__typename": "ArticleAuthorType" }, { "affiliation": "Science and Engineering,University of Florida", "fullName": "Robert Dickerson", "givenName": "Robert", "surname": "Dickerson", "__typename": "ArticleAuthorType" }, { "affiliation": "Science and Engineering,University of Florida", "fullName": "Benjamin Lok", "givenName": "Benjamin", "surname": "Lok", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Florida", "fullName": "Marc Cohen", "givenName": "Marc", "surname": "Cohen", "__typename": "ArticleAuthorType" }, { "affiliation": 
"Medical College of Georgia", "fullName": "Thomas Bernard", "givenName": "Thomas", "surname": "Bernard", "__typename": "ArticleAuthorType" }, { "affiliation": "Medical College of Georgia", "fullName": "Christopher Oxendine", "givenName": "Christopher", "surname": "Oxendine", "__typename": "ArticleAuthorType" }, { "affiliation": "Medical College of Georgia", "fullName": "Peggy Wagner", "givenName": "Peggy", "surname": "Wagner", "__typename": "ArticleAuthorType" }, { "affiliation": "Medical College of Georgia", "fullName": "D. Scott Lind", "givenName": "D. Scott", "surname": "Lind", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2006-03-01T00:00:00", "pubType": "proceedings", "pages": "59-66", "year": "2006", "issn": "1087-8270", "isbn": "1-4244-0224-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "02240051", "articleId": "12OmNrGb2lk", "__typename": "AdjacentArticleType" }, "next": { "fno": "02240067", "articleId": "12OmNxwENJt", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/acii/2017/0563/0/08273640", "title": "Comparing virtual reality with computer monitors as rating environments for affective dimensions in social interactions", "doi": null, "abstractUrl": "/proceedings-article/acii/2017/08273640/12OmNwDSdGJ", "parentPublication": { "id": "proceedings/acii/2017/0563/0", "title": "2017 Seventh International Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2002/1492/0/14920275", "title": "Perceived Egocentric Distances in Real, Image-Based, and Traditional Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2002/14920275/12OmNwHhoQ2", 
"parentPublication": { "id": "proceedings/vr/2002/1492/0", "title": "Proceedings IEEE Virtual Reality 2002", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrais/1995/7084/0/70840048", "title": "Virtual-reality monitoring", "doi": null, "abstractUrl": "/proceedings-article/vrais/1995/70840048/12OmNzUPpwc", "parentPublication": { "id": "proceedings/vrais/1995/7084/0", "title": "Virtual Reality Annual International Symposium", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/svr/2012/4725/0/4725a261", "title": "An Integration Model for Access to Archaeological Collections Using Multimodal Interaction in Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/svr/2012/4725a261/12OmNzt0ILB", "parentPublication": { "id": "proceedings/svr/2012/4725/0", "title": "2012 14th Symposium on Virtual and Augmented Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446235", "title": "Influences on the Elicitation of Interpersonal Space with Virtual Humans", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446235/13bd1eW2l9F", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2007/03/v0443", "title": "Comparing Interpersonal Interactions with a Virtual Human to Those with a Real Human", "doi": null, "abstractUrl": "/journal/tg/2007/03/v0443/13rRUwvT9gk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2021/03/08642402", "title": "First Impressions Count! 
The Role of the Human&#x0027;s Emotional State on Rapport Established with an Empathic versus Neutral Virtual Therapist", "doi": null, "abstractUrl": "/journal/ta/2021/03/08642402/17PYEmawc80", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2022/6814/0/681400a213", "title": "EEG-based Evaluation on Intuitive Gesture Interaction in Virtual Environment", "doi": null, "abstractUrl": "/proceedings-article/cw/2022/681400a213/1I6RKHGKLD2", "parentPublication": { "id": "proceedings/cw/2022/6814/0", "title": "2022 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aivr/2022/5725/0/572500a073", "title": "Empathizing with virtual agents: the effect of personification and general empathic tendencies", "doi": null, "abstractUrl": "/proceedings-article/aivr/2022/572500a073/1KmFdlbCJji", "parentPublication": { "id": "proceedings/aivr/2022/5725/0", "title": "2022 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089452", "title": "Real and Virtual Environment Mismatching Induces Arousal and Alters Movement Behavior", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089452/1jIxcobDHi0", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxWcH0g", "title": "2012 IEEE 14th International Conference on Commerce and Enterprise Computing", "acronym": "cec", "groupId": "1002843", "volume": "0", "displayVolume": "0", "year": "2012", "__typename": "ProceedingType" }, "article": { "id": "12OmNwtn3xN", "doi": "10.1109/CEC.2012.24", "title": "Social network characteristics of online shopping interpersonal relationship in real and virtual communities", "normalizedTitle": "Social network characteristics of online shopping interpersonal relationship in real and virtual communities", "abstract": "Both real and virtual communities are important channels for consumers to release, disseminate or access to reputation information, and shopping interpersonal relationship appears among community members who communicate with each other through word of mouth. This paper selects 200 true communities and 400 virtual communities, and takes two million real trading data of 100,000 Taobao users in these communities as a basis. We build a network with direction and weight for characterizing online shopping interpersonal of communities from the perspective of social network. Through social network analysis, we further study structure characteristics of community online shopping interpersonal influence from four dimensions, namely network containing degrees, network density, centrality, and cohesive subgroup. This paper attempt to explore law of interpersonal communication, dissemination and effect within communities, and provide a theoretical basis and practical guidance for word of mouth marketing within communities.", "abstracts": [ { "abstractType": "Regular", "content": "Both real and virtual communities are important channels for consumers to release, disseminate or access to reputation information, and shopping interpersonal relationship appears among community members who communicate with each other through word of mouth. 
This paper selects 200 true communities and 400 virtual communities, and takes two million real trading data of 100,000 Taobao users in these communities as a basis. We build a network with direction and weight for characterizing online shopping interpersonal of communities from the perspective of social network. Through social network analysis, we further study structure characteristics of community online shopping interpersonal influence from four dimensions, namely network containing degrees, network density, centrality, and cohesive subgroup. This paper attempt to explore law of interpersonal communication, dissemination and effect within communities, and provide a theoretical basis and practical guidance for word of mouth marketing within communities.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Both real and virtual communities are important channels for consumers to release, disseminate or access to reputation information, and shopping interpersonal relationship appears among community members who communicate with each other through word of mouth. This paper selects 200 true communities and 400 virtual communities, and takes two million real trading data of 100,000 Taobao users in these communities as a basis. We build a network with direction and weight for characterizing online shopping interpersonal of communities from the perspective of social network. Through social network analysis, we further study structure characteristics of community online shopping interpersonal influence from four dimensions, namely network containing degrees, network density, centrality, and cohesive subgroup. 
This paper attempt to explore law of interpersonal communication, dissemination and effect within communities, and provide a theoretical basis and practical guidance for word of mouth marketing within communities.", "fno": "4857a101", "keywords": [ "Communities", "Mouth", "Social Network Services", "Buildings", "Educational Institutions", "Software", "Decision Making" ], "authors": [ { "affiliation": null, "fullName": "Zhang Chong", "givenName": "Zhang", "surname": "Chong", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Wang Bian", "givenName": "Wang", "surname": "Bian", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Lv Benfu", "givenName": "Lv", "surname": "Benfu", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Peng Geng", "givenName": "Peng", "surname": "Geng", "__typename": "ArticleAuthorType" } ], "idPrefix": "cec", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2012-09-01T00:00:00", "pubType": "proceedings", "pages": "101-106", "year": "2012", "issn": null, "isbn": "978-1-4673-6246-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "4857a094", "articleId": "12OmNzuZUwE", "__typename": "AdjacentArticleType" }, "next": { "fno": "4857a107", "articleId": "12OmNznkJXj", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/hicss/2001/0981/4/09814003", "title": "New Knowledge and Micro-Level Online Organization: 'Communities of Practice' as a Development Framework", "doi": null, "abstractUrl": "/proceedings-article/hicss/2001/09814003/12OmNAZx8RX", "parentPublication": { "id": "proceedings/hicss/2001/0981/4", "title": "Proceedings of the 34th Annual Hawaii International Conference on System Sciences", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/bife/2013/4777/0/4777a053", "title": "A Consensus Reaching Model for Collaborative Decision Making in Web 2.0 Communities", "doi": null, "abstractUrl": "/proceedings-article/bife/2013/4777a053/12OmNApu5ns", "parentPublication": { "id": "proceedings/bife/2013/4777/0", "title": "2013 Sixth International Conference on Business Intelligence and Financial Engineering (BIFE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/asonam/2013/2240/0/06785703", "title": "A spatial LDA model for discovering regional communities", "doi": null, "abstractUrl": "/proceedings-article/asonam/2013/06785703/12OmNArbG4T", "parentPublication": { "id": "proceedings/asonam/2013/2240/0", "title": "2013 International Conference on Advances in Social Networks Analysis and Mining (ASONAM)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2013/4892/0/4892d604", "title": "Customer Knowledge Contribution Behavior in Social Shopping Communities", "doi": null, "abstractUrl": "/proceedings-article/hicss/2013/4892d604/12OmNx7ouJC", "parentPublication": { "id": "proceedings/hicss/2013/4892/0", "title": "2013 46th Hawaii International Conference on System Sciences", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/culture-computing/2013/5047/0/5047a131", "title": "Exploring Respectful Design Directions for Indigenous Communities", "doi": null, "abstractUrl": "/proceedings-article/culture-computing/2013/5047a131/12OmNxuFBnN", "parentPublication": { "id": "proceedings/culture-computing/2013/5047/0", "title": "2013 International Conference on Culture and Computing (Culture Computing)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2014/2504/0/2504a604", "title": "Understanding Information Adoption in Online Review Communities: The Role of Herd Factors", "doi": null, 
"abstractUrl": "/proceedings-article/hicss/2014/2504a604/12OmNyuyaej", "parentPublication": { "id": "proceedings/hicss/2014/2504/0", "title": "2014 47th Hawaii International Conference on System Sciences (HICSS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aswec/2008/3100/0/3100a387", "title": "Open Source Communities as Social Networks: An Analysis of Some Peculiar Characteristics", "doi": null, "abstractUrl": "/proceedings-article/aswec/2008/3100a387/12OmNzAFSYq", "parentPublication": { "id": "proceedings/aswec/2008/3100/0", "title": "2008 19th Australian Software Engineering Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2001/0981/0/00926353", "title": "New knowledge and micro-level online organization: 'communities of practice' as a development framework", "doi": null, "abstractUrl": "/proceedings-article/hicss/2001/00926353/12OmNzUgddV", "parentPublication": { "id": "proceedings/hicss/2001/0981/2", "title": "Proceedings of the 34th Annual Hawaii International Conference on System Sciences", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2015/7367/0/7367b993", "title": "Introduction to Social Networking and Communities Minitrack", "doi": null, "abstractUrl": "/proceedings-article/hicss/2015/7367b993/12OmNzV70wF", "parentPublication": { "id": "proceedings/hicss/2015/7367/0", "title": "2015 48th Hawaii International Conference on System Sciences (HICSS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/so/2013/01/mso2013010029", "title": "Uncovering Latent Social Communities in Software Development", "doi": null, "abstractUrl": "/magazine/so/2013/01/mso2013010029/13rRUILtJoZ", "parentPublication": { "id": "mags/so", "title": "IEEE Software", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], 
"articleVideos": [] }
{ "proceeding": { "id": "12OmNCmpcMJ", "title": "Proceedings of the 26th IEEE International Symposium on Computer-Based Medical Systems", "acronym": "cbms", "groupId": "1000153", "volume": "0", "displayVolume": "0", "year": "1997", "__typename": "ProceedingType" }, "article": { "id": "12OmNzaQozw", "doi": "10.1109/CBMS.1997.596420", "title": "Knowledge-based mechanical imaging", "normalizedTitle": "Knowledge-based mechanical imaging", "abstract": "Mechanical imaging (MI) is a new modality of medical imaging, mimicking palpatory diagnostics. MI provides a 3D reconstruction of the internal structures of body soft tissues using measurements of stress patterns on the surface of the investigated tissue. In contrast to other, existing methods of medical imaging which use sophisticated hardware such as superconductive magnets, expensive X-ray equipment and complex ultrasonic phased arrays, MI hardware consists of inexpensive mechanical sensors and a positioning system connected to a PC. The results of our pilot studies have proven the feasibility of the MI technology. Currently, devices for the MI of the prostate gland and breast are being developed. Recently, the first 3D MI of in-vivo prostate has been obtained. Preliminary data strongly suggest that MI technology is an efficient means of objectively evaluating and imaging the prostate, and of detecting prostate cancer.", "abstracts": [ { "abstractType": "Regular", "content": "Mechanical imaging (MI) is a new modality of medical imaging, mimicking palpatory diagnostics. MI provides a 3D reconstruction of the internal structures of body soft tissues using measurements of stress patterns on the surface of the investigated tissue. In contrast to other, existing methods of medical imaging which use sophisticated hardware such as superconductive magnets, expensive X-ray equipment and complex ultrasonic phased arrays, MI hardware consists of inexpensive mechanical sensors and a positioning system connected to a PC. 
The results of our pilot studies have proven the feasibility of the MI technology. Currently, devices for the MI of the prostate gland and breast are being developed. Recently, the first 3D MI of in-vivo prostate has been obtained. Preliminary data strongly suggest that MI technology is an efficient means of objectively evaluating and imaging the prostate, and of detecting prostate cancer.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Mechanical imaging (MI) is a new modality of medical imaging, mimicking palpatory diagnostics. MI provides a 3D reconstruction of the internal structures of body soft tissues using measurements of stress patterns on the surface of the investigated tissue. In contrast to other, existing methods of medical imaging which use sophisticated hardware such as superconductive magnets, expensive X-ray equipment and complex ultrasonic phased arrays, MI hardware consists of inexpensive mechanical sensors and a positioning system connected to a PC. The results of our pilot studies have proven the feasibility of the MI technology. Currently, devices for the MI of the prostate gland and breast are being developed. Recently, the first 3D MI of in-vivo prostate has been obtained. Preliminary data strongly suggest that MI technology is an efficient means of objectively evaluating and imaging the prostate, and of detecting prostate cancer.", "fno": "79280120", "keywords": [ "Medical Image Processing Knowledge Based Mechanical Imaging Medical Imaging Modality Palpatory Diagnostics 3 D Reconstruction Internal Structures Body Soft Tissues Stress Pattern Measurements Mechanical Sensors Positioning System Microcomputer Prostate Gland Prostate Cancer" ], "authors": [ { "affiliation": "Artann Lab., East Brunswick, NJ, USA", "fullName": "A.P. 
Sarvazyan", "givenName": "A.P.", "surname": "Sarvazyan", "__typename": "ArticleAuthorType" } ], "idPrefix": "cbms", "isOpenAccess": false, "showRecommendedArticles": false, "showBuyMe": true, "hasPdf": true, "pubDate": "1997-03-01T00:00:00", "pubType": "proceedings", "pages": "120", "year": "1997", "issn": "1063-7125", "isbn": "0-8186-7928-X", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "79280114", "articleId": "12OmNybx220", "__typename": "AdjacentArticleType" }, "next": { "fno": "79280126", "articleId": "12OmNx6PiAo", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [], "articleVideos": [] }
{ "proceeding": { "id": "12OmNzBOhX1", "title": "2013 Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII)", "acronym": "acii", "groupId": "1002992", "volume": "0", "displayVolume": "0", "year": "2013", "__typename": "ProceedingType" }, "article": { "id": "12OmNzuZUEe", "doi": "10.1109/ACII.2013.109", "title": "Affective Conversational Models: Interpersonal Stance in a Police Interview Context", "normalizedTitle": "Affective Conversational Models: Interpersonal Stance in a Police Interview Context", "abstract": "Building an affective conversational model for a virtual character that can play the role of suspect in a police interview training game comes with challenges. This paper focuses on the response modeling of interpersonal stance of a believable artificial conversational partner. Based on Leary's interpersonal stance theory a computational of model interpersonal stance is created. Other psychological theories and ideas that are proposed to be integrated into the computational stance model are: face and politeness, rapport, and status or role. Proposed evaluation methods for the model use comparison of human behavior with model predicted behavior.", "abstracts": [ { "abstractType": "Regular", "content": "Building an affective conversational model for a virtual character that can play the role of suspect in a police interview training game comes with challenges. This paper focuses on the response modeling of interpersonal stance of a believable artificial conversational partner. Based on Leary's interpersonal stance theory a computational of model interpersonal stance is created. Other psychological theories and ideas that are proposed to be integrated into the computational stance model are: face and politeness, rapport, and status or role. 
Proposed evaluation methods for the model use comparison of human behavior with model predicted behavior.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Building an affective conversational model for a virtual character that can play the role of suspect in a police interview training game comes with challenges. This paper focuses on the response modeling of interpersonal stance of a believable artificial conversational partner. Based on Leary's interpersonal stance theory a computational of model interpersonal stance is created. Other psychological theories and ideas that are proposed to be integrated into the computational stance model are: face and politeness, rapport, and status or role. Proposed evaluation methods for the model use comparison of human behavior with model predicted behavior.", "fno": "5048a624", "keywords": [ "Computational Modeling", "Face", "Interviews", "Training", "Vectors", "Context", "Psychology", "Yet We Do Not", "We Have The Right To Remain Silent" ], "authors": [ { "affiliation": "Human Media Interaction, Univ. 
of Twente, Enschede, Netherlands", "fullName": "Merijn Bruijnes", "givenName": "Merijn", "surname": "Bruijnes", "__typename": "ArticleAuthorType" } ], "idPrefix": "acii", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2013-09-01T00:00:00", "pubType": "proceedings", "pages": "624-629", "year": "2013", "issn": "2156-8103", "isbn": "978-0-7695-5048-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "5048a618", "articleId": "12OmNyoAAau", "__typename": "AdjacentArticleType" }, "next": { "fno": "5048a630", "articleId": "12OmNC3Xhmt", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/acii/2013/5048/0/5048a258", "title": "From Emotions to Interpersonal Stances: Multi-level Analysis of Smiling Virtual Characters", "doi": null, "abstractUrl": "/proceedings-article/acii/2013/5048a258/12OmNAlvHJC", "parentPublication": { "id": "proceedings/acii/2013/5048/0", "title": "2013 Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icalt/2014/4038/0/4038a249", "title": "Virtual Agent Constructionism: Experiences from Health Professions Students Creating Virtual Conversational Agent Representations of Patients", "doi": null, "abstractUrl": "/proceedings-article/icalt/2014/4038a249/12OmNBgQFQ3", "parentPublication": { "id": "proceedings/icalt/2014/4038/0", "title": "2014 IEEE 14th International Conference on Advanced Learning Technologies (ICALT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2018/2335/0/233501a365", "title": "Facial Expression Grounded Conversational Dialogue Generation", "doi": null, "abstractUrl": "/proceedings-article/fg/2018/233501a365/12OmNC3FGgx", 
"parentPublication": { "id": "proceedings/fg/2018/2335/0", "title": "2018 13th IEEE International Conference on Automatic Face & Gesture Recognition (FG 2018)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2012/1611/0/06239349", "title": "Parameterizing interpersonal behaviour with Laban movement analysis — A Bayesian approach", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2012/06239349/12OmNvAAttN", "parentPublication": { "id": "proceedings/cvprw/2012/1611/0", "title": "2012 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/acii/2013/5048/0/5048a019", "title": "A Multimodal Corpus Approach to the Design of Virtual Recruiters", "doi": null, "abstractUrl": "/proceedings-article/acii/2013/5048a019/12OmNvwTGBu", "parentPublication": { "id": "proceedings/acii/2013/5048/0", "title": "2013 Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/acii/2013/5048/0/5048a203", "title": "Conversational Topics Handle Social Relationships", "doi": null, "abstractUrl": "/proceedings-article/acii/2013/5048a203/12OmNx8Ouq7", "parentPublication": { "id": "proceedings/acii/2013/5048/0", "title": "2013 Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/passat-socialcom/2012/5638/0/06406349", "title": "Mutual Stance Building in Dyad of Virtual Agents: Smile Alignment and Synchronisation", "doi": null, "abstractUrl": "/proceedings-article/passat-socialcom/2012/06406349/12OmNxE2mMQ", "parentPublication": { "id": "proceedings/passat-socialcom/2012/5638/0", "title": "2012 International Conference on Privacy, 
Security, Risk and Trust (PASSAT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/pc/2019/02/08794689", "title": "Towards Interpersonal Assistants: Next-Generation Conversational Agents", "doi": null, "abstractUrl": "/magazine/pc/2019/02/08794689/1cplVUN5THi", "parentPublication": { "id": "mags/pc", "title": "IEEE Pervasive Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2022/01/08868168", "title": "Exploiting Evolutionary Algorithms to Model Nonverbal Reactions to Conversational Interruptions in User-Agent Interactions", "doi": null, "abstractUrl": "/journal/ta/2022/01/08868168/1e7BTZFGmw8", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/acii/2019/3888/0/08925449", "title": "Situation-Aware Emotion Regulation of Conversational Agents with Kinetic Earables", "doi": null, "abstractUrl": "/proceedings-article/acii/2019/08925449/1fHGFcb5YZ2", "parentPublication": { "id": "proceedings/acii/2019/3888/0", "title": "2019 8th International Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1CJcAaH6aYg", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2022", "__typename": "ProceedingType" }, "article": { "id": "1CJcDgr8xyg", "doi": "10.1109/VRW55335.2022.00238", "title": "Interpersonal Distance to a Speaking Avatar: Loudness Matters Irrespective of Contents", "normalizedTitle": "Interpersonal Distance to a Speaking Avatar: Loudness Matters Irrespective of Contents", "abstract": "It is important for us to maintain appropriate interpersonal distance depending on situations in effective and safe communications. We aimed to investigate the effects of speech loudness and clarity on the interpersonal distance towards an avatar in a virtual environment. We found that the louder speech of the avatar made the distance between the participants and the avatar larger than the quiet speech, but the clarity of the speech did not significantly affect the distance. These results suggest that the perception of loudness modulates the interpersonal distance towards the virtual avatar to maintain the intimate equilibrium.", "abstracts": [ { "abstractType": "Regular", "content": "It is important for us to maintain appropriate interpersonal distance depending on situations in effective and safe communications. We aimed to investigate the effects of speech loudness and clarity on the interpersonal distance towards an avatar in a virtual environment. We found that the louder speech of the avatar made the distance between the participants and the avatar larger than the quiet speech, but the clarity of the speech did not significantly affect the distance. 
These results suggest that the perception of loudness modulates the interpersonal distance towards the virtual avatar to maintain the intimate equilibrium.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "It is important for us to maintain appropriate interpersonal distance depending on situations in effective and safe communications. We aimed to investigate the effects of speech loudness and clarity on the interpersonal distance towards an avatar in a virtual environment. We found that the louder speech of the avatar made the distance between the participants and the avatar larger than the quiet speech, but the clarity of the speech did not significantly affect the distance. These results suggest that the perception of loudness modulates the interpersonal distance towards the virtual avatar to maintain the intimate equilibrium.", "fno": "840200a774", "keywords": [ "Avatars", "Loudness", "Speech", "Speech Loudness", "Speech Clarity", "Virtual Avatar", "Speaking Avatar", "Interpersonal Distance", "Virtual Environment", "Solid Modeling", "Three Dimensional Displays", "Avatars", "Conferences", "Computational Modeling", "Virtual Environments", "User Interfaces", "Proxemics", "Interpersonal Distance", "Personal Space", "Loudness", "1 3 7 Computer Graphics Three Dimensional Graphics And Realism Virtual Reality", "H 1 2 Models And Principles User Machine Systems Human Factors" ], "authors": [ { "affiliation": "Toyohashi University of Technology", "fullName": "Kota Takahashi", "givenName": "Kota", "surname": "Takahashi", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Yasuyuki Inoue", "givenName": "Yasuyuki", "surname": "Inoue", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, 
"showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2022-03-01T00:00:00", "pubType": "proceedings", "pages": "774-775", "year": "2022", "issn": null, "isbn": "978-1-6654-8402-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "840200a772", "articleId": "1CJcAx6yEG4", "__typename": "AdjacentArticleType" }, "next": { "fno": "840200a776", "articleId": "1CJfqXWigW4", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cw/2015/9403/0/9403a325", "title": "Instant Messenger with Personalized 3D Avatar", "doi": null, "abstractUrl": "/proceedings-article/cw/2015/9403a325/12OmNAkEU6d", "parentPublication": { "id": "proceedings/cw/2015/9403/0", "title": "2015 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504689", "title": "The impact of a self-avatar on cognitive load in immersive virtual reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504689/12OmNviHKla", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714123", "title": "The Impact of Embodiment and Avatar Sizing on Personal Space in Immersive Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714123/1B0Y0yXxNbG", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a770", "title": "Emotional Empathy and Facial Mimicry of Avatar Faces", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a770/1CJdHd5yTSM", "parentPublication": { "id": 
"proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a764", "title": "Automatic 3D Avatar Generation from a Single RBG Frontal Image", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a764/1CJexMJUGxa", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2023/4815/0/481500a072", "title": "Volumetric Avatar Reconstruction with Spatio-Temporally Offset RGBD Cameras", "doi": null, "abstractUrl": "/proceedings-article/vr/2023/481500a072/1MNgmRWwNUI", "parentPublication": { "id": "proceedings/vr/2023/4815/0", "title": "2023 IEEE Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/05/09382845", "title": "The Influence of Avatar Representation on Interpersonal Communication in Virtual Social Environments", "doi": null, "abstractUrl": "/journal/tg/2021/05/09382845/1saZq7bIPUQ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a613", "title": "Communications in Virtual Environment Improve Interpersonal Impression", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a613/1tnWHW8JhK0", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vrw/2021/4057/0/405700a127", "title": "Evidence for a Relationship Between Self-Avatar Fixations and Perceived Avatar Similarity within Low-Cost Virtual Reality Embodiment", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a127/1tnXDDh8sqk", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2021/1838/0/255600a455", "title": "Correction of Avatar Hand Movements Supports Learning of a Motor Skill", "doi": null, "abstractUrl": "/proceedings-article/vr/2021/255600a455/1tuBfJZ11HG", "parentPublication": { "id": "proceedings/vr/2021/1838/0", "title": "2021 IEEE Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1tnWwqMuCzu", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "acronym": "vrw", "groupId": "1836626", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1tnWHW8JhK0", "doi": "10.1109/VRW52623.2021.00189", "title": "Communications in Virtual Environment Improve Interpersonal Impression", "normalizedTitle": "Communications in Virtual Environment Improve Interpersonal Impression", "abstract": "Pseudo physical touch is used for communications in virtual environments such as VRChat. We aimed to test if the pseudo-touch communication affects social impression in a virtual environment. Nineteen participants performed the controlled experiment with a partner who was an experimenter with three types of communications: no touch, pseudo touch, and actual touch. Subjective ratings of attractiveness and the communication easiness with the partner increased in all conditions, suggesting that the communication in virtual environments improves interpersonal attraction and communicability either with or without physical or pseudo touch.", "abstracts": [ { "abstractType": "Regular", "content": "Pseudo physical touch is used for communications in virtual environments such as VRChat. We aimed to test if the pseudo-touch communication affects social impression in a virtual environment. Nineteen participants performed the controlled experiment with a partner who was an experimenter with three types of communications: no touch, pseudo touch, and actual touch. 
Subjective ratings of attractiveness and the communication easiness with the partner increased in all conditions, suggesting that the communication in virtual environments improves interpersonal attraction and communicability either with or without physical or pseudo touch.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Pseudo physical touch is used for communications in virtual environments such as VRChat. We aimed to test if the pseudo-touch communication affects social impression in a virtual environment. Nineteen participants performed the controlled experiment with a partner who was an experimenter with three types of communications: no touch, pseudo touch, and actual touch. Subjective ratings of attractiveness and the communication easiness with the partner increased in all conditions, suggesting that the communication in virtual environments improves interpersonal attraction and communicability either with or without physical or pseudo touch.", "fno": "405700a613", "keywords": [ "Haptic Interfaces", "Virtual Reality", "Virtual Environment", "Pseudophysical Touch", "Pseudotouch Communication", "Social Impression", "Interpersonal Attraction", "Communicability", "VR Chat", "Solid Modeling", "Three Dimensional Displays", "Conferences", "Computational Modeling", "Virtual Environments", "User Interfaces", "Pseudo Touch", "Communication", "Interpersonal Impression" ], "authors": [ { "affiliation": "Toyohashi University of Technology", "fullName": "Yuki Kato", "givenName": "Yuki", "surname": "Kato", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Maki Sugimoto", "givenName": "Maki", "surname": "Sugimoto", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Masahiro Inami", "givenName": "Masahiro", "surname": "Inami", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru 
Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } ], "idPrefix": "vrw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-03-01T00:00:00", "pubType": "proceedings", "pages": "613-614", "year": "2021", "issn": null, "isbn": "978-1-6654-4057-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "405700a611", "articleId": "1tnWMj0AXn2", "__typename": "AdjacentArticleType" }, "next": { "fno": "405700a615", "articleId": "1tnX2a6RYaI", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2009/3943/0/04811019", "title": "Virtual Humans That Touch Back: Enhancing Nonverbal Communication with Virtual Humans through Bidirectional Touch", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811019/12OmNwMXnsz", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446128", "title": "Rendering of Pressure and Textures Using Wearable Haptics in Immersive VR Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446128/13bd1eSlyt0", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446235", "title": "Influences on the Elicitation of Interpersonal Space with Virtual Humans", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446235/13bd1eW2l9F", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, 
"__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446054", "title": "Keynote Speaker Tactile Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446054/13bd1h03qOn", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2013/02/mcg2013020080", "title": "Touch-Based Interfaces for Interacting with 3D Content in Public Exhibitions", "doi": null, "abstractUrl": "/magazine/cg/2013/02/mcg2013020080/13rRUxZRbrO", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/cg/2017/01/mcg2017010020", "title": "Playing with Senses in VR: Alternate Perceptions Combining Vision and Touch", "doi": null, "abstractUrl": "/magazine/cg/2017/01/mcg2017010020/13rRUytF43L", "parentPublication": { "id": "mags/cg", "title": "IEEE Computer Graphics and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a774", "title": "Interpersonal Distance to a Speaking Avatar: Loudness Matters Irrespective of Contents", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a774/1CJcDgr8xyg", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a782", "title": "Virtual Touch Modulates Perception of Pleasant Touch", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a782/1CJd7sjt0go", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces 
Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2022/8402/0/840200a802", "title": "Knowing the Partner&#x0027;s Objective Increases Embodiment towards a Limb Controlled by the Partner", "doi": null, "abstractUrl": "/proceedings-article/vrw/2022/840200a802/1CJfq7DQm76", "parentPublication": { "id": "proceedings/vrw/2022/8402/0", "title": "2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797993", "title": "VirtualTablet: Extending Movable Surfaces with Touch Interaction", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797993/1cJ1hgQ4Li8", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNwqft3l", "doi": "10.1109/VR.2015.7223389", "title": "The effect of head mounted display weight and locomotion method on the perceived naturalness of virtual walking speeds", "normalizedTitle": "The effect of head mounted display weight and locomotion method on the perceived naturalness of virtual walking speeds", "abstract": "This poster details a study investigating the effect of Head Mounted Display (HMD) weight and locomotion method (Walking-In-Place and treadmill walking) on the perceived naturalness of virtual walking speeds. The results revealed significant main effects of movement type, but no significant effects of HMD weight were identified.", "abstracts": [ { "abstractType": "Regular", "content": "This poster details a study investigating the effect of Head Mounted Display (HMD) weight and locomotion method (Walking-In-Place and treadmill walking) on the perceived naturalness of virtual walking speeds. The results revealed significant main effects of movement type, but no significant effects of HMD weight were identified.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This poster details a study investigating the effect of Head Mounted Display (HMD) weight and locomotion method (Walking-In-Place and treadmill walking) on the perceived naturalness of virtual walking speeds. 
The results revealed significant main effects of movement type, but no significant effects of HMD weight were identified.", "fno": "07223389", "keywords": [ "Legged Locomotion", "Visualization", "Optical Distortion", "Distortion", "Virtual Environments", "Analysis Of Variance", "I 3 7 Computer Graphics Three Dimenshional Graphics And Realism Virtual Reality", "H 1 2 Information Systems User Machine Systems Human Factors" ], "authors": [ { "affiliation": "Aalborg University", "fullName": "Niels Christian Nilsson", "givenName": "Niels Christian", "surname": "Nilsson", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University", "fullName": "Stefania Serafin", "givenName": "Stefania", "surname": "Serafin", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University", "fullName": "Rolf Nordahl", "givenName": "Rolf", "surname": "Nordahl", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "249-250", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07223388", "articleId": "12OmNCdk2Jm", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223390", "articleId": "12OmNxFsmDI", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2013/4795/0/06549395", "title": "Flexible and general redirected walking for head-mounted displays", "doi": null, "abstractUrl": "/proceedings-article/vr/2013/06549395/12OmNxFJXN3", "parentPublication": { "id": "proceedings/vr/2013/4795/0", "title": "2013 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223328", "title": "The effect of visual display 
properties and gain presentation mode on the perceived naturalness of virtual walking speeds", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223328/12OmNxGja3F", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223327", "title": "Virtual proxemics: Locomotion in the presence of obstacles in large immersive projection environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223327/12OmNzcPAkQ", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446432", "title": "Inducing Compensatory Changes in Gait Similar to External Perturbations Using an Immersive Head Mounted Display", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446432/13bd1fKQxs2", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404569", "title": "Establishing the Range of Perceptually Natural Visual Walking Speeds for Virtual Walking-In-Place Locomotion", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404569/13rRUxAASTb", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404596", "title": "Dynamic Affordances in Embodied Interactive Systems: The Role of Display and Mode of Locomotion", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404596/13rRUxOve9K", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer 
Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798209", "title": "Enactive Approach to Assess Perceived Speed Error during Walking and Running in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798209/1cI6auzeLYY", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797842", "title": "A pilot study of gaze-gait relations analysis in a VR environment using HMD and LRF", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797842/1cJ15kwNxnO", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090453", "title": "Perception of Walking Self-body Avatar Enhances Virtual-walking Sensation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090453/1jIxoojmMy4", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2020/8508/0/850800a649", "title": "Comparing World and Screen Coordinate Systems in Optical See-Through Head-Mounted Displays for Text Readability while Walking", "doi": null, "abstractUrl": "/proceedings-article/ismar/2020/850800a649/1pysvKFdazS", "parentPublication": { "id": "proceedings/ismar/2020/8508/0", "title": "2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1gJ1v0m", "doi": "10.1109/VR.2018.8446587", "title": "Do Textures and Global Illumination Influence the Perception of Redirected Walking Based on Translational Gain?", "normalizedTitle": "Do Textures and Global Illumination Influence the Perception of Redirected Walking Based on Translational Gain?", "abstract": "For locomotion in virtual environments (VE) the method of redirected walking (RDW) enables users to explore large virtual areas within a restricted physical space by (almost) natural walking. The trick behind this method is to manipulate the virtual camera in an user-undetectable manner that leads to a change of his movements. If the virtual camera is manipulated too strong then the user recognizes this manipulation and reacts accordingly. We studied the effect of human perception of RDW under the influence of the level of realism in rendering the virtual scene.", "abstracts": [ { "abstractType": "Regular", "content": "For locomotion in virtual environments (VE) the method of redirected walking (RDW) enables users to explore large virtual areas within a restricted physical space by (almost) natural walking. The trick behind this method is to manipulate the virtual camera in an user-undetectable manner that leads to a change of his movements. If the virtual camera is manipulated too strong then the user recognizes this manipulation and reacts accordingly. 
We studied the effect of human perception of RDW under the influence of the level of realism in rendering the virtual scene.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "For locomotion in virtual environments (VE) the method of redirected walking (RDW) enables users to explore large virtual areas within a restricted physical space by (almost) natural walking. The trick behind this method is to manipulate the virtual camera in an user-undetectable manner that leads to a change of his movements. If the virtual camera is manipulated too strong then the user recognizes this manipulation and reacts accordingly. We studied the effect of human perception of RDW under the influence of the level of realism in rendering the virtual scene.", "fno": "08446587", "keywords": [ "Legged Locomotion", "Lighting", "Rendering Computer Graphics", "Virtual Environments", "Cameras", "Tracking", "Virtual Reality", "Locomotion", "Human Perception" ], "authors": [ { "affiliation": "TH Köln, Germany", "fullName": "Kristoffer Waldow", "givenName": "Kristoffer", "surname": "Waldow", "__typename": "ArticleAuthorType" }, { "affiliation": "TH Köln, Germany", "fullName": "Arnulph Fuhrmann", "givenName": "Arnulph", "surname": "Fuhrmann", "__typename": "ArticleAuthorType" }, { "affiliation": "TH Köln, Germany", "fullName": "Stefan M. 
Grünvogel", "givenName": "Stefan M.", "surname": "Grünvogel", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "717-718", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446207", "articleId": "13bd1gCd7SW", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446544", "articleId": "13bd1gzWkQY", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2017/6647/0/07892373", "title": "Application of redirected walking in room-scale VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892373/12OmNxG1ySA", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504742", "title": "Simultaneous mapping and redirected walking for ad hoc free walking in virtual environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504742/12OmNyUFg0I", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446563", "title": "Redirected Walking in Irregularly Shaped Physical Environments with Dynamic Obstacles", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446563/13bd1eW2l9A", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446225", "title": 
"Effect of Environment Size on Curvature Redirected Walking Thresholds", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446225/13bd1sx4Zt8", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404579", "title": "Performance of Redirected Walking Algorithms in a Constrained Virtual World", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404579/13rRUwjoNx4", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2015/04/07036075", "title": "Cognitive Resource Demands of Redirected Walking", "doi": null, "abstractUrl": "/journal/tg/2015/04/07036075/13rRUxcKzVm", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/dasc-picom-cbdcom-cyberscitech/2021/2174/0/217400a349", "title": "A Redirected Walking Toolkit for Exploring Large-Scale Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/dasc-picom-cbdcom-cyberscitech/2021/217400a349/1BLnzoFxHHy", "parentPublication": { "id": "proceedings/dasc-picom-cbdcom-cyberscitech/2021/2174/0", "title": "2021 IEEE Intl Conf on Dependable, Autonomic and Secure Computing, Intl Conf on Pervasive Intelligence and Computing, Intl Conf on Cloud and Big Data Computing, Intl Conf on Cyber Science and Technology Congress (DASC/PiCom/CBDCom/CyberSciTech)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2022/9617/0/961700a524", "title": "The Chaotic Behavior of Redirection &#x2013; Revisiting Simulations in Redirected Walking", "doi": null, "abstractUrl": 
"/proceedings-article/vr/2022/961700a524/1CJc4FECUko", "parentPublication": { "id": "proceedings/vr/2022/9617/0", "title": "2022 IEEE on Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2023/4815/0/481500a053", "title": "Redirected Walking Based on Historical User Walking Data", "doi": null, "abstractUrl": "/proceedings-article/vr/2023/481500a053/1MNgUnNG7Ju", "parentPublication": { "id": "proceedings/vr/2023/4815/0", "title": "2023 IEEE Conference Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798286", "title": "Evaluating the Effectiveness of Redirected Walking with Auditory Distractors for Navigation in Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798286/1cJ0PIoIPV6", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwpGgL6", "title": "Proceedings Fourth IEEE International Conference on Multimodal Interfaces", "acronym": "icmi", "groupId": "1002175", "volume": "0", "displayVolume": "0", "year": "2002", "__typename": "ProceedingType" }, "article": { "id": "12OmNAGNCeq", "doi": "10.1109/ICMI.2002.1167004", "title": "Active Gaze Tracking for Human-Robot Interaction", "normalizedTitle": "Active Gaze Tracking for Human-Robot Interaction", "abstract": "In our effort to make human-robot interfaces more user-friendly, we built an active gaze tracking system that can measure a person's gaze direction in real-time. Gaze normally tells which object in his/her surrounding a person is interested in. Therefore, it can be used as a medium for human-robot interaction like instructing a robot arm to pick a certain object a user is looking at. In this paper, we discuss how we developed and put together algorithms for zoom camera calibration, low-level control of active head, face and gaze tracking to create an active gaze tracking system.", "abstracts": [ { "abstractType": "Regular", "content": "In our effort to make human-robot interfaces more user-friendly, we built an active gaze tracking system that can measure a person's gaze direction in real-time. Gaze normally tells which object in his/her surrounding a person is interested in. Therefore, it can be used as a medium for human-robot interaction like instructing a robot arm to pick a certain object a user is looking at. In this paper, we discuss how we developed and put together algorithms for zoom camera calibration, low-level control of active head, face and gaze tracking to create an active gaze tracking system.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In our effort to make human-robot interfaces more user-friendly, we built an active gaze tracking system that can measure a person's gaze direction in real-time. 
Gaze normally tells which object in his/her surrounding a person is interested in. Therefore, it can be used as a medium for human-robot interaction like instructing a robot arm to pick a certain object a user is looking at. In this paper, we discuss how we developed and put together algorithms for zoom camera calibration, low-level control of active head, face and gaze tracking to create an active gaze tracking system.", "fno": "18340261", "keywords": [ "Active Gaze Tracking", "Active Face Tracking", "Human Robot Interface" ], "authors": [ { "affiliation": "Australian National University", "fullName": "Rowel Atienza", "givenName": "Rowel", "surname": "Atienza", "__typename": "ArticleAuthorType" }, { "affiliation": "Australian National University", "fullName": "Alexander Zelinsky", "givenName": "Alexander", "surname": "Zelinsky", "__typename": "ArticleAuthorType" } ], "idPrefix": "icmi", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2002-10-01T00:00:00", "pubType": "proceedings", "pages": "261", "year": "2002", "issn": null, "isbn": "0-7695-1834-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "18340253", "articleId": "12OmNrNh0Fr", "__typename": "AdjacentArticleType" }, "next": { "fno": "18340267", "articleId": "12OmNqFJhUd", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvpr/2011/0394/0/05995675", "title": "Probabilistic gaze estimation without active personal calibration", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2011/05995675/12OmNC8MsAV", "parentPublication": { "id": "proceedings/cvpr/2011/0394/0", "title": "CVPR 2011", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icinis/2010/4249/0/4249a048", "title": "Implementation and Optimization of the Eye Gaze Tracking System Based on 
DM642", "doi": null, "abstractUrl": "/proceedings-article/icinis/2010/4249a048/12OmNs4S8I4", "parentPublication": { "id": "proceedings/icinis/2010/4249/0", "title": "Intelligent Networks and Intelligent Systems, International Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2011/9140/0/05771469", "title": "Constraint-based gaze estimation without active calibration", "doi": null, "abstractUrl": "/proceedings-article/fg/2011/05771469/12OmNwdbVch", "parentPublication": { "id": "proceedings/fg/2011/9140/0", "title": "Face and Gesture 2011", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icoip/2010/4252/1/4252a131", "title": "A Simplified 3D Gaze Tracking Technology with Stereo Vision", "doi": null, "abstractUrl": "/proceedings-article/icoip/2010/4252a131/12OmNwqft0F", "parentPublication": { "id": "proceedings/icoip/2010/4252/2", "title": "Optoelectronics and Image Processing, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cgames/2011/1451/0/06000327", "title": "Gaze tracking as a game input interface", "doi": null, "abstractUrl": "/proceedings-article/cgames/2011/06000327/12OmNxRWIeo", "parentPublication": { "id": "proceedings/cgames/2011/1451/0", "title": "2011 16th International Conference on Computer Games (CGAMES)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ratfg-rts/1999/0378/0/03780077", "title": "Real-Time Stereo Face Tracking System for Visual Human Interfaces", "doi": null, "abstractUrl": "/proceedings-article/ratfg-rts/1999/03780077/12OmNxbEtJK", "parentPublication": { "id": "proceedings/ratfg-rts/1999/0378/0", "title": "Recognition, Analysis, &amp; Tracking of Faces &amp; Gestures in Real -Time Systems, IEEE ICCV Workshop on", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/ratfg-rts/1999/0378/0/03780083", "title": "Tracking and Pursuing Persons with a Mobile Robot", "doi": null, "abstractUrl": "/proceedings-article/ratfg-rts/1999/03780083/12OmNzkMlVp", "parentPublication": { "id": "proceedings/ratfg-rts/1999/0378/0", "title": "Recognition, Analysis, &amp; Tracking of Faces &amp; Gestures in Real -Time Systems, IEEE ICCV Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2010/03/ttp2010030478", "title": "In the Eye of the Beholder: A Survey of Models for Eyes and Gaze", "doi": null, "abstractUrl": "/journal/tp/2010/03/ttp2010030478/13rRUxOdD9o", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/11/09873986", "title": "Weighted Pointer: Error-aware Gaze-based Interaction through Fallback Modalities", "doi": null, "abstractUrl": "/journal/tg/2022/11/09873986/1GjwNuaj2ms", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/aciiw/2022/5490/0/10086017", "title": "Implementation of Gaze Estimation in Dialogue to Human-Robot Interaction", "doi": null, "abstractUrl": "/proceedings-article/aciiw/2022/10086017/1M664wXq1gY", "parentPublication": { "id": "proceedings/aciiw/2022/5490/0", "title": "2022 10th International Conference on Affective Computing and Intelligent Interaction Workshops and Demos (ACIIW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNrkjVbR", "title": "2013 IEEE International Conference on Multimedia and Expo (ICME)", "acronym": "icme", "groupId": "1000477", "volume": "0", "displayVolume": "0", "year": "2013", "__typename": "ProceedingType" }, "article": { "id": "12OmNC3XhtM", "doi": "10.1109/ICME.2013.6607532", "title": "Calibration-free gaze tracking using particle filter", "normalizedTitle": "Calibration-free gaze tracking using particle filter", "abstract": "This paper presents a novel approach for gaze estimation using only a low-cost camera and requiring no calibration. The main idea is based on the center-bias property of human gaze distribution to get a coarse estimate of the current gaze position as well as benefit from temporal information to enhance this rough gaze estimate. Firstly, we propose a method for detecting the eye center location and a mapping model based on the center-bias effect to convert it to gaze position. This initial gaze estimate then serves to construct the likelihood model of the eye-appearance. The final gaze position is estimated by fusing the likelihood model with the prior information obtained from previous observations on the basis of the particle filtering framework. Extensive experiments demonstrate the good performance of the proposed system with an average estimation error of 3.43° which outperforms state-of-the-art methods. Furthermore, the low complexity of the proposed system makes it suitable for real-time applications.", "abstracts": [ { "abstractType": "Regular", "content": "This paper presents a novel approach for gaze estimation using only a low-cost camera and requiring no calibration. The main idea is based on the center-bias property of human gaze distribution to get a coarse estimate of the current gaze position as well as benefit from temporal information to enhance this rough gaze estimate. 
Firstly, we propose a method for detecting the eye center location and a mapping model based on the center-bias effect to convert it to gaze position. This initial gaze estimate then serves to construct the likelihood model of the eye-appearance. The final gaze position is estimated by fusing the likelihood model with the prior information obtained from previous observations on the basis of the particle filtering framework. Extensive experiments demonstrate the good performance of the proposed system with an average estimation error of 3.43° which outperforms state-of-the-art methods. Furthermore, the low complexity of the proposed system makes it suitable for real-time applications.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper presents a novel approach for gaze estimation using only a low-cost camera and requiring no calibration. The main idea is based on the center-bias property of human gaze distribution to get a coarse estimate of the current gaze position as well as benefit from temporal information to enhance this rough gaze estimate. Firstly, we propose a method for detecting the eye center location and a mapping model based on the center-bias effect to convert it to gaze position. This initial gaze estimate then serves to construct the likelihood model of the eye-appearance. The final gaze position is estimated by fusing the likelihood model with the prior information obtained from previous observations on the basis of the particle filtering framework. Extensive experiments demonstrate the good performance of the proposed system with an average estimation error of 3.43° which outperforms state-of-the-art methods. 
Furthermore, the low complexity of the proposed system makes it suitable for real-time applications.", "fno": "06607532", "keywords": [ "Accuracy", "Estimation", "Computational Modeling", "Calibration", "Motion Pictures", "Cameras", "Mathematical Model", "Particle Filter", "HCI", "Gaze Estimation", "Eye Center Detection", "Calibration Free" ], "authors": [ { "affiliation": "Technicolor, 975 Ave. des Champs Blancs, 35576 Cesson-Sevigne, France", "fullName": "PhiBang Nguyen", "givenName": "PhiBang", "surname": "Nguyen", "__typename": "ArticleAuthorType" }, { "affiliation": "Technicolor, 975 Ave. des Champs Blancs, 35576 Cesson-Sevigne, France", "fullName": "Julien Fleureau", "givenName": "Julien", "surname": "Fleureau", "__typename": "ArticleAuthorType" }, { "affiliation": "Technicolor, 975 Ave. des Champs Blancs, 35576 Cesson-Sevigne, France", "fullName": "Christel Chamaret", "givenName": "Christel", "surname": "Chamaret", "__typename": "ArticleAuthorType" }, { "affiliation": "Technicolor, 975 Ave. 
des Champs Blancs, 35576 Cesson-Sevigne, France", "fullName": "Philippe Guillotel", "givenName": "Philippe", "surname": "Guillotel", "__typename": "ArticleAuthorType" } ], "idPrefix": "icme", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2013-07-01T00:00:00", "pubType": "proceedings", "pages": "1-6", "year": "2013", "issn": "1945-7871", "isbn": "978-1-4799-0015-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06607531", "articleId": "12OmNwp74Kc", "__typename": "AdjacentArticleType" }, "next": { "fno": "06607533", "articleId": "12OmNCu4nbm", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icme/2014/4761/0/06890322", "title": "Realtime gaze estimation with online calibration", "doi": null, "abstractUrl": "/proceedings-article/icme/2014/06890322/12OmNvjyxUU", "parentPublication": { "id": "proceedings/icme/2014/4761/0", "title": "2014 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2011/9140/0/05771469", "title": "Constraint-based gaze estimation without active calibration", "doi": null, "abstractUrl": "/proceedings-article/fg/2011/05771469/12OmNwdbVch", "parentPublication": { "id": "proceedings/fg/2011/9140/0", "title": "Face and Gesture 2011", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dpvt/2006/2825/0/04155710", "title": "Gaze Tracking by Using Factorized Likelihoods Particle Filtering and Stereo Vision", "doi": null, "abstractUrl": "/proceedings-article/3dpvt/2006/04155710/12OmNx7G625", "parentPublication": { "id": "proceedings/3dpvt/2006/2825/0", "title": "Third International Symposium on 3D Data Processing, Visualization, and Transmission (3DPVT'06)", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2014/4308/0/4308a606", "title": "Eye-Model-Based Gaze Estimation by RGB-D Camera", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2014/4308a606/12OmNyqiaTI", "parentPublication": { "id": "proceedings/cvprw/2014/4308/0", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2000/0750/4/07504201", "title": "A Calibration-Free Gaze Tracking Technique", "doi": null, "abstractUrl": "/proceedings-article/icpr/2000/07504201/12OmNyuy9Ls", "parentPublication": { "id": "proceedings/icpr/2000/0750/4", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2013/2840/0/2840a137", "title": "Calibration-Free Gaze Estimation Using Human Gaze Patterns", "doi": null, "abstractUrl": "/proceedings-article/iccv/2013/2840a137/12OmNzC5Tkq", "parentPublication": { "id": "proceedings/iccv/2013/2840/0", "title": "2013 IEEE International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2015/6683/0/6683a642", "title": "Towards Convenient Calibration for Cross-Ratio Based Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/wacv/2015/6683a642/12OmNzE54Hh", "parentPublication": { "id": "proceedings/wacv/2015/6683/0", "title": "2015 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/crv/2015/1986/0/1986a176", "title": "Mobile 3D Gaze Tracking Calibration", "doi": null, "abstractUrl": "/proceedings-article/crv/2015/1986a176/12OmNzzxusS", "parentPublication": { "id": "proceedings/crv/2015/1986/0", "title": "2015 12th 
Conference on Computer and Robot Vision (CRV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2023/01/09706357", "title": "Towards High Performance Low Complexity Calibration in Appearance Based Gaze Estimation", "doi": null, "abstractUrl": "/journal/tp/2023/01/09706357/1AO2a7pgNPO", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2020/6553/0/09093419", "title": "Offset Calibration for Appearance-Based Gaze Estimation via Gaze Decomposition", "doi": null, "abstractUrl": "/proceedings-article/wacv/2020/09093419/1jPbibCw0gw", "parentPublication": { "id": "proceedings/wacv/2020/6553/0", "title": "2020 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNwtEEzT", "doi": "10.1109/VR.2015.7223361", "title": "AR-SSVEP for brain-machine interface: Estimating user's gaze in head-mounted display with USB camera", "normalizedTitle": "AR-SSVEP for brain-machine interface: Estimating user's gaze in head-mounted display with USB camera", "abstract": "We aim to develop a brain-machine interface (BMI) system that estimates user's gaze or attention on an object to pick it up in the real world. In Experiment 1 and 2 we measured steady-state visual evoked potential (SSVEP) using luminance and/or contrast modulated flickers of photographic scenes presented on a head-mounted display (HMD). We applied multiclass SVM to estimate gaze locations for every 2s time-window data, and obtained significantly good classifications of gaze locations with the leave-one-session-out cross validation. In Experiment 3 we measured SSVEP using luminance and contrast modulated flickers of real scenes that were online captured by a USB camera and presented on the HMD. We put AR markers on real objects and made their locations flickering on HMD. We obtained the best performance of gaze classification with highest luminance and contrast modulation (73–91% accuracy at chance level 33%), and significantly good classification with low (25% of the highest) luminance and contrast modulation (42–50% accuracy). These results suggest that the luminance-modulated flickers of real scenes through USB camera can be applied to BMI by using augmented reality technology.", "abstracts": [ { "abstractType": "Regular", "content": "We aim to develop a brain-machine interface (BMI) system that estimates user's gaze or attention on an object to pick it up in the real world. 
In Experiment 1 and 2 we measured steady-state visual evoked potential (SSVEP) using luminance and/or contrast modulated flickers of photographic scenes presented on a head-mounted display (HMD). We applied multiclass SVM to estimate gaze locations for every 2s time-window data, and obtained significantly good classifications of gaze locations with the leave-one-session-out cross validation. In Experiment 3 we measured SSVEP using luminance and contrast modulated flickers of real scenes that were online captured by a USB camera and presented on the HMD. We put AR markers on real objects and made their locations flickering on HMD. We obtained the best performance of gaze classification with highest luminance and contrast modulation (73–91% accuracy at chance level 33%), and significantly good classification with low (25% of the highest) luminance and contrast modulation (42–50% accuracy). These results suggest that the luminance-modulated flickers of real scenes through USB camera can be applied to BMI by using augmented reality technology.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We aim to develop a brain-machine interface (BMI) system that estimates user's gaze or attention on an object to pick it up in the real world. In Experiment 1 and 2 we measured steady-state visual evoked potential (SSVEP) using luminance and/or contrast modulated flickers of photographic scenes presented on a head-mounted display (HMD). We applied multiclass SVM to estimate gaze locations for every 2s time-window data, and obtained significantly good classifications of gaze locations with the leave-one-session-out cross validation. In Experiment 3 we measured SSVEP using luminance and contrast modulated flickers of real scenes that were online captured by a USB camera and presented on the HMD. We put AR markers on real objects and made their locations flickering on HMD. 
We obtained the best performance of gaze classification with highest luminance and contrast modulation (73–91% accuracy at chance level 33%), and significantly good classification with low (25% of the highest) luminance and contrast modulation (42–50% accuracy). These results suggest that the luminance-modulated flickers of real scenes through USB camera can be applied to BMI by using augmented reality technology.", "fno": "07223361", "keywords": [ "Modulation", "Universal Serial Bus", "Cameras", "Electroencephalography", "Brain Computer Interfaces", "Visualization", "Accuracy", "SVM", "SSVEP", "EEG", "Brain Machine Interface", "Augmented Reality" ], "authors": [ { "affiliation": "Graduate School of Engineering, Toyohashi University of Technology", "fullName": "Shuto Horii", "givenName": "Shuto", "surname": "Horii", "__typename": "ArticleAuthorType" }, { "affiliation": "Department of Computer Science and Engineering, Toyohashi University of Technology", "fullName": "Shigeki Nakauchi", "givenName": "Shigeki", "surname": "Nakauchi", "__typename": "ArticleAuthorType" }, { "affiliation": "Department of Computer Science and Engineering, Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "193-194", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07223360", "articleId": "12OmNyoiZ78", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223362", "articleId": "12OmNAFWOOX", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/wf-iot/2016/4130/0/07845512", "title": "SandUSB: An 
installation-free sandbox for USB peripherals", "doi": null, "abstractUrl": "/proceedings-article/wf-iot/2016/07845512/12OmNAle6D6", "parentPublication": { "id": "proceedings/wf-iot/2016/4130/0", "title": "2016 IEEE 3rd World Forum on Internet of Things (WF-IoT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2008/2153/0/04813466", "title": "A fast and robust 3D head pose and gaze estimation system", "doi": null, "abstractUrl": "/proceedings-article/fg/2008/04813466/12OmNBqv2dy", "parentPublication": { "id": "proceedings/fg/2008/2153/0", "title": "2008 8th IEEE International Conference on Automatic Face & Gesture Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mtv/2013/3246/0/3246a083", "title": "USB Validation Challenges on C45SOI & C28NM Technology Products", "doi": null, "abstractUrl": "/proceedings-article/mtv/2013/3246a083/12OmNvFHfD5", "parentPublication": { "id": "proceedings/mtv/2013/3246/0", "title": "2013 14th International Workshop on Microprocessor Test and Verification (MTV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icisce/2016/2535/0/2535a968", "title": "Evaluating the Feasibility of a Novel Approach for SSVEP Detection Accuracy Improvement Using Phase Shifts", "doi": null, "abstractUrl": "/proceedings-article/icisce/2016/2535a968/12OmNyrqzwt", "parentPublication": { "id": "proceedings/icisce/2016/2535/0", "title": "2016 3rd International Conference on Information Science and Control Engineering (ICISCE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fit/2018/9355/0/935500a099", "title": "Mesh of SSVEP-Based BCI and Eye-Tracker for Use of Higher Frequency Stimuli and Lower Number of EEG Channels", "doi": null, "abstractUrl": "/proceedings-article/fit/2018/935500a099/17D45VTRowY", "parentPublication": { "id": 
"proceedings/fit/2018/9355/0", "title": "2018 International Conference on Frontiers of Information Technology (FIT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/dasc-picom-datacom-cyberscitech/2018/7518/0/751800a070", "title": "USB SPY: A Stratagem for Tracing USB Storage Devices", "doi": null, "abstractUrl": "/proceedings-article/dasc-picom-datacom-cyberscitech/2018/751800a070/17D45Wuc33W", "parentPublication": { "id": "proceedings/dasc-picom-datacom-cyberscitech/2018/7518/0", "title": "2018 IEEE 16th Intl Conf on Dependable, Autonomic and Secure Computing, 16th Intl Conf on Pervasive Intelligence and Computing, 4th Intl Conf on Big Data Intelligence and Computing and Cyber Science and Technology Congress(DASC/PiCom/DataCom/CyberSciTech)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bwcca/2010/4236/0/05633778", "title": "A Solution to Protecting USB Keyboard Data", "doi": null, "abstractUrl": "/proceedings-article/bwcca/2010/05633778/183rAfUygI9", "parentPublication": { "id": "proceedings/bwcca/2010/4236/0", "title": "2010 International Conference on Broadband, Wireless Computing, Communication and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797852", "title": "Perception of Volumetric Characters&#x0027; Eye-Gaze Direction in Head-Mounted Displays", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797852/1cJ0UskDCRa", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ewdts/2020/9899/0/09225144", "title": "Exploiting EEG Signals for Eye Motion Tracking", "doi": null, "abstractUrl": "/proceedings-article/ewdts/2020/09225144/1nWNWOWhzj2", "parentPublication": { "id": 
"proceedings/ewdts/2020/9899/0", "title": "2020 IEEE East-West Design & Test Symposium (EWDTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bibe/2021/4261/0/09635427", "title": "Sparse Graph-based Representations of SSVEP Responses Under the Variational Bayesian Framework", "doi": null, "abstractUrl": "/proceedings-article/bibe/2021/09635427/1zmvmRsSc9O", "parentPublication": { "id": "proceedings/bibe/2021/4261/0", "title": "2021 IEEE 21st International Conference on Bioinformatics and Bioengineering (BIBE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvA1hvm", "title": "2012 Third International Conference on Emerging Security Technologies", "acronym": "est", "groupId": "1800155", "volume": "0", "displayVolume": "0", "year": "2012", "__typename": "ProceedingType" }, "article": { "id": "12OmNxwWoG1", "doi": "10.1109/EST.2012.12", "title": "Liveness Detection Using Gaze Collinearity", "normalizedTitle": "Liveness Detection Using Gaze Collinearity", "abstract": "This paper presents a liveness detection method based on tracking the gaze of the user of a face recognition system using a single camera. The user is required to follow a visual animation of a moving object on a display screen while his/her gaze is measured. The visual stimulus is designed to direct the gaze of the user to sets of collinear points on the screen. Features based on the measured collinearity of the observed gaze are then used to discriminate between live attempts at responding to this challenge and those conducted by âimpostorsâ holding photographs and attempting to follow the stimulus. An initial set of experiments is reported that indicates the effectiveness of the proposed method in detecting this class of spoofing attacks.", "abstracts": [ { "abstractType": "Regular", "content": "This paper presents a liveness detection method based on tracking the gaze of the user of a face recognition system using a single camera. The user is required to follow a visual animation of a moving object on a display screen while his/her gaze is measured. The visual stimulus is designed to direct the gaze of the user to sets of collinear points on the screen. Features based on the measured collinearity of the observed gaze are then used to discriminate between live attempts at responding to this challenge and those conducted by âimpostorsâ holding photographs and attempting to follow the stimulus. 
An initial set of experiments is reported that indicates the effectiveness of the proposed method in detecting this class of spoofing attacks.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper presents a liveness detection method based on tracking the gaze of the user of a face recognition system using a single camera. The user is required to follow a visual animation of a moving object on a display screen while his/her gaze is measured. The visual stimulus is designed to direct the gaze of the user to sets of collinear points on the screen. Features based on the measured collinearity of the observed gaze are then used to discriminate between live attempts at responding to this challenge and those conducted by âimpostorsâ holding photographs and attempting to follow the stimulus. An initial set of experiments is reported that indicates the effectiveness of the proposed method in detecting this class of spoofing attacks.", "fno": "4791a062", "keywords": [ "Face", "Cameras", "Face Recognition", "Security", "Visualization", "Feature Extraction", "Conferences", "Gaze", "Face Liveness", "Biometrics" ], "authors": [ { "affiliation": null, "fullName": "Asad Ali", "givenName": "Asad", "surname": "Ali", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Farzin Deravi", "givenName": "Farzin", "surname": "Deravi", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Sanaul Hoque", "givenName": "Sanaul", "surname": "Hoque", "__typename": "ArticleAuthorType" } ], "idPrefix": "est", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2012-09-01T00:00:00", "pubType": "proceedings", "pages": "62-65", "year": "2012", "issn": null, "isbn": "978-1-4673-2448-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "4791a058", "articleId": "12OmNBEGYLj", "__typename": "AdjacentArticleType" }, 
"next": { "fno": "4791a066", "articleId": "12OmNzVXNM4", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icmi/2002/1834/0/18340261", "title": "Active Gaze Tracking for Human-Robot Interaction", "doi": null, "abstractUrl": "/proceedings-article/icmi/2002/18340261/12OmNAGNCeq", "parentPublication": { "id": "proceedings/icmi/2002/1834/0", "title": "Proceedings Fourth IEEE International Conference on Multimodal Interfaces", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icoip/2010/4252/1/4252a131", "title": "A Simplified 3D Gaze Tracking Technology with Stereo Vision", "doi": null, "abstractUrl": "/proceedings-article/icoip/2010/4252a131/12OmNwqft0F", "parentPublication": { "id": "proceedings/icoip/2010/4252/2", "title": "Optoelectronics and Image Processing, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/sitis/2017/4283/0/4283a350", "title": "Pholder: An Eye-Gaze Assisted Reading Application on Android", "doi": null, "abstractUrl": "/proceedings-article/sitis/2017/4283a350/12OmNz2kqfc", "parentPublication": { "id": "proceedings/sitis/2017/4283/0", "title": "2017 13th International Conference on Signal-Image Technology & Internet-Based Systems (SITIS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ihmsc/2010/4151/1/4151a300", "title": "A Novel Simple 2D Model of Eye Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/ihmsc/2010/4151a300/12OmNzQR1nK", "parentPublication": { "id": "proceedings/ihmsc/2010/4151/1", "title": "Intelligent Human-Machine Systems and Cybernetics, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/est/2013/5077/0/5077a008", "title": "Directional Sensitivity of Gaze-Collinearity Features in 
Liveness Detection", "doi": null, "abstractUrl": "/proceedings-article/est/2013/5077a008/12OmNzl3X1U", "parentPublication": { "id": "proceedings/est/2013/5077/0", "title": "2013 Fourth International Conference on Emerging Security Technologies (EST)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2016/01/07194851", "title": "Gaze Stripes: Image-Based Visualization of Eye Tracking Data", "doi": null, "abstractUrl": "/journal/tg/2016/01/07194851/13rRUIJuxvk", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tm/5555/01/09802919", "title": "Continuous Gaze Tracking With Implicit Saliency-Aware Calibration on Mobile Devices", "doi": null, "abstractUrl": "/journal/tm/5555/01/09802919/1Eo1vvDggH6", "parentPublication": { "id": "trans/tm", "title": "IEEE Transactions on Mobile Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccnea/2022/9109/0/910900a006", "title": "Attention Mechanism Based Full-face Gaze Estimation for Human-computer Interaction", "doi": null, "abstractUrl": "/proceedings-article/iccnea/2022/910900a006/1HYv5Pstq1y", "parentPublication": { "id": "proceedings/iccnea/2022/9109/0", "title": "2022 International Conference on Computer Network, Electronic and Automation (ICCNEA)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/percom-workshops/2019/9151/0/08730846", "title": "Gaze Estimation Using Residual Neural Network", "doi": null, "abstractUrl": "/proceedings-article/percom-workshops/2019/08730846/1aDSMwUBvBS", "parentPublication": { "id": "proceedings/percom-workshops/2019/9151/0", "title": "2019 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops)", "__typename": "ParentPublication" }, 
"__typename": "RecommendedArticleType" }, { "id": "proceedings/ithings-greencom-cpscom-smartdata/2019/2980/0/298000a655", "title": "A Multi-Modal Gaze Tracking Algorithm", "doi": null, "abstractUrl": "/proceedings-article/ithings-greencom-cpscom-smartdata/2019/298000a655/1ehBL8sk06I", "parentPublication": { "id": "proceedings/ithings-greencom-cpscom-smartdata/2019/2980/0", "title": "2019 International Conference on Internet of Things (iThings) and IEEE Green Computing and Communications (GreenCom) and IEEE Cyber, Physical and Social Computing (CPSCom) and IEEE Smart Data (SmartData)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBaT60w", "title": "2015 11th IEEE International Conference and Workshops on Automatic Face and Gesture Recognition (FG)", "acronym": "fg", "groupId": "1000065", "volume": "1", "displayVolume": "1", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNyQGS7p", "doi": "10.1109/FG.2015.7163121", "title": "Robust gaze estimation based on adaptive fusion of multiple cameras", "normalizedTitle": "Robust gaze estimation based on adaptive fusion of multiple cameras", "abstract": "Gaze movements play a crucial role in human-computer interaction (HCI) applications. Recently, gaze tracking systems with a wide variety of applications have attracted much interest by the industry as well as the scientific community. The state-of-the-art gaze trackers are mostly non-intrusive and report high estimation accuracies. However, they require complex setups such as camera and geometric calibration in addition to subject-specific calibration. In this paper, we introduce a multi-camera gaze estimation system which requires less effort for the users in terms of the system setup and calibration. The system is based on an adaptive fusion of multiple independent camera systems in which the gaze estimation relies on simple cross-ratio (CR) geometry. Experimental results conducted on real data show that the proposed system achieves a significant accuracy improvement, by around 25%, over the traditional CR-based single camera systems through the novel adaptive multi-camera fusion scheme. The real-time system achieves <;0.9° accuracy error with very few calibration data (5 points) under natural head movements, which is competitive with more complex systems. Hence, the proposed system enables fast and user-friendly gaze tracking with minimum user effort without sacrificing too much accuracy.", "abstracts": [ { "abstractType": "Regular", "content": "Gaze movements play a crucial role in human-computer interaction (HCI) applications. 
Recently, gaze tracking systems with a wide variety of applications have attracted much interest by the industry as well as the scientific community. The state-of-the-art gaze trackers are mostly non-intrusive and report high estimation accuracies. However, they require complex setups such as camera and geometric calibration in addition to subject-specific calibration. In this paper, we introduce a multi-camera gaze estimation system which requires less effort for the users in terms of the system setup and calibration. The system is based on an adaptive fusion of multiple independent camera systems in which the gaze estimation relies on simple cross-ratio (CR) geometry. Experimental results conducted on real data show that the proposed system achieves a significant accuracy improvement, by around 25%, over the traditional CR-based single camera systems through the novel adaptive multi-camera fusion scheme. The real-time system achieves <;0.9° accuracy error with very few calibration data (5 points) under natural head movements, which is competitive with more complex systems. Hence, the proposed system enables fast and user-friendly gaze tracking with minimum user effort without sacrificing too much accuracy.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Gaze movements play a crucial role in human-computer interaction (HCI) applications. Recently, gaze tracking systems with a wide variety of applications have attracted much interest by the industry as well as the scientific community. The state-of-the-art gaze trackers are mostly non-intrusive and report high estimation accuracies. However, they require complex setups such as camera and geometric calibration in addition to subject-specific calibration. In this paper, we introduce a multi-camera gaze estimation system which requires less effort for the users in terms of the system setup and calibration. 
The system is based on an adaptive fusion of multiple independent camera systems in which the gaze estimation relies on simple cross-ratio (CR) geometry. Experimental results conducted on real data show that the proposed system achieves a significant accuracy improvement, by around 25%, over the traditional CR-based single camera systems through the novel adaptive multi-camera fusion scheme. The real-time system achieves <;0.9° accuracy error with very few calibration data (5 points) under natural head movements, which is competitive with more complex systems. Hence, the proposed system enables fast and user-friendly gaze tracking with minimum user effort without sacrificing too much accuracy.", "fno": "07163121", "keywords": [ "Cameras", "Estimation", "Calibration", "Monitoring", "Accuracy", "Feature Extraction", "Robustness" ], "authors": [ { "affiliation": "Signal Process. Lab. (LTS5), Ecole Polytech. Fed. de Lausanne, Lausanne, Switzerland", "fullName": "Nuri Murat Arar", "givenName": "Nuri Murat", "surname": "Arar", "__typename": "ArticleAuthorType" }, { "affiliation": "Signal Process. Lab. (LTS5), Ecole Polytech. Fed. de Lausanne, Lausanne, Switzerland", "fullName": "Hua Gao", "givenName": null, "surname": "Hua Gao", "__typename": "ArticleAuthorType" }, { "affiliation": "Signal Process. Lab. (LTS5), Ecole Polytech. Fed. 
de Lausanne, Lausanne, Switzerland", "fullName": "Jean-Philippe Thiran", "givenName": "Jean-Philippe", "surname": "Thiran", "__typename": "ArticleAuthorType" } ], "idPrefix": "fg", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-05-01T00:00:00", "pubType": "proceedings", "pages": "1-7", "year": "2015", "issn": null, "isbn": "978-1-4799-6026-2", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07163120", "articleId": "12OmNx7G61x", "__typename": "AdjacentArticleType" }, "next": { "fno": "07163122", "articleId": "12OmNzBOhIF", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/fg/2008/2153/0/04813466", "title": "A fast and robust 3D head pose and gaze estimation system", "doi": null, "abstractUrl": "/proceedings-article/fg/2008/04813466/12OmNBqv2dy", "parentPublication": { "id": "proceedings/fg/2008/2153/0", "title": "2008 8th IEEE International Conference on Automatic Face & Gesture Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2013/0015/0/06607532", "title": "Calibration-free gaze tracking using particle filter", "doi": null, "abstractUrl": "/proceedings-article/icme/2013/06607532/12OmNC3XhtM", "parentPublication": { "id": "proceedings/icme/2013/0015/0", "title": "2013 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2014/4761/0/06890322", "title": "Realtime gaze estimation with online calibration", "doi": null, "abstractUrl": "/proceedings-article/icme/2014/06890322/12OmNvjyxUU", "parentPublication": { "id": "proceedings/icme/2014/4761/0", "title": "2014 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/fg/2011/9140/0/05771469", "title": "Constraint-based gaze estimation without active calibration", "doi": null, "abstractUrl": "/proceedings-article/fg/2011/05771469/12OmNwdbVch", "parentPublication": { "id": "proceedings/fg/2011/9140/0", "title": "Face and Gesture 2011", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2014/4308/0/4308a606", "title": "Eye-Model-Based Gaze Estimation by RGB-D Camera", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2014/4308a606/12OmNyqiaTI", "parentPublication": { "id": "proceedings/cvprw/2014/4308/0", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dv/2016/5407/0/5407a658", "title": "Robust Plane-Based Calibration of Multiple Non-Overlapping Cameras", "doi": null, "abstractUrl": "/proceedings-article/3dv/2016/5407a658/12OmNzAohRO", "parentPublication": { "id": "proceedings/3dv/2016/5407/0", "title": "2016 Fourth International Conference on 3D Vision (3DV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2015/6683/0/6683a642", "title": "Towards Convenient Calibration for Cross-Ratio Based Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/wacv/2015/6683a642/12OmNzE54Hh", "parentPublication": { "id": "proceedings/wacv/2015/6683/0", "title": "2015 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "mags/mu/2014/04/mmu2014040028", "title": "Real-Time Gaze Estimation with Online Calibration", "doi": null, "abstractUrl": "/magazine/mu/2014/04/mmu2014040028/13rRUx0geby", "parentPublication": { "id": "mags/mu", "title": "IEEE MultiMedia", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/cisai/2021/0692/0/069200a651", "title": "Gaze Estimation Based on Difference Residual Network", "doi": null, "abstractUrl": "/proceedings-article/cisai/2021/069200a651/1BmOqegCHjG", "parentPublication": { "id": "proceedings/cisai/2021/0692/0", "title": "2021 International Conference on Computer Information Science and Artificial Intelligence (CISAI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2019/4803/0/480300j367", "title": "Few-Shot Adaptive Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/iccv/2019/480300j367/1hVlzqYU93y", "parentPublication": { "id": "proceedings/iccv/2019/4803/0", "title": "2019 IEEE/CVF International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNzRZpZR", "title": "2017 13th International Conference on Signal-Image Technology & Internet-Based Systems (SITIS)", "acronym": "sitis", "groupId": "1002425", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNz2kqfc", "doi": "10.1109/SITIS.2017.64", "title": "Pholder: An Eye-Gaze Assisted Reading Application on Android", "normalizedTitle": "Pholder: An Eye-Gaze Assisted Reading Application on Android", "abstract": "Eye-gaze has been used extensively in human computer interface design, web layout design and as assistive technology. We successfully built a reading application with automatic scrolling, using the images captured by the in-build camera to determine the eye-gaze. The application, Pholder, uses the appearance-based method for gaze estimation and tracking of gaze movement directions for scrolling of the screen. We used an innovative technique, using the integration of pixel intensity, for gaze movement estimation which is more robust then other techniques.", "abstracts": [ { "abstractType": "Regular", "content": "Eye-gaze has been used extensively in human computer interface design, web layout design and as assistive technology. We successfully built a reading application with automatic scrolling, using the images captured by the in-build camera to determine the eye-gaze. The application, Pholder, uses the appearance-based method for gaze estimation and tracking of gaze movement directions for scrolling of the screen. We used an innovative technique, using the integration of pixel intensity, for gaze movement estimation which is more robust then other techniques.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Eye-gaze has been used extensively in human computer interface design, web layout design and as assistive technology. 
We successfully built a reading application with automatic scrolling, using the images captured by the in-build camera to determine the eye-gaze. The application, Pholder, uses the appearance-based method for gaze estimation and tracking of gaze movement directions for scrolling of the screen. We used an innovative technique, using the integration of pixel intensity, for gaze movement estimation which is more robust then other techniques.", "fno": "4283a350", "keywords": [ "Android Operating System", "Gaze Tracking", "Human Computer Interaction", "Web Layout Design", "Assistive Technology", "Reading Application", "Eye Gaze", "Pholder", "Gaze Estimation", "Tracking", "Gaze Movement Directions", "Gaze Movement Estimation", "Human Computer Interface Design", "Face", "Estimation", "Robustness", "Tracking", "Libraries", "Cameras", "Switches", "Gaze Movement Estimation", "Saccade", "Mobile", "Appearance Based Method" ], "authors": [ { "affiliation": null, "fullName": "Christiantia Wirawan", "givenName": "Christiantia", "surname": "Wirawan", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Hu Qingyao", "givenName": "Hu", "surname": "Qingyao", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Liu Yi", "givenName": "Liu", "surname": "Yi", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Senglidet Yean", "givenName": "Senglidet", "surname": "Yean", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Bu-Sung Lee", "givenName": "Bu-Sung", "surname": "Lee", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Fang Ran", "givenName": "Fang", "surname": "Ran", "__typename": "ArticleAuthorType" } ], "idPrefix": "sitis", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-12-01T00:00:00", "pubType": "proceedings", "pages": "350-353", "year": "2017", "issn": null, "isbn": "978-1-5386-4283-2", "notes": null, 
"notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "4283a346", "articleId": "12OmNyugz4M", "__typename": "AdjacentArticleType" }, "next": { "fno": "4283a354", "articleId": "12OmNCw3z9U", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/gcis/2009/3571/2/3571b133", "title": "Key Techniques of Eye Gaze Tracking Based on Pupil Corneal Reflection", "doi": null, "abstractUrl": "/proceedings-article/gcis/2009/3571b133/12OmNA0vo1q", "parentPublication": { "id": "proceedings/gcis/2009/3571/2", "title": "2009 WRI Global Congress on Intelligent Systems", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2004/2122/0/21220785", "title": "Non-intrusive Eye Gaze Estimation without Knowledge of Eye Pose", "doi": null, "abstractUrl": "/proceedings-article/fg/2004/21220785/12OmNqJq4je", "parentPublication": { "id": "proceedings/fg/2004/2122/0", "title": "Sixth IEEE International Conference on Automatic Face and Gesture Recognition, 2004. 
Proceedings.", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2017/1032/0/1032b003", "title": "Real Time Eye Gaze Tracking with 3D Deformable Eye-Face Model", "doi": null, "abstractUrl": "/proceedings-article/iccv/2017/1032b003/12OmNwNeYAV", "parentPublication": { "id": "proceedings/iccv/2017/1032/0", "title": "2017 IEEE International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2018/3788/0/08545162", "title": "Automatic Eye Gaze Estimation using Geometric & Texture-based Networks", "doi": null, "abstractUrl": "/proceedings-article/icpr/2018/08545162/17D45WrVg95", "parentPublication": { "id": "proceedings/icpr/2018/3788/0", "title": "2018 24th International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2018/6100/0/610000c221", "title": "Unraveling Human Perception of Facial Aging Using Eye Gaze", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2018/610000c221/17D45WwsQ8l", "parentPublication": { "id": "proceedings/cvprw/2018/6100/0", "title": "2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2018/3788/0/08545635", "title": "Gaze-Aided Eye Detection via Appearance Learning", "doi": null, "abstractUrl": "/proceedings-article/icpr/2018/08545635/17D45X7VTgp", "parentPublication": { "id": "proceedings/icpr/2018/3788/0", "title": "2018 24th International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/csci/2017/2652/0/2652a526", "title": "Real Time Eye Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/csci/2017/2652a526/17D45Xh13w8", "parentPublication": { 
"id": "proceedings/csci/2017/2652/0", "title": "2017 International Conference on Computational Science and Computational Intelligence (CSCI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2022/9062/0/09956312", "title": "A Joint Cascaded Framework for Simultaneous Eye State, Eye Center, and Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/icpr/2022/09956312/1IHq8em8jug", "parentPublication": { "id": "proceedings/icpr/2022/9062/0", "title": "2022 26th International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2022/5325/0/532500a082", "title": "Real-time Gaze Tracking with Head-eye Coordination for Head-mounted Displays", "doi": null, "abstractUrl": "/proceedings-article/ismar/2022/532500a082/1JrQQ8dsLKM", "parentPublication": { "id": "proceedings/ismar/2022/5325/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iri/2020/1054/0/09191571", "title": "Automated Filtering of Eye Gaze Metrics from Dynamic Areas of Interest", "doi": null, "abstractUrl": "/proceedings-article/iri/2020/09191571/1n0IyGDlxPq", "parentPublication": { "id": "proceedings/iri/2020/1054/0", "title": "2020 IEEE 21st International Conference on Information Reuse and Integration for Data Science (IRI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNBTawn8", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "acronym": "cvprw", "groupId": "1001809", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNzuIjrA", "doi": "10.1109/CVPRW.2014.30", "title": "Estimating Gaze Direction of Vehicle Drivers Using a Smartphone Camera", "normalizedTitle": "Estimating Gaze Direction of Vehicle Drivers Using a Smartphone Camera", "abstract": "Many automated driver monitoring technologies have been proposed to enhance vehicle and road safety. Most existing solutions involve the use of specialized embedded hardware, primarily in high-end automobiles. This paper explores driver assistance methods that can be implemented on mobile devices such as a consumer smartphone, thus offering a level of safety enhancement that is more widely accessible. Specifically, the paper focuses on estimating driver gaze direction as an indicator of driver attention. Input video frames from a smartphone camera facing the driver are first processed through a coarse head pose direction. Next, the locations and scales of face parts, namely mouth, eyes, and nose, define a feature descriptor that is supplied to an SVM gaze classifier which outputs one of 8 common driver gaze directions. A key novel aspect is an in-situ approach for gathering training data that improves generalization performance across drivers, vehicles, smartphones, and capture geometry. Experimental results show that a high accuracy of gaze direction estimation is achieved for four scenarios with different drivers, vehicles, smartphones and camera locations.", "abstracts": [ { "abstractType": "Regular", "content": "Many automated driver monitoring technologies have been proposed to enhance vehicle and road safety. Most existing solutions involve the use of specialized embedded hardware, primarily in high-end automobiles. 
This paper explores driver assistance methods that can be implemented on mobile devices such as a consumer smartphone, thus offering a level of safety enhancement that is more widely accessible. Specifically, the paper focuses on estimating driver gaze direction as an indicator of driver attention. Input video frames from a smartphone camera facing the driver are first processed through a coarse head pose direction. Next, the locations and scales of face parts, namely mouth, eyes, and nose, define a feature descriptor that is supplied to an SVM gaze classifier which outputs one of 8 common driver gaze directions. A key novel aspect is an in-situ approach for gathering training data that improves generalization performance across drivers, vehicles, smartphones, and capture geometry. Experimental results show that a high accuracy of gaze direction estimation is achieved for four scenarios with different drivers, vehicles, smartphones and camera locations.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Many automated driver monitoring technologies have been proposed to enhance vehicle and road safety. Most existing solutions involve the use of specialized embedded hardware, primarily in high-end automobiles. This paper explores driver assistance methods that can be implemented on mobile devices such as a consumer smartphone, thus offering a level of safety enhancement that is more widely accessible. Specifically, the paper focuses on estimating driver gaze direction as an indicator of driver attention. Input video frames from a smartphone camera facing the driver are first processed through a coarse head pose direction. Next, the locations and scales of face parts, namely mouth, eyes, and nose, define a feature descriptor that is supplied to an SVM gaze classifier which outputs one of 8 common driver gaze directions. 
A key novel aspect is an in-situ approach for gathering training data that improves generalization performance across drivers, vehicles, smartphones, and capture geometry. Experimental results show that a high accuracy of gaze direction estimation is achieved for four scenarios with different drivers, vehicles, smartphones and camera locations.", "fno": "4308a165", "keywords": [ "Vehicles", "Cameras", "Training", "Monitoring", "Face", "Estimation", "Accuracy" ], "authors": [ { "affiliation": null, "fullName": "Meng-Che Chuang", "givenName": "Meng-Che", "surname": "Chuang", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Raja Bala", "givenName": "Raja", "surname": "Bala", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Edgar A. Bernal", "givenName": "Edgar A.", "surname": "Bernal", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Peter Paul", "givenName": "Peter", "surname": "Paul", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Aaron Burry", "givenName": "Aaron", "surname": "Burry", "__typename": "ArticleAuthorType" } ], "idPrefix": "cvprw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-06-01T00:00:00", "pubType": "proceedings", "pages": "165-170", "year": "2014", "issn": "2160-7516", "isbn": "978-1-4799-4308-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "4308a158", "articleId": "12OmNAS9zSE", "__typename": "AdjacentArticleType" }, "next": { "fno": "4308a171", "articleId": "12OmNxd4tjU", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/isuvr/2017/3091/0/3091a026", "title": "Estimating Gaze Depth Using Multi-Layer Perceptron", "doi": null, "abstractUrl": "/proceedings-article/isuvr/2017/3091a026/12OmNAkWvFD", "parentPublication": { "id": 
"proceedings/isuvr/2017/3091/0", "title": "2017 International Symposium on Ubiquitous Virtual Reality (ISUVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bigcomp/2014/3919/0/06741444", "title": "Head pose and gaze direction tracking for detecting a drowsy driver", "doi": null, "abstractUrl": "/proceedings-article/bigcomp/2014/06741444/12OmNrkBwsu", "parentPublication": { "id": "proceedings/bigcomp/2014/3919/0", "title": "2014 International Conference on Big Data and Smart Computing (BIGCOMP)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/bigcomp/2016/8796/0/07425813", "title": "Real-time categorization of driver's gaze zone using the deep learning techniques", "doi": null, "abstractUrl": "/proceedings-article/bigcomp/2016/07425813/12OmNy68EDN", "parentPublication": { "id": "proceedings/bigcomp/2016/8796/0", "title": "2016 International Conference on Big Data and Smart Computing (BigComp)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040681", "title": "Estimating the Gaze of a Virtuality Human", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040681/13rRUILLkvr", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/dasc-picom-cbdcom-cyberscitech/2021/2174/0/217400a912", "title": "Eye Movement Driving Analysis during Parallel Parking along Roadways: Comparison of Experienced and Novice Drivers", "doi": null, "abstractUrl": "/proceedings-article/dasc-picom-cbdcom-cyberscitech/2021/217400a912/1BLnrXODDri", "parentPublication": { "id": "proceedings/dasc-picom-cbdcom-cyberscitech/2021/2174/0", "title": "2021 IEEE Intl Conf on Dependable, Autonomic and Secure Computing, Intl Conf on Pervasive Intelligence and Computing, Intl Conf on 
Cloud and Big Data Computing, Intl Conf on Cyber Science and Technology Congress (DASC/PiCom/CBDCom/CyberSciTech)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hpcc-dss-smartcity-dependsys/2021/9457/0/945700b783", "title": "Driving Gaze Behavior Prediction at S-Curve Based on Driver Experience using Machine Learning", "doi": null, "abstractUrl": "/proceedings-article/hpcc-dss-smartcity-dependsys/2021/945700b783/1DNDnvwmHni", "parentPublication": { "id": "proceedings/hpcc-dss-smartcity-dependsys/2021/9457/0", "title": "2021 IEEE 23rd Int Conf on High Performance Computing & Communications; 7th Int Conf on Data Science & Systems; 19th Int Conf on Smart City; 7th Int Conf on Dependability in Sensor, Cloud & Big Data Systems & Application (HPCC/DSS/SmartCity/DependSys)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09786815", "title": "Analyzing the Effect of Diverse Gaze and Head Direction on Facial Expression Recognition with Photo-Reflective Sensors Embedded in a Head-Mounted Display", "doi": null, "abstractUrl": "/journal/tg/5555/01/09786815/1DSumaVNxG8", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mipr/2022/9548/0/954800a293", "title": "A Review of Personalized Health Navigation for Drivers", "doi": null, "abstractUrl": "/proceedings-article/mipr/2022/954800a293/1GvdbiZT6o0", "parentPublication": { "id": "proceedings/mipr/2022/9548/0", "title": "2022 IEEE 5th International Conference on Multimedia Information Processing and Retrieval (MIPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccvw/2019/5023/0/502300e443", "title": "An Analysis of How Driver Experience Affects Eye-Gaze Behavior for Robotic Wheelchair Operation", "doi": null, 
"abstractUrl": "/proceedings-article/iccvw/2019/502300e443/1i5mpuErKpi", "parentPublication": { "id": "proceedings/iccvw/2019/5023/0", "title": "2019 IEEE/CVF International Conference on Computer Vision Workshop (ICCVW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mass/2019/4601/0/460100a010", "title": "Recognizing Driver Talking Direction in Running Vehicles with a Smartphone", "doi": null, "abstractUrl": "/proceedings-article/mass/2019/460100a010/1joXzveQn8k", "parentPublication": { "id": "proceedings/mass/2019/4601/0", "title": "2019 IEEE 16th International Conference on Mobile Ad Hoc and Sensor Systems (MASS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1hQqfuoOyHu", "title": "2019 IEEE/CVF International Conference on Computer Vision (ICCV)", "acronym": "iccv", "groupId": "1000149", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1hQqsJoZU8U", "doi": "10.1109/ICCV.2019.00701", "title": "Gaze360: Physically Unconstrained Gaze Estimation in the Wild", "normalizedTitle": "Gaze360: Physically Unconstrained Gaze Estimation in the Wild", "abstract": "Understanding where people are looking is an informative social cue. In this work, we present Gaze360, a large-scale remote gaze-tracking dataset and method for robust 3D gaze estimation in unconstrained images. Our dataset consists of 238 subjects in indoor and outdoor environments with labelled 3D gaze across a wide range of head poses and distances. It is the largest publicly available dataset of its kind by both subject and variety, made possible by a simple and efficient collection method. Our proposed 3D gaze model extends existing models to include temporal information and to directly output an estimate of gaze uncertainty. We demonstrate the benefits of our model via an ablation study, and show its generalization performance via a cross-dataset evaluation against other recent gaze benchmark datasets. We furthermore propose a simple self-supervised approach to improve cross-dataset domain adaptation. Finally, we demonstrate an application of our model for estimating customer attention in a supermarket setting. Our dataset and models will be made available at http://gaze360.csail.mit.edu.", "abstracts": [ { "abstractType": "Regular", "content": "Understanding where people are looking is an informative social cue. In this work, we present Gaze360, a large-scale remote gaze-tracking dataset and method for robust 3D gaze estimation in unconstrained images. 
Our dataset consists of 238 subjects in indoor and outdoor environments with labelled 3D gaze across a wide range of head poses and distances. It is the largest publicly available dataset of its kind by both subject and variety, made possible by a simple and efficient collection method. Our proposed 3D gaze model extends existing models to include temporal information and to directly output an estimate of gaze uncertainty. We demonstrate the benefits of our model via an ablation study, and show its generalization performance via a cross-dataset evaluation against other recent gaze benchmark datasets. We furthermore propose a simple self-supervised approach to improve cross-dataset domain adaptation. Finally, we demonstrate an application of our model for estimating customer attention in a supermarket setting. Our dataset and models will be made available at http://gaze360.csail.mit.edu.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Understanding where people are looking is an informative social cue. In this work, we present Gaze360, a large-scale remote gaze-tracking dataset and method for robust 3D gaze estimation in unconstrained images. Our dataset consists of 238 subjects in indoor and outdoor environments with labelled 3D gaze across a wide range of head poses and distances. It is the largest publicly available dataset of its kind by both subject and variety, made possible by a simple and efficient collection method. Our proposed 3D gaze model extends existing models to include temporal information and to directly output an estimate of gaze uncertainty. We demonstrate the benefits of our model via an ablation study, and show its generalization performance via a cross-dataset evaluation against other recent gaze benchmark datasets. We furthermore propose a simple self-supervised approach to improve cross-dataset domain adaptation. Finally, we demonstrate an application of our model for estimating customer attention in a supermarket setting. 
Our dataset and models will be made available at http://gaze360.csail.mit.edu.", "fno": "480300g911", "keywords": [ "Gaze Tracking", "Pose Estimation", "Stereo Image Processing", "Gaze Benchmark Datasets", "Cross Dataset Domain Adaptation", "3 D Gaze Model", "Simple Collection Method", "Head Poses", "Outdoor Environments", "Indoor Environments", "Unconstrained Images", "Large Scale Remote Gaze Tracking Dataset", "Gaze Estimation", "Cameras", "Three Dimensional Displays", "Estimation", "Face", "Adaptation Models", "Lighting" ], "authors": [ { "affiliation": "MIT", "fullName": "Petr Kellnhofer", "givenName": "Petr", "surname": "Kellnhofer", "__typename": "ArticleAuthorType" }, { "affiliation": "Massachusetts Institute of Technology", "fullName": "Adria Recasens", "givenName": "Adria", "surname": "Recasens", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyota Research Institute", "fullName": "Simon Stent", "givenName": "Simon", "surname": "Stent", "__typename": "ArticleAuthorType" }, { "affiliation": "MIT", "fullName": "Wojciech Matusik", "givenName": "Wojciech", "surname": "Matusik", "__typename": "ArticleAuthorType" }, { "affiliation": "MIT", "fullName": "Antonio Torralba", "givenName": "Antonio", "surname": "Torralba", "__typename": "ArticleAuthorType" } ], "idPrefix": "iccv", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-10-01T00:00:00", "pubType": "proceedings", "pages": "6911-6920", "year": "2019", "issn": null, "isbn": "978-1-7281-4803-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "480300g901", "articleId": "1hVlFqRHkC4", "__typename": "AdjacentArticleType" }, "next": { "fno": "480300g921", "articleId": "1hVlEm2iAWk", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvpr/2014/5118/0/5118b821", "title": "Learning-by-Synthesis for 
Appearance-Based 3D Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2014/5118b821/12OmNCbU31L", "parentPublication": { "id": "proceedings/cvpr/2014/5118/0", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2014/4761/0/06890322", "title": "Realtime gaze estimation with online calibration", "doi": null, "abstractUrl": "/proceedings-article/icme/2014/06890322/12OmNvjyxUU", "parentPublication": { "id": "proceedings/icme/2014/4761/0", "title": "2014 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2017/0733/0/0733c299", "title": "It’s Written All Over Your Face: Full-Face Appearance-Based Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2017/0733c299/12OmNzaQoPr", "parentPublication": { "id": "proceedings/cvprw/2017/0733/0", "title": "2017 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2019/01/08122058", "title": "MPIIGaze: Real-World Dataset and Deep Appearance-Based Gaze Estimation", "doi": null, "abstractUrl": "/journal/tp/2019/01/08122058/17D45WZZ7E5", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2022/6946/0/6.946E192", "title": "GazeOnce: Real-Time Multi-Person Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2022/6.946E192/1H1kNdDQNwc", "parentPublication": { "id": "proceedings/cvpr/2022/6946/0", "title": "2022 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/cvpr/2022/6946/0/694600c182", "title": "Dynamic 3D Gaze from Afar: Deep Gaze Estimation from Temporal Eye-Head-Body Coordination", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2022/694600c182/1H1mDm1L85i", "parentPublication": { "id": "proceedings/cvpr/2022/6946/0", "title": "2022 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icme/2019/9552/0/955200a850", "title": "Learning A 3D Gaze Estimator with Improved Itracker Combined with Bidirectional LSTM", "doi": null, "abstractUrl": "/proceedings-article/icme/2019/955200a850/1cdOP2R2dt6", "parentPublication": { "id": "proceedings/icme/2019/9552/0", "title": "2019 IEEE International Conference on Multimedia and Expo (ICME)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2020/6553/0/09093476", "title": "Learning to Detect Head Movement in Unconstrained Remote Gaze Estimation in the Wild", "doi": null, "abstractUrl": "/proceedings-article/wacv/2020/09093476/1jPblTmx0s0", "parentPublication": { "id": "proceedings/wacv/2020/6553/0", "title": "2020 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2020/7168/0/716800h312", "title": "Unsupervised Representation Learning for Gaze Estimation", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2020/716800h312/1m3o4PL98Q0", "parentPublication": { "id": "proceedings/cvpr/2020/7168/0", "title": "2020 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2021/4509/0/450900j975", "title": "Weakly-Supervised Physically Unconstrained Gaze Estimation", "doi": null, "abstractUrl": 
"/proceedings-article/cvpr/2021/450900j975/1yeHVhjSUkE", "parentPublication": { "id": "proceedings/cvpr/2021/4509/0", "title": "2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1tmhi3ly74c", "title": "2020 25th International Conference on Pattern Recognition (ICPR)", "acronym": "icpr", "groupId": "1000545", "volume": "0", "displayVolume": "0", "year": "2021", "__typename": "ProceedingType" }, "article": { "id": "1tmhwomVU08", "doi": "10.1109/ICPR48806.2021.9412211", "title": "Estimating Gaze Points from Facial Landmarks by a Remote Spherical Camera", "normalizedTitle": "Estimating Gaze Points from Facial Landmarks by a Remote Spherical Camera", "abstract": "From a spherical image, a gaze point, instead of gaze vectors, can be estimated directly because a remote spherical camera can observe a user's face and a gaze target simultaneously. This paper investigates the problem of estimating a gaze point in a spherical image from facial landmarks. In contrast with the existing methods which usually assume gaze points move on a narrow plane, the proposed method can cope with the situation where gaze points vary in depth for a relatively wide field of view. As shown in the results of comparative experiments, we find the orthogonal coordinates of facial landmarks on a unit sphere is a reasonable representation in comparison with spherical polar coordinates; the cues of head pose is helpful to improve the accuracy of gaze points. Consequently, the proposed method achieves a performance on the accuracy of gaze points estimation which is comparable to the state of the art methods.", "abstracts": [ { "abstractType": "Regular", "content": "From a spherical image, a gaze point, instead of gaze vectors, can be estimated directly because a remote spherical camera can observe a user's face and a gaze target simultaneously. This paper investigates the problem of estimating a gaze point in a spherical image from facial landmarks. 
In contrast with the existing methods which usually assume gaze points move on a narrow plane, the proposed method can cope with the situation where gaze points vary in depth for a relatively wide field of view. As shown in the results of comparative experiments, we find the orthogonal coordinates of facial landmarks on a unit sphere is a reasonable representation in comparison with spherical polar coordinates; the cues of head pose is helpful to improve the accuracy of gaze points. Consequently, the proposed method achieves a performance on the accuracy of gaze points estimation which is comparable to the state of the art methods.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "From a spherical image, a gaze point, instead of gaze vectors, can be estimated directly because a remote spherical camera can observe a user's face and a gaze target simultaneously. This paper investigates the problem of estimating a gaze point in a spherical image from facial landmarks. In contrast with the existing methods which usually assume gaze points move on a narrow plane, the proposed method can cope with the situation where gaze points vary in depth for a relatively wide field of view. As shown in the results of comparative experiments, we find the orthogonal coordinates of facial landmarks on a unit sphere is a reasonable representation in comparison with spherical polar coordinates; the cues of head pose is helpful to improve the accuracy of gaze points. 
Consequently, the proposed method achieves a performance on the accuracy of gaze points estimation which is comparable to the state of the art methods.", "fno": "09412211", "keywords": [ "Cameras", "Face Recognition", "Feature Extraction", "Gaze Tracking", "Pose Estimation", "Gaze Point", "Gaze Vectors", "Remote Spherical Camera", "Gaze Target", "Spherical Image", "Facial Landmarks", "Gaze Points Estimation", "Support Vector Machines", "Face Recognition", "Neural Networks", "Buildings", "Estimation", "Cameras", "Magnetic Heads" ], "authors": [ { "affiliation": "Graduate School of Information Sciences, Hiroshima City University 3-4-1,Hiroshima,JAPAN,731-3194", "fullName": "Shigang Li", "givenName": "Shigang", "surname": "Li", "__typename": "ArticleAuthorType" }, { "affiliation": "Faculty of Information Sciences, Hiroshima City University 3-4-1,Hiroshima,JAPAN,731-3194", "fullName": "Norika Fujii", "givenName": "Norika", "surname": "Fujii", "__typename": "ArticleAuthorType" } ], "idPrefix": "icpr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2021-01-01T00:00:00", "pubType": "proceedings", "pages": "7633-7639", "year": "2021", "issn": "1051-4651", "isbn": "978-1-7281-8808-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "09412153", "articleId": "1tmiHY12xy0", "__typename": "AdjacentArticleType" }, "next": { "fno": "09413029", "articleId": "1tmhIFmnerS", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvprw/2012/1611/0/06239182", "title": "Gaze estimation from multimodal Kinect data", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2012/06239182/12OmNA2cYDO", "parentPublication": { "id": "proceedings/cvprw/2012/1611/0", "title": "2012 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/fg/2018/2335/0/233501a535", "title": "Semi-Supervised Learning for Monocular Gaze Redirection", "doi": null, "abstractUrl": "/proceedings-article/fg/2018/233501a535/12OmNy3Agnw", "parentPublication": { "id": "proceedings/fg/2018/2335/0", "title": "2018 13th IEEE International Conference on Automatic Face & Gesture Recognition (FG 2018)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/smartcomp/2018/4705/0/470501a399", "title": "Towards Estimating Emotions and Satisfaction Level of Tourist Based on Eye Gaze and Head Movement", "doi": null, "abstractUrl": "/proceedings-article/smartcomp/2018/470501a399/12OmNy7Qfqu", "parentPublication": { "id": "proceedings/smartcomp/2018/4705/0", "title": "2018 IEEE International Conference on Smart Computing (SMARTCOMP)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/5209b869", "title": "Appearance-Based Gaze Tracking with Free Head Movement", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/5209b869/12OmNyo1nKa", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/crv/2015/1986/0/1986a176", "title": "Mobile 3D Gaze Tracking Calibration", "doi": null, "abstractUrl": "/proceedings-article/crv/2015/1986a176/12OmNzzxusS", "parentPublication": { "id": "proceedings/crv/2015/1986/0", "title": "2015 12th Conference on Computer and Robot Vision (CRV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2018/11/08194910", "title": "Tracking Gaze and Visual Focus of Attention of People Involved in Social Interaction", "doi": null, "abstractUrl": "/journal/tp/2018/11/08194910/143fgZu11hD", 
"parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2018/11/08010348", "title": "Photorealistic Monocular Gaze Redirection Using Machine Learning", "doi": null, "abstractUrl": "/journal/tp/2018/11/08010348/143fh3uFDe8", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2018/3788/0/08545766", "title": "Recognition of Infants&#x0027; Gaze Behaviors and Emotions", "doi": null, "abstractUrl": "/proceedings-article/icpr/2018/08545766/17D45WB0qdg", "parentPublication": { "id": "proceedings/icpr/2018/3788/0", "title": "2018 24th International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2018/6100/0/610000c237", "title": "Light-Weight Head Pose Invariant Gaze Tracking", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2018/610000c237/17D45WXIkI8", "parentPublication": { "id": "proceedings/cvprw/2018/6100/0", "title": "2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/5555/01/10061572", "title": "Free-HeadGAN: Neural Talking Head Synthesis with Explicit Gaze Control", "doi": null, "abstractUrl": "/journal/tp/5555/01/10061572/1Lk2C6ZD2zC", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNy314bx", "title": "2017 IEEE Winter Conference on Applications of Computer Vision (WACV)", "acronym": "wacv", "groupId": "1000040", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNAoDhTe", "doi": "10.1109/WACV.2017.124", "title": "Automatic Calibration of a Multiple-Projector Spherical Fish Tank VR Display", "normalizedTitle": "Automatic Calibration of a Multiple-Projector Spherical Fish Tank VR Display", "abstract": "We describe a novel automatic calibration method using a single camera for a multiple-projector spherical Fish Tank Virtual Reality (FTVR) display. Modeling the projector as an inverse camera, we estimate the intrinsic and extrinsic projector parameters automatically using a set of projected images on the spherical screen. A calibrated camera is placed beneath to observe partially visible projected patterns. Using the correspondence between the observed pattern and the projected pattern, we reconstruct the shape of the spherical display and finally recover the 3D position of each projected pixel on the display. Additionally we present a practical calibration evaluation method that estimates on-surface accuracy using the single camera. We use point mismatch as a metric to describe misalignment and line mismatch to describe distortion. We demonstrate our automatic approach can achieve an on-surface point mismatch less than 1mm and line mismatch less than 1 on a 30cm diameter spherical screen. Taken together, our calibration approach and evaluation method are automatic and accurate for a desktop spherical FTVR and can be applied to other multiple-projector displays with curved screens.", "abstracts": [ { "abstractType": "Regular", "content": "We describe a novel automatic calibration method using a single camera for a multiple-projector spherical Fish Tank Virtual Reality (FTVR) display. 
Modeling the projector as an inverse camera, we estimate the intrinsic and extrinsic projector parameters automatically using a set of projected images on the spherical screen. A calibrated camera is placed beneath to observe partially visible projected patterns. Using the correspondence between the observed pattern and the projected pattern, we reconstruct the shape of the spherical display and finally recover the 3D position of each projected pixel on the display. Additionally we present a practical calibration evaluation method that estimates on-surface accuracy using the single camera. We use point mismatch as a metric to describe misalignment and line mismatch to describe distortion. We demonstrate our automatic approach can achieve an on-surface point mismatch less than 1mm and line mismatch less than 1 on a 30cm diameter spherical screen. Taken together, our calibration approach and evaluation method are automatic and accurate for a desktop spherical FTVR and can be applied to other multiple-projector displays with curved screens.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We describe a novel automatic calibration method using a single camera for a multiple-projector spherical Fish Tank Virtual Reality (FTVR) display. Modeling the projector as an inverse camera, we estimate the intrinsic and extrinsic projector parameters automatically using a set of projected images on the spherical screen. A calibrated camera is placed beneath to observe partially visible projected patterns. Using the correspondence between the observed pattern and the projected pattern, we reconstruct the shape of the spherical display and finally recover the 3D position of each projected pixel on the display. Additionally we present a practical calibration evaluation method that estimates on-surface accuracy using the single camera. We use point mismatch as a metric to describe misalignment and line mismatch to describe distortion. 
We demonstrate our automatic approach can achieve an on-surface point mismatch less than 1mm and line mismatch less than 1 on a 30cm diameter spherical screen. Taken together, our calibration approach and evaluation method are automatic and accurate for a desktop spherical FTVR and can be applied to other multiple-projector displays with curved screens.", "fno": "07926707", "keywords": [ "Calibration", "Computer Displays", "Image Processing", "Image Sensors", "Virtual Reality", "Multiple Projector Spherical Fish Tank VR Display", "Automatic Calibration Method", "Single Camera", "Multipleprojector Spherical Fish Tank Virtual Reality", "FTVR Display", "Inverse Camera", "Extrinsic Projector Parameters", "Spherical Screen", "Calibrated Camera", "Partially Visible Projected Patterns", "Spherical Display", "Calibration Approach", "Curved Screens", "Cameras", "Calibration", "Three Dimensional Displays", "Shape", "Image Reconstruction", "Distortion", "Image Resolution" ], "authors": [ { "affiliation": null, "fullName": "Qian Zhou", "givenName": "Qian", "surname": "Zhou", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Gregor Miller", "givenName": "Gregor", "surname": "Miller", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Kai Wu", "givenName": "Kai", "surname": "Wu", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Daniela Correa", "givenName": "Daniela", "surname": "Correa", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Sidney Fels", "givenName": "Sidney", "surname": "Fels", "__typename": "ArticleAuthorType" } ], "idPrefix": "wacv", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-03-01T00:00:00", "pubType": "proceedings", "pages": "1072-1081", "year": "2017", "issn": null, "isbn": "978-1-5090-4822-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { 
"previous": { "fno": "07926706", "articleId": "12OmNzzfTnl", "__typename": "AdjacentArticleType" }, "next": { "fno": "07926708", "articleId": "12OmNBrV1LX", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvprw/2012/1611/0/06239202", "title": "2D and 3D visualization with dual-resolution for surveillance", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2012/06239202/12OmNAle6UE", "parentPublication": { "id": "proceedings/cvprw/2012/1611/0", "title": "2012 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2013/5053/0/06475040", "title": "A full-spherical device for simultaneous geometry and reflectance acquisition", "doi": null, "abstractUrl": "/proceedings-article/wacv/2013/06475040/12OmNBSSV8O", "parentPublication": { "id": "proceedings/wacv/2013/5053/0", "title": "Applications of Computer Vision, IEEE Workshop on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892376", "title": "3DPS: An auto-calibrated three-dimensional perspective-corrected spherical display", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892376/12OmNC2OSOD", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/mmm/2004/2084/0/20840265", "title": "Multi-camera and Multi-projector based Seamless Live Image Display System", "doi": null, "abstractUrl": "/proceedings-article/mmm/2004/20840265/12OmNC3XhmF", "parentPublication": { "id": "proceedings/mmm/2004/2084/0", "title": "Multi-Media Modeling Conference, International", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/iccv/2001/1143/1/00937525", "title": "Smarter presentations: exploiting homography in camera-projector systems", "doi": null, "abstractUrl": "/proceedings-article/iccv/2001/00937525/12OmNxwncaw", "parentPublication": { "id": "proceedings/iccv/2001/1143/1", "title": "Computer Vision, IEEE International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2014/4308/0/4308a449", "title": "Projection Center Calibration for a Co-located Projector Camera System", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2014/4308a449/12OmNypIYA4", "parentPublication": { "id": "proceedings/cvprw/2014/4308/0", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2010/7029/0/05543466", "title": "Interactive display of image details using a camera-coupled mobile projector", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2010/05543466/12OmNzdoMtZ", "parentPublication": { "id": "proceedings/cvprw/2010/7029/0", "title": "2010 IEEE Computer Society Conference on Computer Vision and Pattern Recognition - Workshops", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2015/11/07164353", "title": "On-Site Semi-Automatic Calibration and Registration of a Projector-Camera System Using Arbitrary Objects with Known Geometry", "doi": null, "abstractUrl": "/journal/tg/2015/11/07164353/13rRUEgs2M6", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2008/10/ttp2008101831", "title": "Robust and Accurate Visual Echo Cancelation in a Full-duplex Projector-Camera System", "doi": null, "abstractUrl": "/journal/tp/2008/10/ttp2008101831/13rRUxjQyip", 
"parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2019/4765/0/476500a261", "title": "A Projector Calibration Method Using a Mobile Camera for Projection Mapping System", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2019/476500a261/1gysikN6QOQ", "parentPublication": { "id": "proceedings/ismar-adjunct/2019/4765/0", "title": "2019 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNx8wTfL", "title": "ICPR 2008 19th International Conference on Pattern Recognition", "acronym": "icpr", "groupId": "1000545", "volume": "0", "displayVolume": "0", "year": "2008", "__typename": "ProceedingType" }, "article": { "id": "12OmNC8dgoP", "doi": "10.1109/ICPR.2008.4761481", "title": "Camera calibration for uneven terrains by observing pedestrians", "normalizedTitle": "Camera calibration for uneven terrains by observing pedestrians", "abstract": "A calibrated camera is essential for computer vision systems. The prime reason being that such a camera acts as an angle measuring device. Once the camera is calibrated, applications like 3D reconstruction or metrology or other applications requiring real world information from the video sequences can be envisioned. Motivated by this, we address the problem of calibrating multiple cameras, with an overlapping field of view (FoV), observing pedestrians in a scene walking on an uneven terrain. This problem of calibration from an uneven terrain has so far not been addressed in the vision community. We automatically estimated the infinite homography between the cameras by using the special geometric information obtained from observing pedestrians. This homography provides constraints on the intrinsic (or interior) camera parameters while also enabling us to estimate the extrinsic (or exterior) camera parameters. We test the proposed method on real as well as synthetic data; encouraging results demonstrate the applicability of the proposed method.", "abstracts": [ { "abstractType": "Regular", "content": "A calibrated camera is essential for computer vision systems. The prime reason being that such a camera acts as an angle measuring device. Once the camera is calibrated, applications like 3D reconstruction or metrology or other applications requiring real world information from the video sequences can be envisioned. 
Motivated by this, we address the problem of calibrating multiple cameras, with an overlapping field of view (FoV), observing pedestrians in a scene walking on an uneven terrain. This problem of calibration from an uneven terrain has so far not been addressed in the vision community. We automatically estimated the infinite homography between the cameras by using the special geometric information obtained from observing pedestrians. This homography provides constraints on the intrinsic (or interior) camera parameters while also enabling us to estimate the extrinsic (or exterior) camera parameters. We test the proposed method on real as well as synthetic data; encouraging results demonstrate the applicability of the proposed method.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "A calibrated camera is essential for computer vision systems. The prime reason being that such a camera acts as an angle measuring device. Once the camera is calibrated, applications like 3D reconstruction or metrology or other applications requiring real world information from the video sequences can be envisioned. Motivated by this, we address the problem of calibrating multiple cameras, with an overlapping field of view (FoV), observing pedestrians in a scene walking on an uneven terrain. This problem of calibration from an uneven terrain has so far not been addressed in the vision community. We automatically estimated the infinite homography between the cameras by using the special geometric information obtained from observing pedestrians. This homography provides constraints on the intrinsic (or interior) camera parameters while also enabling us to estimate the extrinsic (or exterior) camera parameters. 
We test the proposed method on real as well as synthetic data; encouraging results demonstrate the applicability of the proposed method.", "fno": "04761481", "keywords": [ "Calibration", "Computational Geometry", "Computer Vision", "Image Sequences", "Video Cameras", "Uneven Terrain", "Camera Calibration", "Computer Vision System", "Pedestrian Observation", "Scene Walking", "Infinite Homography Estimation", "Special Geometric Information", "Video Sequence", "Cameras", "Calibration", "Application Software", "Computer Vision", "Goniometers", "Metrology", "Video Sequences", "Layout", "Legged Locomotion", "Testing" ], "authors": [ { "affiliation": "IRISA/INRIA Rennes, France", "fullName": "Imran N. Junejo", "givenName": "Imran N.", "surname": "Junejo", "__typename": "ArticleAuthorType" } ], "idPrefix": "icpr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2008-12-01T00:00:00", "pubType": "proceedings", "pages": "", "year": "2008", "issn": "1051-4651", "isbn": "978-1-4244-2174-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "04761480", "articleId": "12OmNy5R3FF", "__typename": "AdjacentArticleType" }, "next": { "fno": "04761482", "articleId": "12OmNy6Zs4q", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvpr/2016/8851/0/8851d025", "title": "Camera Calibration from Periodic Motion of a Pedestrian", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2016/8851d025/12OmNBIWXAf", "parentPublication": { "id": "proceedings/cvpr/2016/8851/0", "title": "2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/dicta/2010/4271/0/4271a335", "title": "Video Metrology without the Image-to-Ground Homography", "doi": null, "abstractUrl": 
"/proceedings-article/dicta/2010/4271a335/12OmNvm6VHT", "parentPublication": { "id": "proceedings/dicta/2010/4271/0", "title": "2010 International Conference on Digital Image Computing: Techniques and Applications", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/robot/1991/2163/0/00132014", "title": "Laser rangefinder calibration for a walking robot", "doi": null, "abstractUrl": "/proceedings-article/robot/1991/00132014/12OmNy9Prfx", "parentPublication": { "id": "proceedings/robot/1991/2163/0", "title": "Proceedings. 1991 IEEE International Conference on Robotics and Automation", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/avss/2006/2688/0/26880092", "title": "Robust Auto-Calibration from Pedestrians", "doi": null, "abstractUrl": "/proceedings-article/avss/2006/26880092/12OmNyv7m16", "parentPublication": { "id": "proceedings/avss/2006/2688/0", "title": "2006 IEEE International Conference on Video and Signal Based Surveillance", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2010/4109/0/4109a320", "title": "Active Calibration of Camera-Projector Systems Based on Planar Homography", "doi": null, "abstractUrl": "/proceedings-article/icpr/2010/4109a320/12OmNzDehgc", "parentPublication": { "id": "proceedings/icpr/2010/4109/0", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccvw/2011/0063/0/06130454", "title": "Camera auto-calibration using pedestrians and zebra-crossings", "doi": null, "abstractUrl": "/proceedings-article/iccvw/2011/06130454/12OmNzyp5VV", "parentPublication": { "id": "proceedings/iccvw/2011/0063/0", "title": "2011 IEEE International Conference on Computer Vision Workshops (ICCV Workshops)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"trans/tm/2018/06/08078196", "title": "Use of Phone Sensors to Enhance Distracted Pedestrians’ Safety", "doi": null, "abstractUrl": "/journal/tm/2018/06/08078196/13rRUxZ0o2c", "parentPublication": { "id": "trans/tm", "title": "IEEE Transactions on Mobile Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/sitis/2018/9385/0/938500a676", "title": "Action and Intention Recognition of Pedestrians in Urban Traffic", "doi": null, "abstractUrl": "/proceedings-article/sitis/2018/938500a676/19RSwQ4eq9q", "parentPublication": { "id": "proceedings/sitis/2018/9385/0", "title": "2018 14th International Conference on Signal-Image Technology & Internet-Based Systems (SITIS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2020/5608/0/09089462", "title": "VR Bridges: Simulating Smooth Uneven Surfaces in VR", "doi": null, "abstractUrl": "/proceedings-article/vr/2020/09089462/1jIxeZPD4LS", "parentPublication": { "id": "proceedings/vr/2020/5608/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ictai/2020/9228/0/922800a852", "title": "A Multi-Camera Tracker for Monitoring Pedestrians in Enclosed Environments", "doi": null, "abstractUrl": "/proceedings-article/ictai/2020/922800a852/1pP3yqdGSZy", "parentPublication": { "id": "proceedings/ictai/2020/9228/0", "title": "2020 IEEE 32nd International Conference on Tools with Artificial Intelligence (ICTAI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAkEU4f", "title": "2011 IEEE International Conference on Multimedia and Expo", "acronym": "icme", "groupId": "1000477", "volume": "0", "displayVolume": "0", "year": "2011", "__typename": "ProceedingType" }, "article": { "id": "12OmNCd2rEL", "doi": "10.1109/ICME.2011.6011885", "title": "Novel projector calibration approaches of multi-resolution display", "normalizedTitle": "Novel projector calibration approaches of multi-resolution display", "abstract": "This paper proposes convenient and useful approaches to automatically calibrate the projectors of a multi-resolution display. The proposed approaches estimate both the keystone effect and misalignment of the projections with an assistance of a color camera. Structured light patterns are employed to construct the geometric relationship between projectors and the projection surface, and then pre-warp the images so that they appear undistorted as a result. Experimental results demonstrate that the proposed approaches successfully reduce the human-effort and lower the calibration time of multi-resolution display calibration task.", "abstracts": [ { "abstractType": "Regular", "content": "This paper proposes convenient and useful approaches to automatically calibrate the projectors of a multi-resolution display. The proposed approaches estimate both the keystone effect and misalignment of the projections with an assistance of a color camera. Structured light patterns are employed to construct the geometric relationship between projectors and the projection surface, and then pre-warp the images so that they appear undistorted as a result. 
Experimental results demonstrate that the proposed approaches successfully reduce the human-effort and lower the calibration time of multi-resolution display calibration task.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper proposes convenient and useful approaches to automatically calibrate the projectors of a multi-resolution display. The proposed approaches estimate both the keystone effect and misalignment of the projections with an assistance of a color camera. Structured light patterns are employed to construct the geometric relationship between projectors and the projection surface, and then pre-warp the images so that they appear undistorted as a result. Experimental results demonstrate that the proposed approaches successfully reduce the human-effort and lower the calibration time of multi-resolution display calibration task.", "fno": "06011885", "keywords": [ "Image Resolution", "Calibration", "Manuals", "Indexes", "Nonhomogeneous Media", "Cameras", "Projector Calibration", "Keystone Correction", "Structured Light", "Rear Projection", "Multi Projector", "Multi Resolution" ], "authors": [ { "affiliation": "Graduate Institute of Networking and Multimedia, National Taiwan University, Taiwan", "fullName": "Po-Hsun Chiu", "givenName": null, "surname": "Po-Hsun Chiu", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate Institute of Networking and Multimedia, National Taiwan University, Taiwan", "fullName": "Shih-Yao Lin", "givenName": null, "surname": "Shih-Yao Lin", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate Institute of Networking and Multimedia, National Taiwan University, Taiwan", "fullName": "Li-Wei Chan", "givenName": "Li-Wei", "surname": "Chan", "__typename": "ArticleAuthorType" }, { "affiliation": "Graduate Institute of Networking and Multimedia, National Taiwan University, Taiwan", "fullName": "Neng-Hao Yu", "givenName": null, "surname": "Neng-Hao Yu", "__typename": "ArticleAuthorType" }, { 
"affiliation": "Graduate Institute of Networking and Multimedia, National Taiwan University, Taiwan", "fullName": "Yi-Ping Hung", "givenName": "Yi-Ping", "surname": "Hung", "__typename": "ArticleAuthorType" } ], "idPrefix": "icme", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2011-07-01T00:00:00", "pubType": "proceedings", "pages": "1-6", "year": "2011", "issn": "1945-7871", "isbn": "978-1-61284-348-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06011884", "articleId": "12OmNzSh1aJ", "__typename": "AdjacentArticleType" }, "next": { "fno": "06011886", "articleId": "12OmNwoPtAj", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icig/2004/2244/0/01410480", "title": "A survey of multi-projector tiled display wall construction", "doi": null, "abstractUrl": "/proceedings-article/icig/2004/01410480/12OmNAWH9up", "parentPublication": { "id": "proceedings/icig/2004/2244/0", "title": "Proceedings. 
Third International Conference on Image and Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/wacv/2017/4822/0/07926707", "title": "Automatic Calibration of a Multiple-Projector Spherical Fish Tank VR Display", "doi": null, "abstractUrl": "/proceedings-article/wacv/2017/07926707/12OmNAoDhTe", "parentPublication": { "id": "proceedings/wacv/2017/4822/0", "title": "2017 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2016/3641/0/3641a063", "title": "Practical and Precise Projector-Camera Calibration", "doi": null, "abstractUrl": "/proceedings-article/ismar/2016/3641a063/12OmNB7cjhR", "parentPublication": { "id": "proceedings/ismar/2016/3641/0", "title": "2016 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2011/0529/0/05981726", "title": "Fully automatic multi-projector calibration with an uncalibrated camera", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2011/05981726/12OmNBSBk4F", "parentPublication": { "id": "proceedings/cvprw/2011/0529/0", "title": "CVPR 2011 WORKSHOPS", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2004/2128/1/212810014", "title": "Auto-Calibration of Multi-Projector Display Walls", "doi": null, "abstractUrl": "/proceedings-article/icpr/2004/212810014/12OmNCb3fwi", "parentPublication": { "id": "proceedings/icpr/2004/2128/1", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2009/3994/0/05204317", "title": "Geometric video projector auto-calibration", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2009/05204317/12OmNCxtyKC", "parentPublication": { 
"id": "proceedings/cvprw/2009/3994/0", "title": "2009 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ieee-vis/2001/7200/0/7200yang", "title": "PixelFlex: A Reconfigurable Multi-Projector Display System", "doi": null, "abstractUrl": "/proceedings-article/ieee-vis/2001/7200yang/12OmNvnOwwY", "parentPublication": { "id": "proceedings/ieee-vis/2001/7200/0", "title": "Visualization Conference, IEEE", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446433", "title": "A Calibration Method for Large-Scale Projection Based Floor Display System", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446433/13bd1gJ1v0M", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2006/05/v1101", "title": "Asynchronous Distributed Calibration for Scalable and Reconfigurable Multi-Projector Displays", "doi": null, "abstractUrl": "/journal/tg/2006/05/v1101/13rRUwInvJ9", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/crv/2018/6481/0/648101a190", "title": "Multi-projector Resolution Enhancement Through Biased Interpolation", "doi": null, "abstractUrl": "/proceedings-article/crv/2018/648101a190/17D45XacGiu", "parentPublication": { "id": "proceedings/crv/2018/6481/0", "title": "2018 15th Conference on Computer and Robot Vision (CRV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwp74rq", "title": "Proceedings of IEEE Conference on Computer Vision and Pattern Recognition", "acronym": "cvpr", "groupId": "1000147", "volume": "0", "displayVolume": "0", "year": "1993", "__typename": "ProceedingType" }, "article": { "id": "12OmNyo1nQx", "doi": "10.1109/CVPR.1993.341038", "title": "Efficient and robust methods of accurate camera calibration", "normalizedTitle": "Efficient and robust methods of accurate camera calibration", "abstract": "An algorithm for camera calibration is presented. It is a significant improvement in mathematical simplicity, accuracy and computational efficiency in the solution of all extrinsic (external camera geometric) and intrinsic (internal camera geometric and camera optics) parameters. The method involves a direct transformation from the three-dimensional (3-D) object world to the two-dimensional (2-D) image or sensor plane in terms of homogeneous vector forms for both coplanar and noncoplanar distributions of object points. A strong robust property of the proposed algorithm is demonstrated by proving that if the camera is calibrated with image data not compensated for image center displacement and scale factor, the proposed algorithm yields parameters that cause no error in the computation of both image and world coordinates.<>", "abstracts": [ { "abstractType": "Regular", "content": "An algorithm for camera calibration is presented. It is a significant improvement in mathematical simplicity, accuracy and computational efficiency in the solution of all extrinsic (external camera geometric) and intrinsic (internal camera geometric and camera optics) parameters. The method involves a direct transformation from the three-dimensional (3-D) object world to the two-dimensional (2-D) image or sensor plane in terms of homogeneous vector forms for both coplanar and noncoplanar distributions of object points. 
A strong robust property of the proposed algorithm is demonstrated by proving that if the camera is calibrated with image data not compensated for image center displacement and scale factor, the proposed algorithm yields parameters that cause no error in the computation of both image and world coordinates.<>", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "An algorithm for camera calibration is presented. It is a significant improvement in mathematical simplicity, accuracy and computational efficiency in the solution of all extrinsic (external camera geometric) and intrinsic (internal camera geometric and camera optics) parameters. The method involves a direct transformation from the three-dimensional (3-D) object world to the two-dimensional (2-D) image or sensor plane in terms of homogeneous vector forms for both coplanar and noncoplanar distributions of object points. A strong robust property of the proposed algorithm is demonstrated by proving that if the camera is calibrated with image data not compensated for image center displacement and scale factor, the proposed algorithm yields parameters that cause no error in the computation of both image and world coordinates.", "fno": "00341038", "keywords": [ "Cameras", "Calibration", "Computational Complexity", "Image Processing", "Extrinsic Parameters", "Intrinsic Parameters", "Camera Optics Parameters", "Camera Geometric Parameters", "Coplanar Distributions", "Robust Methods", "Accurate Camera Calibration", "Mathematical Simplicity", "Computational Efficiency", "Homogeneous Vector Forms", "Noncoplanar Distributions", "Robustness", "Cameras", "Calibration", "Equations", "Optical Sensors", "Image Sensors", "Computational Efficiency", "Image Analysis", "Integrated Circuit Modeling", "Optical Computing" ], "authors": [ { "affiliation": "Sch. of Electr. Eng., Purdue Univ., W. Lafayette, IN, USA", "fullName": "C. 
Chatterjee", "givenName": "C.", "surname": "Chatterjee", "__typename": "ArticleAuthorType" }, { "affiliation": "Sch. of Electr. Eng., Purdue Univ., W. Lafayette, IN, USA", "fullName": "V. Roychowdhury", "givenName": "V.", "surname": "Roychowdhury", "__typename": "ArticleAuthorType" } ], "idPrefix": "cvpr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "1993-01-01T00:00:00", "pubType": "proceedings", "pages": "664-665", "year": "1993", "issn": "1063-6919", "isbn": null, "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "00341037", "articleId": "12OmNym2bUN", "__typename": "AdjacentArticleType" }, "next": { "fno": "00341039", "articleId": "12OmNvrvjcu", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cvpr/1993/3880/0/00341083", "title": "Dynamic camera self-calibration from controlled motion sequences", "doi": null, "abstractUrl": "/proceedings-article/cvpr/1993/00341083/12OmNASraBT", "parentPublication": { "id": "proceedings/cvpr/1993/3880/0", "title": "Proceedings of IEEE Conference on Computer Vision and Pattern Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2013/2869/0/06671778", "title": "Single-shot extrinsic calibration of a generically configured RGB-D camera rig from scene constraints", "doi": null, "abstractUrl": "/proceedings-article/ismar/2013/06671778/12OmNAle6AS", "parentPublication": { "id": "proceedings/ismar/2013/2869/0", "title": "2013 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ssst/1991/2190/0/00138520", "title": "Decomposition of the camera calibration matrix", "doi": null, "abstractUrl": "/proceedings-article/ssst/1991/00138520/12OmNBTs7Fx", 
"parentPublication": { "id": "proceedings/ssst/1991/2190/0", "title": "The Twenty-Third Southeastern Symposium on System Theory", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2014/5209/0/5209a202", "title": "Non-frontal Camera Calibration Using Focal Stack Imagery", "doi": null, "abstractUrl": "/proceedings-article/icpr/2014/5209a202/12OmNC943xl", "parentPublication": { "id": "proceedings/icpr/2014/5209/0", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/1990/2057/0/00139552", "title": "Sensitivity of the pose refinement problem to accurate estimation of camera parameters", "doi": null, "abstractUrl": "/proceedings-article/iccv/1990/00139552/12OmNqBtiQF", "parentPublication": { "id": "proceedings/iccv/1990/2057/0", "title": "Proceedings Third International Conference on Computer Vision", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2015/8391/0/8391a828", "title": "Accurate Camera Calibration Robust to Defocus Using a Smartphone", "doi": null, "abstractUrl": "/proceedings-article/iccv/2015/8391a828/12OmNqGA4Zd", "parentPublication": { "id": "proceedings/iccv/2015/8391/0", "title": "2015 IEEE International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/1990/2062/1/00118093", "title": "An accurate camera calibration for the aerial image analysis", "doi": null, "abstractUrl": "/proceedings-article/icpr/1990/00118093/12OmNvStcwi", "parentPublication": { "id": "proceedings/icpr/1990/2062/1", "title": "Proceedings 10th International Conference on Pattern Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dimpvt/2012/4873/0/4873a464", "title": "Simple, Accurate, and 
Robust Projector-Camera Calibration", "doi": null, "abstractUrl": "/proceedings-article/3dimpvt/2012/4873a464/12OmNx0RIZY", "parentPublication": { "id": "proceedings/3dimpvt/2012/4873/0", "title": "2012 Second International Conference on 3D Imaging, Modeling, Processing, Visualization & Transmission", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2009/02/ttp2009020376", "title": "High-Accuracy and Robust Localization of Large Control Markers for Geometric Camera Calibration", "doi": null, "abstractUrl": "/journal/tp/2009/02/ttp2009020376/13rRUwwJWGT", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2021/2812/0/281200q6208", "title": "CTRL-C: Camera calibration TRansformer with Line-Classification", "doi": null, "abstractUrl": "/proceedings-article/iccv/2021/281200q6208/1BmIOkkSQUg", "parentPublication": { "id": "proceedings/iccv/2021/2812/0", "title": "2021 IEEE/CVF International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAXxXaK", "title": "2017 IEEE International Conference on Computer Vision (ICCV)", "acronym": "iccv", "groupId": "1000149", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNywfKHK", "doi": "10.1109/ICCV.2017.277", "title": "Low-Dimensionality Calibration through Local Anisotropic Scaling for Robust Hand Model Personalization", "normalizedTitle": "Low-Dimensionality Calibration through Local Anisotropic Scaling for Robust Hand Model Personalization", "abstract": "We present a robust algorithm for personalizing a sphere-mesh tracking model to a user from a collection of depth measurements. Our core contribution is to demonstrate how simple geometric reasoning can be exploited to build a shape-space, and how its performance is comparable to shape-spaces constructed from datasets of carefully calibrated models. We achieve this goal by first re-parameterizing the geometry of the tracking template, and introducing a multi-stage calibration optimization. Our novel parameterization decouples the degrees of freedom for pose and shape, resulting in improved convergence properties. Our analytically differentiable multi-stage calibration pipeline optimizes for the model in the natural low-dimensional space of local anisotropic scalings, leading to an effective solution that can be easily embedded in other tracking/calibration algorithms. Compared to existing sphere-mesh calibration algorithms, quantitative experiments assess our algorithm possesses a larger convergence basin, and our personalized models allows to perform motion tracking with superior accuracy.", "abstracts": [ { "abstractType": "Regular", "content": "We present a robust algorithm for personalizing a sphere-mesh tracking model to a user from a collection of depth measurements. 
Our core contribution is to demonstrate how simple geometric reasoning can be exploited to build a shape-space, and how its performance is comparable to shape-spaces constructed from datasets of carefully calibrated models. We achieve this goal by first re-parameterizing the geometry of the tracking template, and introducing a multi-stage calibration optimization. Our novel parameterization decouples the degrees of freedom for pose and shape, resulting in improved convergence properties. Our analytically differentiable multi-stage calibration pipeline optimizes for the model in the natural low-dimensional space of local anisotropic scalings, leading to an effective solution that can be easily embedded in other tracking/calibration algorithms. Compared to existing sphere-mesh calibration algorithms, quantitative experiments assess our algorithm possesses a larger convergence basin, and our personalized models allows to perform motion tracking with superior accuracy.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We present a robust algorithm for personalizing a sphere-mesh tracking model to a user from a collection of depth measurements. Our core contribution is to demonstrate how simple geometric reasoning can be exploited to build a shape-space, and how its performance is comparable to shape-spaces constructed from datasets of carefully calibrated models. We achieve this goal by first re-parameterizing the geometry of the tracking template, and introducing a multi-stage calibration optimization. Our novel parameterization decouples the degrees of freedom for pose and shape, resulting in improved convergence properties. Our analytically differentiable multi-stage calibration pipeline optimizes for the model in the natural low-dimensional space of local anisotropic scalings, leading to an effective solution that can be easily embedded in other tracking/calibration algorithms. 
Compared to existing sphere-mesh calibration algorithms, quantitative experiments assess our algorithm possesses a larger convergence basin, and our personalized models allows to perform motion tracking with superior accuracy.", "fno": "1032c554", "keywords": [ "Computational Geometry", "Computer Graphics", "Mesh Generation", "Optimisation", "Depth Measurements", "Core Contribution", "Simple Geometric Reasoning", "Shape Space", "Carefully Calibrated Models", "Tracking Template", "Multistage Calibration Optimization", "Novel Parameterization", "Degrees Of Freedom", "Pose Shape", "Multistage Calibration Pipeline Optimizes", "Tracking Calibration Algorithms", "Existing Sphere Mesh Calibration Algorithms", "Personalized Models", "Low Dimensionality Calibration", "Robust Hand Model Personalization", "Sphere Mesh Tracking Model", "Calibration", "Optimization", "Shape", "Robustness", "Convergence", "Tracking", "Cameras" ], "authors": [ { "affiliation": null, "fullName": "Edoardo Remelli", "givenName": "Edoardo", "surname": "Remelli", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Anastasia Tkach", "givenName": "Anastasia", "surname": "Tkach", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Andrea Tagliasacchi", "givenName": "Andrea", "surname": "Tagliasacchi", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Mark Pauly", "givenName": "Mark", "surname": "Pauly", "__typename": "ArticleAuthorType" } ], "idPrefix": "iccv", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-10-01T00:00:00", "pubType": "proceedings", "pages": "2554-2562", "year": "2017", "issn": "2380-7504", "isbn": "978-1-5386-1032-9", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "1032c545", "articleId": "12OmNx7G65m", "__typename": "AdjacentArticleType" }, "next": { "fno": "1032c563", "articleId": 
"12OmNzYeALr", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/ismar/2017/2943/0/2943a042", "title": "Robust Geometric Self-Calibration of Generic Multi-Projector Camera Systems", "doi": null, "abstractUrl": "/proceedings-article/ismar/2017/2943a042/12OmNCbCrRh", "parentPublication": { "id": "proceedings/ismar/2017/2943/0", "title": "2017 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2016/0842/0/07460047", "title": "Evaluation of user-centric optical see-through head-mounted display calibration using a leap motion controller", "doi": null, "abstractUrl": "/proceedings-article/3dui/2016/07460047/12OmNrJRPdz", "parentPublication": { "id": "proceedings/3dui/2016/0842/0", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/1999/0164/1/01640585", "title": "Calibration of Hand-Held Camera Sequences for Plenoptic Modeling", "doi": null, "abstractUrl": "/proceedings-article/iccv/1999/01640585/12OmNwBBqgC", "parentPublication": { "id": "proceedings/iccv/1999/0164/1", "title": "Proceedings of the Seventh IEEE International Conference on Computer Vision", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icat/2013/11/0/06728915", "title": "Wide area optical user tracking in unconstrained indoor environments", "doi": null, "abstractUrl": "/proceedings-article/icat/2013/06728915/12OmNx2zjvN", "parentPublication": { "id": "proceedings/icat/2013/11/0", "title": "2013 23rd International Conference on Artificial Reality and Telexistence (ICAT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dv/2014/7000/1/7000a187", "title": "Calibration of 3D Sensors Using a 
Spherical Target", "doi": null, "abstractUrl": "/proceedings-article/3dv/2014/7000a187/12OmNx4Q6A8", "parentPublication": { "id": "proceedings/3dv/2014/7000/2", "title": "2014 2nd International Conference on 3D Vision (3DV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2016/0836/0/07504739", "title": "Evaluation of hand and stylus based calibration for optical see-through head-mounted displays using leap motion", "doi": null, "abstractUrl": "/proceedings-article/vr/2016/07504739/12OmNxE2mZD", "parentPublication": { "id": "proceedings/vr/2016/0836/0", "title": "2016 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/crv/2015/1986/0/1986a176", "title": "Mobile 3D Gaze Tracking Calibration", "doi": null, "abstractUrl": "/proceedings-article/crv/2015/1986a176/12OmNzzxusS", "parentPublication": { "id": "proceedings/crv/2015/1986/0", "title": "2015 12th Conference on Computer and Robot Vision (CRV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2013/10/ttp2013102357", "title": "Calibration by Correlation Using Metric Embedding from Nonmetric Similarities", "doi": null, "abstractUrl": "/journal/tp/2013/10/ttp2013102357/13rRUxjQyd1", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/11/08466021", "title": "Auto-Calibration for Dynamic Multi-Projection Mapping on Arbitrary Surfaces", "doi": null, "abstractUrl": "/journal/tg/2018/11/08466021/14M3DYlzziw", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798073", "title": "A Fast Multi-RGBD-Camera 
Calibration", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798073/1cJ1cEQE120", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNzYNNiY", "doi": "10.1109/VR.2015.7223434", "title": "A multi-projector display system of arbitrary shape, size and resolution", "normalizedTitle": "A multi-projector display system of arbitrary shape, size and resolution", "abstract": "In this demo we will demonstrate integration of general content delivery from a windows desktop to a multi-projector display of arbitrary, shape, size and resolution automatically calibrated using our calibration methods. We have developed these sophisticated completely automatic geometric and color registration techniques in our lab for deploying seamless multi-projector displays on popular non-planar surfaces (e.g. cylinders, domes, truncated domes). This work has gotten significant attention in both VR and Visualization venues in the past 5 years and this will be the first time such calibration will be integrated with content delivery.", "abstracts": [ { "abstractType": "Regular", "content": "In this demo we will demonstrate integration of general content delivery from a windows desktop to a multi-projector display of arbitrary, shape, size and resolution automatically calibrated using our calibration methods. We have developed these sophisticated completely automatic geometric and color registration techniques in our lab for deploying seamless multi-projector displays on popular non-planar surfaces (e.g. cylinders, domes, truncated domes). 
This work has gotten significant attention in both VR and Visualization venues in the past 5 years and this will be the first time such calibration will be integrated with content delivery.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this demo we will demonstrate integration of general content delivery from a windows desktop to a multi-projector display of arbitrary, shape, size and resolution automatically calibrated using our calibration methods. We have developed these sophisticated completely automatic geometric and color registration techniques in our lab for deploying seamless multi-projector displays on popular non-planar surfaces (e.g. cylinders, domes, truncated domes). This work has gotten significant attention in both VR and Visualization venues in the past 5 years and this will be the first time such calibration will be integrated with content delivery.", "fno": "07223434", "keywords": [ "Shape", "Visualization", "Calibration", "Cameras", "Image Color Analysis", "Electronic Mail", "Visualization Systems And Tools" ], "authors": [ { "affiliation": "University of California, Irvine", "fullName": "Aditi Majumder", "givenName": "Aditi", "surname": "Majumder", "__typename": "ArticleAuthorType" }, { "affiliation": "University of California, Irvine", "fullName": "Duy-Quoc Lai", "givenName": "Duy-Quoc", "surname": "Lai", "__typename": "ArticleAuthorType" }, { "affiliation": "University of California, Irvine", "fullName": "Mahdi Abbaspour Tehrani", "givenName": "Mahdi Abbaspour", "surname": "Tehrani", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "339-340", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07223433", "articleId": 
"12OmNzYNN2z", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223435", "articleId": "12OmNy2JtcL", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/wacv/2017/4822/0/07926707", "title": "Automatic Calibration of a Multiple-Projector Spherical Fish Tank VR Display", "doi": null, "abstractUrl": "/proceedings-article/wacv/2017/07926707/12OmNAoDhTe", "parentPublication": { "id": "proceedings/wacv/2017/4822/0", "title": "2017 IEEE Winter Conference on Applications of Computer Vision (WACV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223400", "title": "Semi-automatic calibration of a projector-camera system using arbitrary objects with known geometry", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223400/12OmNBJw9RK", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2008/2174/0/04761601", "title": "Calibration of projector-camera systems from virtual mutual projection", "doi": null, "abstractUrl": "/proceedings-article/icpr/2008/04761601/12OmNBp52Hx", "parentPublication": { "id": "proceedings/icpr/2008/2174/0", "title": "ICPR 2008 19th International Conference on Pattern Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2009/3994/0/05204316", "title": "A projector-camera system for creating a display with water drops", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2009/05204316/12OmNqH9hk4", "parentPublication": { "id": "proceedings/cvprw/2009/3994/0", "title": "2009 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/svr/2014/4261/0/4261a200", "title": "Projection Mapping for a Kinect-Projector System", "doi": null, "abstractUrl": "/proceedings-article/svr/2014/4261a200/12OmNwe2Izu", "parentPublication": { "id": "proceedings/svr/2014/4261/0", "title": "2014 XVI Symposium on Virtual and Augmented Reality (SVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2017/0457/0/0457d596", "title": "Simultaneous Geometric and Radiometric Calibration of a Projector-Camera Pair", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2017/0457d596/12OmNwpGgNQ", "parentPublication": { "id": "proceedings/cvpr/2017/0457/0", "title": "2017 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvprw/2014/4308/0/4308a449", "title": "Projection Center Calibration for a Co-located Projector Camera System", "doi": null, "abstractUrl": "/proceedings-article/cvprw/2014/4308a449/12OmNypIYA4", "parentPublication": { "id": "proceedings/cvprw/2014/4308/0", "title": "2014 IEEE Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2018/7592/0/08699178", "title": "A Single-Shot-Per-Pose Camera-Projector Calibration System for Imperfect Planar Targets", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699178/19F1O0IjR8k", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/04/08889677", "title": "Automated Geometric Registration for Multi-Projector Displays on Arbitrary 3D Shapes Using Uncalibrated Devices", "doi": null, "abstractUrl": 
"/journal/tg/2021/04/08889677/1eBugxXEgLe", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2021/11/09523844", "title": "Directionally Decomposing Structured Light for Projector Calibration", "doi": null, "abstractUrl": "/journal/tg/2021/11/09523844/1wpqmnzDSzm", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAsTgXc", "title": "2011 IEEE International Conference on Computer Vision Workshops (ICCV Workshops)", "acronym": "iccvw", "groupId": "1800041", "volume": "0", "displayVolume": "0", "year": "2011", "__typename": "ProceedingType" }, "article": { "id": "12OmNzzxuyx", "doi": "10.1109/ICCVW.2011.6130255", "title": "Calibration of radially symmetric distortion based on linearity in the calibrated image", "normalizedTitle": "Calibration of radially symmetric distortion based on linearity in the calibrated image", "abstract": "For calibration of general radially symmetric distortion of omnidirectional cameras such as fish-eye lenses, calibration parameters are usually estimated so that curved lines, which are supposed to be straight in the real-world, are mapped to straight lines in the calibrated image, which is called plumbline principle. Under the principle, the camera with radially symmetric distortion can be calibrated by at least one distorted line in an image, theoretically, and the calibrated image is equivalent to the image taken by an ideal pin-hole camera. In this paper, the method to optimize the calibration parameters by maximizing the sum of the straightness, which is invariant under translation, rotation and magnification (scaling), of distorted lines on calibrated image is proposed. The performance of the proposed method is evaluated by artificial data and a real image.", "abstracts": [ { "abstractType": "Regular", "content": "For calibration of general radially symmetric distortion of omnidirectional cameras such as fish-eye lenses, calibration parameters are usually estimated so that curved lines, which are supposed to be straight in the real-world, are mapped to straight lines in the calibrated image, which is called plumbline principle. 
Under the principle, the camera with radially symmetric distortion can be calibrated by at least one distorted line in an image, theoretically, and the calibrated image is equivalent to the image taken by an ideal pin-hole camera. In this paper, the method to optimize the calibration parameters by maximizing the sum of the straightness, which is invariant under translation, rotation and magnification (scaling), of distorted lines on calibrated image is proposed. The performance of the proposed method is evaluated by artificial data and a real image.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "For calibration of general radially symmetric distortion of omnidirectional cameras such as fish-eye lenses, calibration parameters are usually estimated so that curved lines, which are supposed to be straight in the real-world, are mapped to straight lines in the calibrated image, which is called plumbline principle. Under the principle, the camera with radially symmetric distortion can be calibrated by at least one distorted line in an image, theoretically, and the calibrated image is equivalent to the image taken by an ideal pin-hole camera. In this paper, the method to optimize the calibration parameters by maximizing the sum of the straightness, which is invariant under translation, rotation and magnification (scaling), of distorted lines on calibrated image is proposed. 
The performance of the proposed method is evaluated by artificial data and a real image.", "fno": "06130255", "keywords": [ "Calibration", "Cameras", "Image Processing", "Optical Distortion", "Realistic Images", "Radially Symmetric Distortion Calibration", "Calibrated Image", "Omnidirectional Cameras", "Calibration Parameter Estimation", "Plumbline Principle", "Ideal Pin Hole Camera", "Real Image", "Artificial Data", "Calibration", "Covariance Matrix", "Cameras", "Vectors", "Linearity", "Minimization", "Estimation" ], "authors": [ { "affiliation": "National Institute of Advanced Industrial Science and Technology (AIST), 1-1-1 Umezono, Tsukuba, Ibaraki 305-8568, JAPAN", "fullName": "Jun Fujiki", "givenName": "Jun", "surname": "Fujiki", "__typename": "ArticleAuthorType" }, { "affiliation": "Waseda University, 3-4-1 Okubo, Shinjuku, Tokyo 169-8555, JAPAN", "fullName": "Hideitsu Hino", "givenName": "Hideitsu", "surname": "Hino", "__typename": "ArticleAuthorType" }, { "affiliation": "AIST, Japan", "fullName": "Shotaro Akaho", "givenName": "Shotaro", "surname": "Akaho", "__typename": "ArticleAuthorType" }, { "affiliation": "Waseda University, Japan", "fullName": "Noboru Murata", "givenName": "Noboru", "surname": "Murata", "__typename": "ArticleAuthorType" } ], "idPrefix": "iccvw", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2011-11-01T00:00:00", "pubType": "proceedings", "pages": "288-295", "year": "2011", "issn": null, "isbn": "978-1-4673-0063-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "06130254", "articleId": "12OmNz5JCeO", "__typename": "AdjacentArticleType" }, "next": { "fno": "06130256", "articleId": "12OmNBl6EHn", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/3dpvt/2006/2825/0/282500480", "title": "A Factorization Based Self-Calibration for 
Radially Symmetric Cameras", "doi": null, "abstractUrl": "/proceedings-article/3dpvt/2006/282500480/12OmNBpVQ9v", "parentPublication": { "id": "proceedings/3dpvt/2006/2825/0", "title": "Third International Symposium on 3D Data Processing, Visualization, and Transmission (3DPVT'06)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icpr/2010/4109/0/4109b812", "title": "Self-Calibration of Radially Symmetric Distortion by Model Selection", "doi": null, "abstractUrl": "/proceedings-article/icpr/2010/4109b812/12OmNrIJqwP", "parentPublication": { "id": "proceedings/icpr/2010/4109/0", "title": "Pattern Recognition, International Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isme/2010/7669/2/05573869", "title": "A New Camera Calibration Method Based on Two Stages Distortion Model", "doi": null, "abstractUrl": "/proceedings-article/isme/2010/05573869/12OmNwBT1lH", "parentPublication": { "id": "proceedings/isme/2010/7669/2", "title": "2010 International Conference of Information Science and Management Engineering. 
ISME 2010", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vts/2010/6649/0/05469549", "title": "Calibration-assisted production testing for digitally-calibrated ADCs", "doi": null, "abstractUrl": "/proceedings-article/vts/2010/05469549/12OmNwDj0Ws", "parentPublication": { "id": "proceedings/vts/2010/6649/0", "title": "2010 28th VLSI Test Symposium (VTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iccv/2015/8391/0/8391c192", "title": "Non-parametric Structure-Based Calibration of Radially Symmetric Cameras", "doi": null, "abstractUrl": "/proceedings-article/iccv/2015/8391c192/12OmNyxXlvx", "parentPublication": { "id": "proceedings/iccv/2015/8391/0", "title": "2015 IEEE International Conference on Computer Vision (ICCV)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/isme/2010/4132/2/4132b125", "title": "A New Camera Calibration Method Based on Two Stages Distortion Model", "doi": null, "abstractUrl": "/proceedings-article/isme/2010/4132b125/12OmNzR8Cyp", "parentPublication": { "id": "proceedings/isme/2010/4132/2", "title": "Information Science and Management Engineering, International Conference of", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cvpr/2004/2158/1/01315028", "title": "Radiometric calibration of a Helmholtz stereo rig", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2004/01315028/12OmNzdoMXW", "parentPublication": { "id": "proceedings/cvpr/2004/2158/1", "title": "Proceedings of the 2004 IEEE Computer Society Conference on Computer Vision and Pattern Recognition, 2004. 
CVPR 2004.", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2009/09/ttp2009091552", "title": "Calibration of Cameras with Radially Symmetric Distortion", "doi": null, "abstractUrl": "/journal/tp/2009/09/ttp2009091552/13rRUEgs2N6", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/2013/09/ttp2013092091", "title": "Calibration of Smooth Camera Models", "doi": null, "abstractUrl": "/journal/tp/2013/09/ttp2013092091/13rRUxNmPF7", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tp/1992/11/i1095", "title": "Simple Calibration Algorithm for High-Distortion Lens Camera", "doi": null, "abstractUrl": "/journal/tp/1992/11/i1095/13rRUyYjK5R", "parentPublication": { "id": "trans/tp", "title": "IEEE Transactions on Pattern Analysis & Machine Intelligence", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ17trBZEQ", "doi": "10.1109/VR.2019.8798019", "title": "Large-Scale Projection-Based Immersive Display: The Design and Implementation of LargeSpace", "normalizedTitle": "Large-Scale Projection-Based Immersive Display: The Design and Implementation of LargeSpace", "abstract": "In this paper, we introduce LargeSpace, the world's largest immersive display, and discuss the principles of its design. To clarify the design of large-scale projection-based immersive displays, we address the optimum screen shape, projection approach, and arrangement of projectors and tracking cameras. In addition, a novel distortion correction method for panoramic stereo rendering is described. The method can be applied to any projection-based immersive display with any screen shape, and can generate real-time panoramic-stereoscopic views from the viewpoints of tracked participants. To validate the design principles and the rendering algorithm, we implement the LargeSpace and confirm that the method can generate the correct perspective from any position inside the screen viewing area. We implement several applications and show that large-scale immersive displays can be used in the fields of art and experimental psychology.", "abstracts": [ { "abstractType": "Regular", "content": "In this paper, we introduce LargeSpace, the world's largest immersive display, and discuss the principles of its design. To clarify the design of large-scale projection-based immersive displays, we address the optimum screen shape, projection approach, and arrangement of projectors and tracking cameras. In addition, a novel distortion correction method for panoramic stereo rendering is described. 
The method can be applied to any projection-based immersive display with any screen shape, and can generate real-time panoramic-stereoscopic views from the viewpoints of tracked participants. To validate the design principles and the rendering algorithm, we implement the LargeSpace and confirm that the method can generate the correct perspective from any position inside the screen viewing area. We implement several applications and show that large-scale immersive displays can be used in the fields of art and experimental psychology.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "In this paper, we introduce LargeSpace, the world's largest immersive display, and discuss the principles of its design. To clarify the design of large-scale projection-based immersive displays, we address the optimum screen shape, projection approach, and arrangement of projectors and tracking cameras. In addition, a novel distortion correction method for panoramic stereo rendering is described. The method can be applied to any projection-based immersive display with any screen shape, and can generate real-time panoramic-stereoscopic views from the viewpoints of tracked participants. To validate the design principles and the rendering algorithm, we implement the LargeSpace and confirm that the method can generate the correct perspective from any position inside the screen viewing area. 
We implement several applications and show that large-scale immersive displays can be used in the fields of art and experimental psychology.", "fno": "08798019", "keywords": [ "Cameras", "Human Computer Interaction", "Image Sensors", "Optical Projectors", "Rendering Computer Graphics", "Stereo Image Processing", "Three Dimensional Displays", "Virtual Reality", "Screen Viewing Area", "Large Scale Immersive Displays", "Scale Projection Based Immersive Display", "Large Space", "Large Scale Projection Based Immersive Displays", "Optimum Screen Shape", "Projection Approach", "Tracking Cameras", "Panoramic Stereo Rendering", "Real Time Panoramic Stereoscopic Views", "Tracked Participants", "Design Principles", "Distortion Correction Method", "Shape", "Position Measurement", "Optical Variables Measurement", "Mirrors", "Virtual Environments", "Buildings", "Cameras", "Human Centered Computing X 2014 Human Computer Interaction HCI X 2014 Interaction Devices X 2014 Displays And Imagers", "Human Centered Computing X 2014 Visualization X 2014 Visualization Systems And Tools", "Applied Computing X 2014 Arts And Humanities X 2014 Media Arts" ], "authors": [ { "affiliation": "University of Tsukuba", "fullName": "Hikaru Takatori", "givenName": "Hikaru", "surname": "Takatori", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Tsukuba", "fullName": "Masashi Hiraiwa", "givenName": "Masashi", "surname": "Hiraiwa", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Tsukuba", "fullName": "Hiroaki Yano", "givenName": "Hiroaki", "surname": "Yano", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Tsukuba", "fullName": "Hiroo Iwata", "givenName": "Hiroo", "surname": "Iwata", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "557-565", "year": "2019", "issn": null, "isbn": 
"978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08797719", "articleId": "1cJ1dVsXQDS", "__typename": "AdjacentArticleType" }, "next": { "fno": "08797752", "articleId": "1cJ0OYQFEPe", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/1999/0093/0/00930079", "title": "Development of Wearable Force Display (HapticGEAR) for Immersive Projection Displays", "doi": null, "abstractUrl": "/proceedings-article/vr/1999/00930079/12OmNB1NVPi", "parentPublication": { "id": "proceedings/vr/1999/0093/0", "title": "Proceedings of Virtual Reality", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223320", "title": "Distance estimation in large immersive projection systems, revisited", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223320/12OmNBuL1cB", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2006/0224/0/02240151", "title": "Compensating Indirect Scattering for Immersive and Semi-Immersive Projection Displays", "doi": null, "abstractUrl": "/proceedings-article/vr/2006/02240151/12OmNCgrDcp", "parentPublication": { "id": "proceedings/vr/2006/0224/0", "title": "IEEE Virtual Reality Conference (VR 2006)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/pg/2002/1784/0/17840003", "title": "The i-Cone™ — A Panoramic Display System for Virtual Environments", "doi": null, "abstractUrl": "/proceedings-article/pg/2002/17840003/12OmNqBbHKd", "parentPublication": { "id": "proceedings/pg/2002/1784/0", "title": "Computer Graphics and Applications, Pacific Conference on", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" }, { "id": "proceedings/cvpr/2007/1179/0/04270471", "title": "Inter-Reflection Compensation for Immersive Projection Display", "doi": null, "abstractUrl": "/proceedings-article/cvpr/2007/04270471/12OmNwpGgIc", "parentPublication": { "id": "proceedings/cvpr/2007/1179/0", "title": "2007 IEEE Conference on Computer Vision and Pattern Recognition", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2004/2140/0/21400361", "title": "Immersive Multi-Projector Display on Hybrid Screens with Human-Scale Haptic and Locomotion Interfaces", "doi": null, "abstractUrl": "/proceedings-article/cw/2004/21400361/12OmNxGALbe", "parentPublication": { "id": "proceedings/cw/2004/2140/0", "title": "2004 International Conference on Cyberworlds", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2008/1971/0/04480780", "title": "Inexpensive Immersive Projection", "doi": null, "abstractUrl": "/proceedings-article/vr/2008/04480780/12OmNzQR1rP", "parentPublication": { "id": "proceedings/vr/2008/1971/0", "title": "IEEE Virtual Reality 2008", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icat/2007/3056/0/30560079", "title": "Room-sized Immersive Projection Display for Tele-immersion Environment", "doi": null, "abstractUrl": "/proceedings-article/icat/2007/30560079/12OmNzUxOgY", "parentPublication": { "id": "proceedings/icat/2007/3056/0", "title": "17th International Conference on Artificial Reality and Telexistence (ICAT 2007)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2016/04/07383338", "title": "Inter-reflection Compensation of Immersive Projection Display by Spatio-Temporal Screen Reflectance Modulation", "doi": null, "abstractUrl": "/journal/tg/2016/04/07383338/13rRUwInvfc", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on 
Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2009/3943/0/04811057", "title": "Immersive Rear Projection on Curved Screens", "doi": null, "abstractUrl": "/proceedings-article/vr/2009/04811057/1lssAh0wwUg", "parentPublication": { "id": "proceedings/vr/2009/3943/0", "title": "2009 IEEE Virtual Reality Conference", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNwB2dUd", "title": "2016 IEEE Symposium on 3D User Interfaces (3DUI)", "acronym": "3dui", "groupId": "1001623", "volume": "0", "displayVolume": "0", "year": "2016", "__typename": "ProceedingType" }, "article": { "id": "12OmNBQkwZJ", "doi": "10.1109/3DUI.2016.7460066", "title": "Rhythmic vibrations to heels and forefeet to produce virtual walking", "normalizedTitle": "Rhythmic vibrations to heels and forefeet to produce virtual walking", "abstract": "We are developing a virtual walking system where stationary observers experience the sensation of active walking in a 3D scene. The system consists of two systems: a walking recording system and a walking experiencing system. The recording system uses two cameras that are mounted on the head and four microphones that are embedded on the soles of an actual walker to capture the 3D optic flow of a walker and the left and right foot strikes on the ground. The experiencing system presents 3D scenes on a head-mounted display and rhythmic vibrations on the heels and forefeet to an observer. We performed psychological experiments to evaluate the system. Observers perceived a significantly higher sensation of actual walking when the vibrations were synchronized with the optic flow rather than when the vibrations were random, and when the left and right heels and forefeet were stimulated rather than when only the heels were stimulated.", "abstracts": [ { "abstractType": "Regular", "content": "We are developing a virtual walking system where stationary observers experience the sensation of active walking in a 3D scene. The system consists of two systems: a walking recording system and a walking experiencing system. The recording system uses two cameras that are mounted on the head and four microphones that are embedded on the soles of an actual walker to capture the 3D optic flow of a walker and the left and right foot strikes on the ground. 
The experiencing system presents 3D scenes on a head-mounted display and rhythmic vibrations on the heels and forefeet to an observer. We performed psychological experiments to evaluate the system. Observers perceived a significantly higher sensation of actual walking when the vibrations were synchronized with the optic flow rather than when the vibrations were random, and when the left and right heels and forefeet were stimulated rather than when only the heels were stimulated.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "We are developing a virtual walking system where stationary observers experience the sensation of active walking in a 3D scene. The system consists of two systems: a walking recording system and a walking experiencing system. The recording system uses two cameras that are mounted on the head and four microphones that are embedded on the soles of an actual walker to capture the 3D optic flow of a walker and the left and right foot strikes on the ground. The experiencing system presents 3D scenes on a head-mounted display and rhythmic vibrations on the heels and forefeet to an observer. We performed psychological experiments to evaluate the system. 
Observers perceived a significantly higher sensation of actual walking when the vibrations were synchronized with the optic flow rather than when the vibrations were random, and when the left and right heels and forefeet were stimulated rather than when only the heels were stimulated.", "fno": "07460066", "keywords": [ "Legged Locomotion", "Vibrations", "Three Dimensional Displays", "Optical Sensors", "Foot", "Synchronization", "Observers", "Telepresence", "Walking", "Optic Flow", "Tactile Stimulation" ], "authors": [ { "affiliation": "Toyohashi University of Technology", "fullName": "Ryota Kondo", "givenName": "Ryota", "surname": "Kondo", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Keisuke Goto", "givenName": "Keisuke", "surname": "Goto", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Katsuya Yoshiho", "givenName": "Katsuya", "surname": "Yoshiho", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University", "fullName": "Yasushi Ikei", "givenName": "Yasushi", "surname": "Ikei", "__typename": "ArticleAuthorType" }, { "affiliation": "The University of Electro-Communications", "fullName": "Koichi Hirota", "givenName": "Koichi", "surname": "Hirota", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } ], "idPrefix": "3dui", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2016-03-01T00:00:00", "pubType": "proceedings", "pages": "253-254", "year": "2016", "issn": null, "isbn": "978-1-5090-0842-1", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07460065", "articleId": "12OmNzlUKES", "__typename": "AdjacentArticleType" }, "next": { "fno": 
"07460067", "articleId": "12OmNqBtiSM", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223431", "title": "Walking recording and experience system by Visual Psychophysics Lab", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223431/12OmNB1NVNQ", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmssp/2016/4519/0/07946040", "title": "3D Quasi-Passive Walker of Bipedal Robot with Flat Feet Gait Analysis of 3D Quasi-Passive Walking", "doi": null, "abstractUrl": "/proceedings-article/icmssp/2016/07946040/12OmNvmG7ZQ", "parentPublication": { "id": "proceedings/icmssp/2016/4519/0", "title": "2016 International Conference on Multimedia Systems and Signal Processing (ICMSSP)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2012/1204/0/06184179", "title": "The King-Kong Effects: Improving sensation of walking in VR with visual and tactile vibrations at each step", "doi": null, "abstractUrl": "/proceedings-article/3dui/2012/06184179/12OmNwEJ0Lv", "parentPublication": { "id": "proceedings/3dui/2012/1204/0", "title": "2012 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2017/03/07450621", "title": "Emotion Rendering in Plantar Vibro-Tactile Simulations of Imagined Walking Styles", "doi": null, "abstractUrl": "/journal/ta/2017/03/07450621/13rRUwIF6cq", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08267106", "title": "Force Rendering and its Evaluation of a Friction-Based Walking Sensation Display for a Seated User", "doi": 
null, "abstractUrl": "/journal/tg/2018/04/08267106/13rRUwIF6dW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/09911682", "title": "Effect of Vibrations on Impression of Walking and Embodiment With First- and Third-Person Avatar", "doi": null, "abstractUrl": "/journal/tg/5555/01/09911682/1HeiWQWKlTG", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090634", "title": "Rhythmic proprioceptive stimulation improves embodiment in a walking avatar when added to visual stimulation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090634/1jIxkrgIlEY", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090453", "title": "Perception of Walking Self-body Avatar Enhances Virtual-walking Sensation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090453/1jIxoojmMy4", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090577", "title": "Resizing of the peripersonal space for the seated for different step frequencies of vibrations at the soles", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090577/1jIxp3AAdhK", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces 
Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a607", "title": "Virtual Walking Generator from Omnidirectional Video with Ground-dependent Foot Vibrations", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a607/1tnWZe0CPwA", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNx5YvrG", "title": "IEEE Virtual Reality 2005", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2005", "__typename": "ProceedingType" }, "article": { "id": "12OmNwJPMZr", "doi": "10.1109/VR.2005.1492799", "title": "Virtual acceleration with galvanic vestibular stimulation in a virtual reality environment", "normalizedTitle": "Virtual acceleration with galvanic vestibular stimulation in a virtual reality environment", "abstract": "This study describes the relation between the vection produced by optical flow and that created by galvanic vestibular stimulation. Vection is the illusion of self motion and is most often experienced when an observer views a large screen display containing a translating pattern. This illusion has only limited fidelity and duration unless it is reinforced by confirming vestibular information. Galvanic vestibular stimulation (GVS) can directly produce the sensation of vection.", "abstracts": [ { "abstractType": "Regular", "content": "This study describes the relation between the vection produced by optical flow and that created by galvanic vestibular stimulation. Vection is the illusion of self motion and is most often experienced when an observer views a large screen display containing a translating pattern. This illusion has only limited fidelity and duration unless it is reinforced by confirming vestibular information. Galvanic vestibular stimulation (GVS) can directly produce the sensation of vection.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This study describes the relation between the vection produced by optical flow and that created by galvanic vestibular stimulation. Vection is the illusion of self motion and is most often experienced when an observer views a large screen display containing a translating pattern. This illusion has only limited fidelity and duration unless it is reinforced by confirming vestibular information. 
Galvanic vestibular stimulation (GVS) can directly produce the sensation of vection.", "fno": "01492799", "keywords": [ "Ergonomics", "Virtual Reality", "User Interfaces", "Psychology", "Image Sequences", "Virtual Acceleration", "Galvanic Vestibular Stimulation", "Virtual Reality Environment", "Optical Flow", "Self Motion Illusion", "Large Screen Display", "Vestibular Information", "Vection Sensation", "Galvanic Vestibular Stimulus", "Motion Sickness", "Acceleration", "Galvanizing", "Virtual Reality", "Stimulated Emission", "Optical Sensors", "Image Motion Analysis", "Optical Scattering", "Ear", "Large Screen Displays", "Chromium" ], "authors": [ { "affiliation": "NTT Commun. Sci. Labs., Japan", "fullName": "T. Maeda", "givenName": "T.", "surname": "Maeda", "__typename": "ArticleAuthorType" }, { "affiliation": "NTT Commun. Sci. Labs., Japan", "fullName": "H. Ando", "givenName": "H.", "surname": "Ando", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "M. Sugimoto", "givenName": "M.", "surname": "Sugimoto", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2005-01-01T00:00:00", "pubType": "proceedings", "pages": "289,290", "year": "2005", "issn": "1087-8270", "isbn": "0-7803-8929-8", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "01492798", "articleId": "12OmNBv2CdA", "__typename": "AdjacentArticleType" }, "next": { "fno": "01492800", "articleId": "12OmNzt0IAx", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cybvis/1996/8058/0/00629471", "title": "Interactions of the nucleus of optic tract and vestibular system with gaze stabilization", "doi": null, "abstractUrl": "/proceedings-article/cybvis/1996/00629471/12OmNAle6uw", "parentPublication": { "id": "proceedings/cybvis/1996/8058/0", 
"title": "Proceedings II Workshop on Cybernetic Vision", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892307", "title": "Air cushion: A pilot study of the passive technique to mitigate simulator sickness by responding to vection", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892307/12OmNClQ0yz", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icat/2013/11/0/06728916", "title": "Countercurrent enhances acceleration sensation in galvanic vestibular stimulation", "doi": null, "abstractUrl": "/proceedings-article/icat/2013/06728916/12OmNvjyxGc", "parentPublication": { "id": "proceedings/icat/2013/11/0", "title": "2013 23rd International Conference on Artificial Reality and Telexistence (ICAT)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2005/8929/0/01492765", "title": "Towards lean and elegant self-motion simulation in virtual reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2005/01492765/12OmNxWcHjT", "parentPublication": { "id": "proceedings/vr/2005/8929/0", "title": "IEEE Virtual Reality 2005", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2008/1971/0/04480760", "title": "Circular, Linear, and Curvilinear Vection in a Large-screen Virtual Environment with Floor Projection", "doi": null, "abstractUrl": "/proceedings-article/vr/2008/04480760/12OmNzAoi4A", "parentPublication": { "id": "proceedings/vr/2008/1971/0", "title": "IEEE Virtual Reality 2008", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040701", "title": "Peripheral Stimulation and its Effect on Perceived Spatial Scale in Virtual Environments", "doi": null, "abstractUrl": 
"/journal/tg/2013/04/ttg2013040701/13rRUx0xPmZ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2022/05/09714040", "title": "Omnidirectional Galvanic Vestibular Stimulation in Virtual Reality", "doi": null, "abstractUrl": "/journal/tg/2022/05/09714040/1B0Y04eka8E", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798158", "title": "PhantomLegs: Reducing Virtual Reality Sickness Using Head-Worn Haptic Devices", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798158/1cJ16zT3GdW", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/svr/2019/5434/0/543400a186", "title": "A Virtual Reality Based Platform to Balance Disorders Diagnosis and Vestibular Rehabilitation", "doi": null, "abstractUrl": "/proceedings-article/svr/2019/543400a186/1fHjx6UmaVW", "parentPublication": { "id": "proceedings/svr/2019/5434/0", "title": "2019 21st Symposium on Virtual and Augmented Reality (SVR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800a498", "title": "Redirected Walking using Noisy Galvanic Vestibular Stimulation", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800a498/1yeCU92Xt5K", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNrNh0vw", "title": "2014 22nd International Conference on Pattern Recognition (ICPR)", "acronym": "icpr", "groupId": "1000545", "volume": "0", "displayVolume": "0", "year": "2014", "__typename": "ProceedingType" }, "article": { "id": "12OmNwdbVaL", "doi": "10.1109/ICPR.2014.492", "title": "Automatic Prediction of Perceived Traits Using Visual Cues under Varied Situational Context", "normalizedTitle": "Automatic Prediction of Perceived Traits Using Visual Cues under Varied Situational Context", "abstract": "Automatic assessment of human personality traits is a non-trivial problem, especially when perception is marked over a fairly short duration of time. In this study, thin slices of behavioral data are analyzed. Perceived physical and behavioral traits are assessed by external observers (raters). Along with the big-five personality trait model, four new traits are introduced and assessed in this work. The relationship between various traits is investigated to obtain a better understanding of observer perception and assessment. Perception change is also considered when participants interact with several virtual characters each with a distinct emotional style. Encapsulating these observations and analysis, an automated system is proposed by firstly computing low level visual features. Using these features a separate model is trained for each trait and performance is evaluated. Further, a weighted model based on rater credibility is proposed to address observer biases. Experimental results indicate that a weighted model show major improvement for automatic prediction of perceived physical and behavioral traits.", "abstracts": [ { "abstractType": "Regular", "content": "Automatic assessment of human personality traits is a non-trivial problem, especially when perception is marked over a fairly short duration of time. In this study, thin slices of behavioral data are analyzed. 
Perceived physical and behavioral traits are assessed by external observers (raters). Along with the big-five personality trait model, four new traits are introduced and assessed in this work. The relationship between various traits is investigated to obtain a better understanding of observer perception and assessment. Perception change is also considered when participants interact with several virtual characters each with a distinct emotional style. Encapsulating these observations and analysis, an automated system is proposed by firstly computing low level visual features. Using these features a separate model is trained for each trait and performance is evaluated. Further, a weighted model based on rater credibility is proposed to address observer biases. Experimental results indicate that a weighted model show major improvement for automatic prediction of perceived physical and behavioral traits.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Automatic assessment of human personality traits is a non-trivial problem, especially when perception is marked over a fairly short duration of time. In this study, thin slices of behavioral data are analyzed. Perceived physical and behavioral traits are assessed by external observers (raters). Along with the big-five personality trait model, four new traits are introduced and assessed in this work. The relationship between various traits is investigated to obtain a better understanding of observer perception and assessment. Perception change is also considered when participants interact with several virtual characters each with a distinct emotional style. Encapsulating these observations and analysis, an automated system is proposed by firstly computing low level visual features. Using these features a separate model is trained for each trait and performance is evaluated. Further, a weighted model based on rater credibility is proposed to address observer biases. 
Experimental results indicate that a weighted model show major improvement for automatic prediction of perceived physical and behavioral traits.", "fno": "5209c855", "keywords": [ "Correlation", "Context", "Visualization", "Observers", "Computational Modeling", "Predictive Models", "Face" ], "authors": [ { "affiliation": null, "fullName": "Jyoti Joshi", "givenName": "Jyoti", "surname": "Joshi", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Hatice Gunes", "givenName": "Hatice", "surname": "Gunes", "__typename": "ArticleAuthorType" }, { "affiliation": null, "fullName": "Roland Goecke", "givenName": "Roland", "surname": "Goecke", "__typename": "ArticleAuthorType" } ], "idPrefix": "icpr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2014-08-01T00:00:00", "pubType": "proceedings", "pages": "2855-2860", "year": "2014", "issn": "1051-4651", "isbn": "978-1-4799-5209-0", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "5209c849", "articleId": "12OmNBt3qqY", "__typename": "AdjacentArticleType" }, "next": { "fno": "5209c861", "articleId": "12OmNwNeYwt", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/hicss/2005/2268/6/22680151a", "title": "Towards a Sociability Theory of Computer Anxiety: An Interpersonal Circumplex Perspective", "doi": null, "abstractUrl": "/proceedings-article/hicss/2005/22680151a/12OmNrF2DG4", "parentPublication": { "id": "proceedings/hicss/2005/2268/6", "title": "Proceedings of the 38th Annual Hawaii International Conference on System Sciences", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/passat-socialcom/2011/1931/0/06113237", "title": "Contextual Modeling of Personality States' Dynamics in Face-to-Face Interactions", "doi": null, "abstractUrl": 
"/proceedings-article/passat-socialcom/2011/06113237/12OmNrkT7D6", "parentPublication": { "id": "proceedings/passat-socialcom/2011/1931/0", "title": "2011 IEEE Third Int'l Conference on Privacy, Security, Risk and Trust (PASSAT) / 2011 IEEE Third Int'l Conference on Social Computing (SocialCom)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ijcss/2011/4421/0/4421a145", "title": "Fairness is in the Eye of the Beholder: How Personality Traits Affect Perceived Fairness and Satisfaction in Hotel Service", "doi": null, "abstractUrl": "/proceedings-article/ijcss/2011/4421a145/12OmNvrvj9g", "parentPublication": { "id": "proceedings/ijcss/2011/4421/0", "title": "Service Sciences, International Joint Conference on", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/intetain/2015/0061/0/07325494", "title": "Exploiting reverse correlation for the generation of virtual characters from personality traits", "doi": null, "abstractUrl": "/proceedings-article/intetain/2015/07325494/12OmNwFid5V", "parentPublication": { "id": "proceedings/intetain/2015/0061/0", "title": "2015 7th International Conference on Intelligent Technologies for Interactive Entertainment (INTETAIN)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iswc/2011/0774/0/05959587", "title": "Who's Who with Big-Five: Analyzing and Classifying Personality Traits with Smartphones", "doi": null, "abstractUrl": "/proceedings-article/iswc/2011/05959587/12OmNylKAYt", "parentPublication": { "id": "proceedings/iswc/2011/0774/0", "title": "2011 15th Annual International Symposium on Wearable Computers", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/hicss/2015/7367/0/7367d518", "title": "The Dark Side of the Insider: Detecting the Insider Threat through Examination of Dark Triad Personality Traits", "doi": null, 
"abstractUrl": "/proceedings-article/hicss/2015/7367d518/12OmNz2TCGs", "parentPublication": { "id": "proceedings/hicss/2015/7367/0", "title": "2015 48th Hawaii International Conference on System Sciences (HICSS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/2015/02/06977907", "title": "What Your Face Vlogs About: Expressions of Emotion and Big-Five Traits Impressions in YouTube", "doi": null, "abstractUrl": "/journal/ta/2015/02/06977907/13rRUwbs2eV", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/ta/5555/01/10061286", "title": "Personality Trait Recognition Based on Smartphone Typing Characteristics in the Wild", "doi": null, "abstractUrl": "/journal/ta/5555/01/10061286/1LiKHARauSQ", "parentPublication": { "id": "trans/ta", "title": "IEEE Transactions on Affective Computing", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icbdie/2020/5900/0/09150263", "title": "Study on the Influence of College Students&#x2019; Personality Traits on Employment Intention Choice", "doi": null, "abstractUrl": "/proceedings-article/icbdie/2020/09150263/1lPGMIcrFAY", "parentPublication": { "id": "proceedings/icbdie/2020/5900/0", "title": "2020 International Conference on Big Data and Informatization Education (ICBDIE)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/scc/2020/8789/0/878900a054", "title": "Personality Traits Prediction Based on Users&#x2019; Digital Footprints in Social Networks via Attention RNN", "doi": null, "abstractUrl": "/proceedings-article/scc/2020/878900a054/1pttWLfhuRG", "parentPublication": { "id": "proceedings/scc/2020/8789/0", "title": "2020 IEEE International Conference on Services Computing (SCC)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNxV4itF", "title": "2017 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2017", "__typename": "ProceedingType" }, "article": { "id": "12OmNwtEEvF", "doi": "10.1109/VR.2017.7892287", "title": "Evaluation of airflow effect on a VR walk", "normalizedTitle": "Evaluation of airflow effect on a VR walk", "abstract": "The present study investigates the augmentation effect of airflow on the sensation of a virtual reality walk. The intensity of cutaneous sensation evoked by airflow during the real and virtual walk was measured. The airflow stimulus was added to the participant with passive vestibular motion and visual presentation. The result suggests that the sensation of walking was strongly increased by adding the airflow stimulus to the vestibular and optic presentations. The cutaneous sensation of airflow was perceived higher for the sitting participant than during a real walk in both a single and the combined stimuli. The equivalent speed of airflow for the sitting participant was lowered from the airflow speed in the real walk.", "abstracts": [ { "abstractType": "Regular", "content": "The present study investigates the augmentation effect of airflow on the sensation of a virtual reality walk. The intensity of cutaneous sensation evoked by airflow during the real and virtual walk was measured. The airflow stimulus was added to the participant with passive vestibular motion and visual presentation. The result suggests that the sensation of walking was strongly increased by adding the airflow stimulus to the vestibular and optic presentations. The cutaneous sensation of airflow was perceived higher for the sitting participant than during a real walk in both a single and the combined stimuli. 
The equivalent speed of airflow for the sitting participant was lowered from the airflow speed in the real walk.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "The present study investigates the augmentation effect of airflow on the sensation of a virtual reality walk. The intensity of cutaneous sensation evoked by airflow during the real and virtual walk was measured. The airflow stimulus was added to the participant with passive vestibular motion and visual presentation. The result suggests that the sensation of walking was strongly increased by adding the airflow stimulus to the vestibular and optic presentations. The cutaneous sensation of airflow was perceived higher for the sitting participant than during a real walk in both a single and the combined stimuli. The equivalent speed of airflow for the sitting participant was lowered from the airflow speed in the real walk.", "fno": "07892287", "keywords": [ "Legged Locomotion", "Optical Sensors", "Adaptive Optics", "Standards", "Virtual Environments", "Electronic Mail", "Stimulated Emission", "Airflow", "Cutaneous Sensation", "Virtual Real Walking" ], "authors": [ { "affiliation": "Tokyo Metropolitan University, Japan", "fullName": "Masato Kurosawa", "givenName": "Masato", "surname": "Kurosawa", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University, Japan", "fullName": "Ken Ito", "givenName": "Ken", "surname": "Ito", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University, Japan", "fullName": "Yasushi Ikei", "givenName": "Yasushi", "surname": "Ikei", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Electro-Communications, Japan", "fullName": "Koichi Hirota", "givenName": "Koichi", "surname": "Hirota", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology, Japan", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" } 
], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2017-01-01T00:00:00", "pubType": "proceedings", "pages": "283-284", "year": "2017", "issn": "2375-5334", "isbn": "978-1-5090-6647-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07892286", "articleId": "12OmNxymobo", "__typename": "AdjacentArticleType" }, "next": { "fno": "07892288", "articleId": "12OmNzUPptg", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/icis/2017/5507/0/07960051", "title": "Double hand-gesture interaction for walk-through in VR environment", "doi": null, "abstractUrl": "/proceedings-article/icis/2017/07960051/12OmNB06l56", "parentPublication": { "id": "proceedings/icis/2017/5507/0", "title": "2017 IEEE/ACIS 16th International Conference on Computer and Information Science (ICIS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2010/6237/0/05444763", "title": "On the effect of airflow on odor presentation", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444763/12OmNviHKkx", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icca/2003/7777/0/01595152", "title": "Distributed Reinforcement Learning of a Six-Legged Robot to Walk", "doi": null, "abstractUrl": "/proceedings-article/icca/2003/01595152/12OmNvlxJyn", "parentPublication": { "id": "proceedings/icca/2003/7777/0", "title": "4th International Conference on Control and Automation. 
Final Program and Book of Abstracts", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223390", "title": "Third person's footsteps enhanced moving sensation of seated person", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223390/12OmNxFsmDI", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223404", "title": "Characteristics of virtual walking sensation created by a 3-dof motion seat", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223404/12OmNzBwGnY", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08267106", "title": "Force Rendering and its Evaluation of a Friction-Based Walking Sensation Display for a Seated User", "doi": null, "abstractUrl": "/journal/tg/2018/04/08267106/13rRUwIF6dW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040701", "title": "Peripheral Stimulation and its Effect on Perceived Spatial Scale in Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040701/13rRUx0xPmZ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2019/11/08794563", "title": "Estimation of Rotation Gain Thresholds Considering FOV, Gender, and Distractors", "doi": null, "abstractUrl": "/journal/tg/2019/11/08794563/1dNHkjixhDi", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on 
Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090608", "title": "Towards an Affordance of Embodied Locomotion Interfaces in VR: How to Know How to Move?", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090608/1jIxnjPP9Ti", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090486", "title": "Walk this way: Evaluating the effect of perceived gender and attractiveness of motion on proximity in virtual reality", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090486/1jIxwwL7jmE", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNvAiSpZ", "title": "2015 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2015", "__typename": "ProceedingType" }, "article": { "id": "12OmNxGja3F", "doi": "10.1109/VR.2015.7223328", "title": "The effect of visual display properties and gain presentation mode on the perceived naturalness of virtual walking speeds", "normalizedTitle": "The effect of visual display properties and gain presentation mode on the perceived naturalness of virtual walking speeds", "abstract": "Individuals tend to find realistic walking speeds too slow when relying on treadmill walking or Walking-In-Place (WIP) techniques for virtual travel. This paper details three studies investigating the effects of visual display properties and gain presentation mode on the perceived naturalness of virtual walking speeds: The first study compared three different degrees of peripheral occlusion; the second study compared three different degrees of perceptual distortion produced by varying the geometric field of view (GFOV); and the third study compared three different ways of presenting visual gains. All three studies compared treadmill walking and WIP locomotion. The first study revealed no significant main effects of peripheral occlusion. The second study revealed a significant main effect of GFOV, suggesting that the GFOV size may be inversely proportional to the degree of underestimation of the visual speed. The third study found a significant main effect of gain presentation mode. Allowing participants to interactively adjust the gain led to a smaller range of perceptually natural gains and this approach was significantly faster. However, the efficiency may come at the expense of confidence. Generally the lower and upper bounds of the perceptually natural speeds were higher for treadmill walking than WIP. 
However, not all differences were statistically significant.", "abstracts": [ { "abstractType": "Regular", "content": "Individuals tend to find realistic walking speeds too slow when relying on treadmill walking or Walking-In-Place (WIP) techniques for virtual travel. This paper details three studies investigating the effects of visual display properties and gain presentation mode on the perceived naturalness of virtual walking speeds: The first study compared three different degrees of peripheral occlusion; the second study compared three different degrees of perceptual distortion produced by varying the geometric field of view (GFOV); and the third study compared three different ways of presenting visual gains. All three studies compared treadmill walking and WIP locomotion. The first study revealed no significant main effects of peripheral occlusion. The second study revealed a significant main effect of GFOV, suggesting that the GFOV size may be inversely proportional to the degree of underestimation of the visual speed. The third study found a significant main effect of gain presentation mode. Allowing participants to interactively adjust the gain led to a smaller range of perceptually natural gains and this approach was significantly faster. However, the efficiency may come at the expense of confidence. Generally the lower and upper bounds of the perceptually natural speeds were higher for treadmill walking than WIP. However, not all differences were statistically significant.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Individuals tend to find realistic walking speeds too slow when relying on treadmill walking or Walking-In-Place (WIP) techniques for virtual travel. 
This paper details three studies investigating the effects of visual display properties and gain presentation mode on the perceived naturalness of virtual walking speeds: The first study compared three different degrees of peripheral occlusion; the second study compared three different degrees of perceptual distortion produced by varying the geometric field of view (GFOV); and the third study compared three different ways of presenting visual gains. All three studies compared treadmill walking and WIP locomotion. The first study revealed no significant main effects of peripheral occlusion. The second study revealed a significant main effect of GFOV, suggesting that the GFOV size may be inversely proportional to the degree of underestimation of the visual speed. The third study found a significant main effect of gain presentation mode. Allowing participants to interactively adjust the gain led to a smaller range of perceptually natural gains and this approach was significantly faster. However, the efficiency may come at the expense of confidence. Generally the lower and upper bounds of the perceptually natural speeds were higher for treadmill walking than WIP. 
However, not all differences were statistically significant.", "fno": "07223328", "keywords": [ "Legged Locomotion", "Visualization", "Optical Distortion", "Analysis Of Variance", "Virtual Environments", "Distortion", "Adaptive Optics", "I 3 7 Computer Graphics Three Dimenshional Graphics And Realism Virtual Reality", "H 1 2 Information Systems User Machine Systems Human Factors" ], "authors": [ { "affiliation": "Aalborg University", "fullName": "Niels Christian Nilsson", "givenName": "Niels Christian", "surname": "Nilsson", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University", "fullName": "Stefania Serafin", "givenName": "Stefania", "surname": "Serafin", "__typename": "ArticleAuthorType" }, { "affiliation": "Aalborg University", "fullName": "Rolf Nordahl", "givenName": "Rolf", "surname": "Nordahl", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2015-03-01T00:00:00", "pubType": "proceedings", "pages": "81-88", "year": "2015", "issn": null, "isbn": "978-1-4799-1727-3", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07223327", "articleId": "12OmNzcPAkQ", "__typename": "AdjacentArticleType" }, "next": { "fno": "07223329", "articleId": "12OmNvxsSSu", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2010/6237/0/05444812", "title": "GUD WIP: Gait-Understanding-Driven Walking-In-Place", "doi": null, "abstractUrl": "/proceedings-article/vr/2010/05444812/12OmNAle6ku", "parentPublication": { "id": "proceedings/vr/2010/6237/0", "title": "2010 IEEE Virtual Reality Conference (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/3dui/2013/6097/0/06550193", "title": "Tapping-In-Place: Increasing the naturalness of immersive 
walking-in-place locomotion through novel gestural input", "doi": null, "abstractUrl": "/proceedings-article/3dui/2013/06550193/12OmNAnMuyq", "parentPublication": { "id": "proceedings/3dui/2013/6097/0", "title": "2013 IEEE Symposium on 3D User Interfaces (3DUI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223389", "title": "The effect of head mounted display weight and locomotion method on the perceived naturalness of virtual walking speeds", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223389/12OmNwqft3l", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2018/3365/0/08446165", "title": "A Threefold Approach for Precise and Efficient Locomotion in Virtual Environments with Varying Accessibility", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08446165/13bd1AIBM28", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2014/04/ttg201404569", "title": "Establishing the Range of Perceptually Natural Visual Walking Speeds for Virtual Walking-In-Place Locomotion", "doi": null, "abstractUrl": "/journal/tg/2014/04/ttg201404569/13rRUxAASTb", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/iiai-aai/2022/9755/0/975500a501", "title": "The impact of zoning on congestion caused by workers with different walking speeds in order picking operations", "doi": null, "abstractUrl": "/proceedings-article/iiai-aai/2022/975500a501/1GU6QYg8KFq", "parentPublication": { "id": 
"proceedings/iiai-aai/2022/9755/0", "title": "2022 12th International Congress on Advanced Applied Informatics (IIAI-AAI)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049680", "title": "Assisted walking-in-place: Introducing assisted motion to walking-by-cycling in embodied virtual reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049680/1KYolEFtr6U", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798209", "title": "Enactive Approach to Assess Perceived Speed Error during Walking and Running in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798209/1cI6auzeLYY", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090453", "title": "Perception of Walking Self-body Avatar Enhances Virtual-walking Sensation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090453/1jIxoojmMy4", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2021/4057/0/405700a498", "title": "SHeF-WIP: Walking-in-Place based on Step Height and Frequency for Wider Range of Virtual Speed", "doi": null, "abstractUrl": "/proceedings-article/vrw/2021/405700a498/1tnWFlvbESk", "parentPublication": { "id": "proceedings/vrw/2021/4057/0", "title": "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": 
"RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "12OmNAYXWAF", "title": "2016 IEEE Virtual Reality (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2016", "__typename": "ProceedingType" }, "article": { "id": "12OmNxu6p8R", "doi": "10.1109/VR.2016.7504715", "title": "Vestibulohaptic passive stimulation for a walking sensation", "normalizedTitle": "Vestibulohaptic passive stimulation for a walking sensation", "abstract": "This paper describes a passive stimulation of a body to evoke a walking sensation using a vestibular and haptic device while the real body of the user is sitting. It imparts a pseudo body image to the user through the real (physical) body of the user as a part of the virtual reality (VR) display system. The created walking sensation was evaluated by nine factors to analyze the complex nature of the walking sensation.", "abstracts": [ { "abstractType": "Regular", "content": "This paper describes a passive stimulation of a body to evoke a walking sensation using a vestibular and haptic device while the real body of the user is sitting. It imparts a pseudo body image to the user through the real (physical) body of the user as a part of the virtual reality (VR) display system. The created walking sensation was evaluated by nine factors to analyze the complex nature of the walking sensation.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "This paper describes a passive stimulation of a body to evoke a walking sensation using a vestibular and haptic device while the real body of the user is sitting. It imparts a pseudo body image to the user through the real (physical) body of the user as a part of the virtual reality (VR) display system. 
The created walking sensation was evaluated by nine factors to analyze the complex nature of the walking sensation.", "fno": "07504715", "keywords": [ "Legged Locomotion", "Avatars", "Haptic Interfaces", "Electronic Mail", "Muscles", "Solid Modeling", "H 5 2 Information Interfaces And Presentation User Interfaces", "H 5 1 Information Interfaces And Presentation Multimedia Information Systems Artificial Realities" ], "authors": [ { "affiliation": "Tokyo Metropolitan University", "fullName": "Yasushi Ikei", "givenName": "Yasushi", "surname": "Ikei", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University", "fullName": "Shunki Kato", "givenName": "Shunki", "surname": "Kato", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University", "fullName": "Kohei Komase", "givenName": "Kohei", "surname": "Komase", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University", "fullName": "Shogo Imao", "givenName": "Shogo", "surname": "Imao", "__typename": "ArticleAuthorType" }, { "affiliation": "Tokyo Metropolitan University, The University of Tokyo", "fullName": "Sho Sakurai", "givenName": "Sho", "surname": "Sakurai", "__typename": "ArticleAuthorType" }, { "affiliation": "NTT", "fullName": "Tomohiro Amemiya", "givenName": "Tomohiro", "surname": "Amemiya", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" }, { "affiliation": "The University of Electro-Communications", "fullName": "Koichi Hirota", "givenName": "Koichi", "surname": "Hirota", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2016-03-01T00:00:00", "pubType": "proceedings", "pages": "185-186", "year": "2016", "issn": "2375-5334", "isbn": "978-1-5090-0836-0", "notes": null, 
"notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "07504714", "articleId": "12OmNzaQowA", "__typename": "AdjacentArticleType" }, "next": { "fno": "07504716", "articleId": "12OmNz2kqqa", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2017/6647/0/07892287", "title": "Evaluation of airflow effect on a VR walk", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892287/12OmNwtEEvF", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223390", "title": "Third person's footsteps enhanced moving sensation of seated person", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223390/12OmNxFsmDI", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2015/1727/0/07223404", "title": "Characteristics of virtual walking sensation created by a 3-dof motion seat", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223404/12OmNzBwGnY", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2018/04/08267106", "title": "Force Rendering and its Evaluation of a Friction-Based Walking Sensation Display for a Seated User", "doi": null, "abstractUrl": "/journal/tg/2018/04/08267106/13rRUwIF6dW", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/irc/2019/9245/0/924500a288", "title": "A Bio-Inspired Musculoskeletal 
Model of the Lower Limb for Energy Economical Bipedal Walking", "doi": null, "abstractUrl": "/proceedings-article/irc/2019/924500a288/18M7ixxaQr6", "parentPublication": { "id": "proceedings/irc/2019/9245/0", "title": "2019 Third IEEE International Conference on Robotic Computing (IRC)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2023/05/10049680", "title": "Assisted walking-in-place: Introducing assisted motion to walking-by-cycling in embodied virtual reality", "doi": null, "abstractUrl": "/journal/tg/2023/05/10049680/1KYolEFtr6U", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798345", "title": "Investigation of Visual Self-Representation for a Walking-in-Place Navigation System in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798345/1cJ1hpkUgHS", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090634", "title": "Rhythmic proprioceptive stimulation improves embodiment in a walking avatar when added to visual stimulation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090634/1jIxkrgIlEY", "parentPublication": { "id": "proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vrw/2020/6532/0/09090453", "title": "Perception of Walking Self-body Avatar Enhances Virtual-walking Sensation", "doi": null, "abstractUrl": "/proceedings-article/vrw/2020/09090453/1jIxoojmMy4", "parentPublication": { "id": 
"proceedings/vrw/2020/6532/0", "title": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar/2021/0158/0/015800a498", "title": "Redirected Walking using Noisy Galvanic Vestibular Stimulation", "doi": null, "abstractUrl": "/proceedings-article/ismar/2021/015800a498/1yeCU92Xt5K", "parentPublication": { "id": "proceedings/ismar/2021/0158/0", "title": "2021 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "13bd1eJgoia", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2018", "__typename": "ProceedingType" }, "article": { "id": "13bd1gQYgE6", "doi": "10.1109/VR.2018.8446282", "title": "Illusory Body Ownership Between Different Body Parts: Synchronization of Right Thumb and Right Arm", "normalizedTitle": "Illusory Body Ownership Between Different Body Parts: Synchronization of Right Thumb and Right Arm", "abstract": "Illusory body ownership can be induced by visual-tactile stimulation or visual-motor synchronicity. We aimed to test whether a right thumb could be remapped to a virtual right arm and illusory body ownership of the virtual arm induced through synchronous movements of the right thumb and the virtual right arm. We presented the virtual right arm in synchronization with movements of a participant's right thumb on a head-mounted display (HMD). We found that the participants felt as though their right thumb became the right arm, and that the right arm belonged to their own body.", "abstracts": [ { "abstractType": "Regular", "content": "Illusory body ownership can be induced by visual-tactile stimulation or visual-motor synchronicity. We aimed to test whether a right thumb could be remapped to a virtual right arm and illusory body ownership of the virtual arm induced through synchronous movements of the right thumb and the virtual right arm. We presented the virtual right arm in synchronization with movements of a participant's right thumb on a head-mounted display (HMD). We found that the participants felt as though their right thumb became the right arm, and that the right arm belonged to their own body.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "Illusory body ownership can be induced by visual-tactile stimulation or visual-motor synchronicity. 
We aimed to test whether a right thumb could be remapped to a virtual right arm and illusory body ownership of the virtual arm induced through synchronous movements of the right thumb and the virtual right arm. We presented the virtual right arm in synchronization with movements of a participant's right thumb on a head-mounted display (HMD). We found that the participants felt as though their right thumb became the right arm, and that the right arm belonged to their own body.", "fno": "08446282", "keywords": [ "Haptic Interfaces", "Helmet Mounted Displays", "Mechanoception", "Virtual Reality", "Illusory Body Ownership", "Virtual Arm", "Synchronous Movements", "Right Thumb", "Right Arm", "Visual Tactile Stimulation", "Body Parts Synchronization", "Head Mounted Display", "HMD", "Thumb", "Synchronization", "Rubber", "Legged Locomotion", "Resists", "Observers", "Manipulators", "Body Ownership", "Rubber Hand Illusion", "Human Augmentation", "Virtual Reality", "I 3 7 Computer Graphics Three Dimensional Graphics And Realism Virtual Reality", "H L 2 Models And Principles User Machine Systems Human Factors" ], "authors": [ { "affiliation": "Toyohashi University of Technology", "fullName": "Ryota Kondo", "givenName": "Ryota", "surname": "Kondo", "__typename": "ArticleAuthorType" }, { "affiliation": "Keio University", "fullName": "Maki Sugimoto", "givenName": "Maki", "surname": "Sugimoto", "__typename": "ArticleAuthorType" }, { "affiliation": "Keio University", "fullName": "Kouta Minamizawa", "givenName": "Kouta", "surname": "Minamizawa", "__typename": "ArticleAuthorType" }, { "affiliation": "The University of Tokyo", "fullName": "Masahiko Inami", "givenName": "Masahiko", "surname": "Inami", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Michiteru Kitazaki", "givenName": "Michiteru", "surname": "Kitazaki", "__typename": "ArticleAuthorType" }, { "affiliation": "Toyohashi University of Technology", "fullName": "Yamato 
Tani", "givenName": "Yamato", "surname": "Tani", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2018-03-01T00:00:00", "pubType": "proceedings", "pages": "611-612", "year": "2018", "issn": null, "isbn": "978-1-5386-3365-6", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08446582", "articleId": "13bd1h03qOp", "__typename": "AdjacentArticleType" }, "next": { "fno": "08446528", "articleId": "13bd1eNNYn7", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/cts/2016/2300/0/07871048", "title": "Body Ownership in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/cts/2016/07871048/12OmNCm7BF7", "parentPublication": { "id": "proceedings/cts/2016/2300/0", "title": "2016 International Conference on Collaboration Technologies and Systems (CTS)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/pive/2012/1218/0/06229793", "title": "Applying “out of the body” funneling and saltation to interaction with virtual and augmented objects", "doi": null, "abstractUrl": "/proceedings-article/pive/2012/06229793/12OmNrJRPoT", "parentPublication": { "id": "proceedings/pive/2012/1218/0", "title": "2012 IEEE VR Workshop on Perceptual Illusions in Virtual Environments", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2017/6647/0/07892233", "title": "VRRobot: Robot actuated props in an infinite virtual environment", "doi": null, "abstractUrl": "/proceedings-article/vr/2017/07892233/12OmNwkhTh6", "parentPublication": { "id": "proceedings/vr/2017/6647/0", "title": "2017 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/vr/2018/3365/0/08447562", "title": "In Limbo: The Effect of Gradual Visual Transition Between Real and Virtual on Virtual Body Ownership Illusion and Presence", "doi": null, "abstractUrl": "/proceedings-article/vr/2018/08447562/13bd1sx4Zt3", "parentPublication": { "id": "proceedings/vr/2018/3365/0", "title": "2018 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/th/2016/02/07398091", "title": "Multiple Fingers – One Gestalt", "doi": null, "abstractUrl": "/journal/th/2016/02/07398091/13rRUxD9h5k", "parentPublication": { "id": "trans/th", "title": "IEEE Transactions on Haptics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/cw/2018/7315/0/731500a439", "title": "Effects of Sound Volume Change When Squeezing a Virtual Soft Object with a Bare Hand", "doi": null, "abstractUrl": "/proceedings-article/cw/2018/731500a439/17D45Vw15sG", "parentPublication": { "id": "proceedings/cw/2018/7315/0", "title": "2018 International Conference on Cyberworlds (CW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/5555/01/10048575", "title": "Sensory Attenuation with a Virtual Robotic Arm Controlled Using Facial Movements", "doi": null, "abstractUrl": "/journal/tg/5555/01/10048575/1KQ5KN76WNq", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798346", "title": "Scrambled Body: A Method to Compare Full Body Illusion and Illusory Body Ownership of Body Parts", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798346/1cJ0NSIEQda", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": 
"ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798222", "title": "Shared Body by Action Integration of Two Persons: Body Ownership, Sense of Agency and Task Performance", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798222/1cJ0T8PM6qI", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08797756", "title": "Field of View and Forward Motion Discrimination in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08797756/1cJ0UegDTgY", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }
{ "proceeding": { "id": "1cI6akLvAuQ", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "acronym": "vr", "groupId": "1000791", "volume": "0", "displayVolume": "0", "year": "2019", "__typename": "ProceedingType" }, "article": { "id": "1cJ0UegDTgY", "doi": "10.1109/VR.2019.8797756", "title": "Field of View and Forward Motion Discrimination in Virtual Reality", "normalizedTitle": "Field of View and Forward Motion Discrimination in Virtual Reality", "abstract": "There has long been interest in illusions of self-motion and their perception in virtual environments. Prior work has indicated that an observer's field of view size is an important factor in the perception of self-motion both real and illusory. Restricted fields of view in some virtual reality displays has limited the extent to which this can be studied. In this paper, we discuss a pilot study examining how well observers can discriminate forward motion velocities as viewed through two common, but differing, field of view configurations. We find that observers are quite sensitive to changes in forward motion. The perceived magnitude of this motion is also found to be affected by field of view size with a smaller field of view resulting in slower perceived velocity.", "abstracts": [ { "abstractType": "Regular", "content": "There has long been interest in illusions of self-motion and their perception in virtual environments. Prior work has indicated that an observer's field of view size is an important factor in the perception of self-motion both real and illusory. Restricted fields of view in some virtual reality displays has limited the extent to which this can be studied. In this paper, we discuss a pilot study examining how well observers can discriminate forward motion velocities as viewed through two common, but differing, field of view configurations. We find that observers are quite sensitive to changes in forward motion. 
The perceived magnitude of this motion is also found to be affected by field of view size with a smaller field of view resulting in slower perceived velocity.", "__typename": "ArticleAbstractType" } ], "normalizedAbstract": "There has long been interest in illusions of self-motion and their perception in virtual environments. Prior work has indicated that an observer's field of view size is an important factor in the perception of self-motion both real and illusory. Restricted fields of view in some virtual reality displays has limited the extent to which this can be studied. In this paper, we discuss a pilot study examining how well observers can discriminate forward motion velocities as viewed through two common, but differing, field of view configurations. We find that observers are quite sensitive to changes in forward motion. The perceived magnitude of this motion is also found to be affected by field of view size with a smaller field of view resulting in slower perceived velocity.", "fno": "08797756", "keywords": [ "Human Factors", "Virtual Reality", "Visual Perception", "Field Of View Configuration", "Forward Motion Velocities", "Virtual Reality Displays", "View Size", "Virtual Environments", "Forward Motion Discrimination", "Legged Locomotion", "Visualization", "Resists", "Observers", "Virtual Environments", "Adaptive Optics", "Human Centered Computing", "Interaction Paradigms", "Virtual Reality", "Computer Graphics", "Graphics Systems And Interfaces", "Perception" ], "authors": [ { "affiliation": "University of Mississippi", "fullName": "Jonathan E. Hopper", "givenName": "Jonathan E.", "surname": "Hopper", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Mississippi", "fullName": "Hunter Finney", "givenName": "Hunter", "surname": "Finney", "__typename": "ArticleAuthorType" }, { "affiliation": "University of Mississippi", "fullName": "J. Adam Jones", "givenName": "J. 
Adam", "surname": "Jones", "__typename": "ArticleAuthorType" } ], "idPrefix": "vr", "isOpenAccess": false, "showRecommendedArticles": true, "showBuyMe": true, "hasPdf": true, "pubDate": "2019-03-01T00:00:00", "pubType": "proceedings", "pages": "1663-1666", "year": "2019", "issn": null, "isbn": "978-1-7281-1377-7", "notes": null, "notesType": null, "__typename": "ArticleType" }, "webExtras": [], "adjacentArticles": { "previous": { "fno": "08797757", "articleId": "1cJ0OehjVIY", "__typename": "AdjacentArticleType" }, "next": { "fno": "08798258", "articleId": "1cJ0Ij0W0py", "__typename": "AdjacentArticleType" }, "__typename": "AdjacentArticlesType" }, "recommendedArticles": [ { "id": "proceedings/vr/2015/1727/0/07223431", "title": "Walking recording and experience system by Visual Psychophysics Lab", "doi": null, "abstractUrl": "/proceedings-article/vr/2015/07223431/12OmNB1NVNQ", "parentPublication": { "id": "proceedings/vr/2015/1727/0", "title": "2015 IEEE Virtual Reality (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/acii/2013/5048/0/5048a460", "title": "Perception of Emotional Gaits Using Avatar Animation of Real and Artificially Synthesized Gaits", "doi": null, "abstractUrl": "/proceedings-article/acii/2013/5048a460/12OmNzWx07H", "parentPublication": { "id": "proceedings/acii/2013/5048/0", "title": "2013 Humaine Association Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "trans/tg/2013/04/ttg2013040701", "title": "Peripheral Stimulation and its Effect on Perceived Spatial Scale in Virtual Environments", "doi": null, "abstractUrl": "/journal/tg/2013/04/ttg2013040701/13rRUx0xPmZ", "parentPublication": { "id": "trans/tg", "title": "IEEE Transactions on Visualization & Computer Graphics", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": 
"proceedings/ismar-adjunct/2018/7592/0/08699200", "title": "Effective Free Field of View Scene Exploration in VR and AR", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2018/08699200/19F1SrRS4vK", "parentPublication": { "id": "proceedings/ismar-adjunct/2018/7592/0", "title": "2018 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/ismar-adjunct/2022/5365/0/536500a278", "title": "A Conceptual Replication and Extension of Triangulation by Walking for Measuring Perceived Distance Through a Wall", "doi": null, "abstractUrl": "/proceedings-article/ismar-adjunct/2022/536500a278/1J7Wy5IYC2I", "parentPublication": { "id": "proceedings/ismar-adjunct/2022/5365/0", "title": "2022 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798209", "title": "Enactive Approach to Assess Perceived Speed Error during Walking and Running in Virtual Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798209/1cI6auzeLYY", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798095", "title": "Distance Judgments to On- and Off-Ground Objects in Augmented Reality", "doi": null, "abstractUrl": "/proceedings-article/vr/2019/08798095/1cJ0Yxz6rrG", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/vr/2019/1377/0/08798164", "title": "Reducing Cybersickness by Geometry Deformation", "doi": null, 
"abstractUrl": "/proceedings-article/vr/2019/08798164/1cJ1e7ULbji", "parentPublication": { "id": "proceedings/vr/2019/1377/0", "title": "2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/acii/2019/3888/0/08925500", "title": "Robots Expressing Dominance: Effects of Behaviours and Modulation", "doi": null, "abstractUrl": "/proceedings-article/acii/2019/08925500/1fHGGZFtIw8", "parentPublication": { "id": "proceedings/acii/2019/3888/0", "title": "2019 8th International Conference on Affective Computing and Intelligent Interaction (ACII)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" }, { "id": "proceedings/icmew/2020/1485/0/09106059", "title": "Field-Of-View Effect on The Perceived Quality of Omnidirectional Images", "doi": null, "abstractUrl": "/proceedings-article/icmew/2020/09106059/1kwqECCHmus", "parentPublication": { "id": "proceedings/icmew/2020/1485/0", "title": "2020 IEEE International Conference on Multimedia & Expo Workshops (ICMEW)", "__typename": "ParentPublication" }, "__typename": "RecommendedArticleType" } ], "articleVideos": [] }