@techreport{oai:ipsj.ixsq.nii.ac.jp:00232612,
  author        = {Sano, Wataru and Oishi, Taisei and Sugano, Ryusei and Shinkuma, Ryoichi and Trovato, Gabriele},
  title         = {Sensing and Computing Framework for Sharing Real-Space Experience},
  institution   = {Information Processing Society of Japan},
  number        = {8},
  month         = feb,
  year          = {2024},
  abstract      = {Opportunities for digital-space experience increase through the metaverse. Increasing opportunities in digital-space experiences leads to decreasing opportunities in the real-space experiences. It is crucial to enhance real-space experiences without compromising the digital-space experiences. Prior research has been done to understand sharing experiences. The prior research is limited to a discussion of the sharing real-space experiences on real space. We present a framework for sharing real-space experiences in digital space through a video content with effects. The effects, derived from sensor data of real-space experiences, are applied to a first-person view video of an experiencer. The sensor data is 3D point cloud data. Simulated experiences, representing digital-space experiences, aim to convey realistic real-space experiences. We conduct an experiment to evaluate simulated experiences. We examine our framework through a quantitative evaluation and two types of subjective evaluation. The experiment compares the simulated experiences under 4 conditions. The conditions are sensor data type, statistic type, effects type, and video content type. Results are obtained and compared for both quantitative and subjective evaluations under each of the conditions. ANOVA is employed to test the significance of means. The video contents and the effects type show significant difference. Correlation matrices of subjective immersion evaluation are presented.},
  internal-note = {NOTE(review): author names normalized to "Family, Given" form and de-duplicated; the export had each name twice and most in "Given, Family" order. Verify name order against the published report. Institution inferred from the IPSJ OAI key -- confirm the exact SIG report series.},
}