Description: Existing volumetric capture systems require many cameras and lengthy post-processing. We introduce the first system that can capture a completely clothed human body (including the back) in real time using a single RGB webcam. Our deep-learning-based approach enables new possibilities for low-cost, consumer-accessible immersive teleportation.
We propose a novel hierarchical surface localization algorithm and a direct rendering method that progressively query 3D locations in a coarse-to-fine manner, extracting the surface from implicit occupancy fields with a minimal number of point evaluations. By culling unnecessary regions from evaluation, we accelerate reconstruction by nearly 200 times without compromising quality.
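The sketch below illustrates the coarse-to-fine idea in NumPy; it is not the paper's exact algorithm. `occupancy_fn` is a hypothetical stand-in for the learned implicit occupancy field, mapping (N, 3) query points in [-1, 1]^3 to values in [0, 1], and the resolution levels and culling band are illustrative choices.

```python
# Minimal sketch: hierarchically evaluate an implicit occupancy field,
# re-querying only voxels near the iso-surface at each finer level.
import numpy as np

def coarse_to_fine_occupancy(occupancy_fn, res_levels=(32, 64, 128, 256),
                             band=0.1, thresh=0.5):
    """Return a dense occupancy grid at the finest resolution.

    Voxels whose current estimate is far from the iso-level `thresh`
    (|occ - thresh| >= band) are culled: they inherit the coarse value
    and are never sent to the network again.
    """
    res = res_levels[0]
    grid = query_grid(occupancy_fn, np.ones((res, res, res), bool), res)

    for next_res in res_levels[1:]:
        scale = next_res // res
        # Upsample the coarse grid by nearest-neighbor repetition.
        fine = np.repeat(np.repeat(np.repeat(grid, scale, 0), scale, 1), scale, 2)
        # Only voxels near the surface boundary are re-evaluated.
        active = np.abs(fine - thresh) < band
        fine[active] = query_grid(occupancy_fn, active, next_res)[active]
        grid, res = fine, next_res
    return grid

def query_grid(occupancy_fn, mask, res):
    """Query the network only at the voxel centers selected by `mask`."""
    out = np.zeros((res, res, res), np.float32)
    idx = np.argwhere(mask)                    # (N, 3) integer voxel coords
    pts = (idx + 0.5) / res * 2.0 - 1.0        # voxel centers mapped to [-1, 1]^3
    out[mask] = occupancy_fn(pts)
    return out
```

Because far-from-surface voxels keep their coarse values, the number of network evaluations at the finest level scales roughly with the surface area rather than the volume, which is where the large speed-up comes from.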
We introduce an Online Hard Example Mining (OHEM) technique that suppresses failure modes caused by the rare occurrence of challenging examples. By adaptively updating the sampling probability of the training data based on the current reconstruction accuracy, we effectively alleviate reconstruction artifacts.
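A minimal sketch of this adaptive sampling idea is shown below, not the authors' exact implementation. Each training example's sampling probability is kept proportional to a running estimate of its reconstruction error, so hard examples are drawn more often; `train_step`, the smoothing factor, and the probability floor are illustrative assumptions.

```python
# Sketch: sample training examples proportionally to a running error estimate.
import numpy as np

class HardExampleSampler:
    def __init__(self, num_examples, smoothing=0.9, floor=1e-3, seed=0):
        self.err = np.ones(num_examples, np.float32)  # start with all examples "hard"
        self.smoothing = smoothing                    # EMA factor for the error estimate
        self.floor = floor                            # keeps every example reachable
        self.rng = np.random.default_rng(seed)

    def sample(self, batch_size):
        p = np.maximum(self.err, self.floor)
        p = p / p.sum()
        return self.rng.choice(len(self.err), size=batch_size, replace=False, p=p)

    def update(self, indices, losses):
        """Blend observed per-example losses into the running error estimate."""
        losses = np.asarray(losses, np.float32)
        self.err[indices] = (self.smoothing * self.err[indices]
                             + (1.0 - self.smoothing) * losses)

# Usage inside a training loop (train_step is a hypothetical function that
# returns the per-example reconstruction loss for the sampled batch):
#   sampler = HardExampleSampler(len(dataset))
#   for _ in range(num_iters):
#       idx = sampler.sample(batch_size=8)
#       losses = train_step(dataset, idx)
#       sampler.update(idx, losses)
```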
@article{li2020monocular,
  title   = {Monocular Real-Time Volumetric Performance Capture},
  author  = {Li, Ruilong and Xiu, Yuliang and Saito, Shunsuke and Huang, Zeng and Olszewski, Kyle and Li, Hao},
  journal = {arXiv preprint arXiv:2007.13988},
  year    = {2020}
}

@inproceedings{10.1145/3407662.3407756,
  author    = {Li, Ruilong and Olszewski, Kyle and Xiu, Yuliang and Saito, Shunsuke and Huang, Zeng and Li, Hao},
  title     = {Volumetric Human Teleportation},
  year      = {2020},
  isbn      = {9781450380607},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  url       = {https://doi.org/10.1145/3407662.3407756},
  doi       = {10.1145/3407662.3407756},
  booktitle = {ACM SIGGRAPH 2020 Real-Time Live!},
  articleno = {9},
  numpages  = {1},
  location  = {Virtual Event, USA},
  series    = {SIGGRAPH 2020}
}