@article{discovery10051088,
            note = {This version is the author accepted manuscript. For information on re-use, please refer to the publisher's terms and conditions.},
          volume = {37},
          number = {4},
           month = {August},
         journal = {ACM Transactions on Graphics},
            title = {{L}aplacian kernel splatting for efficient depth-of-field and motion blur synthesis or reconstruction},
            year = {2018},
       publisher = {Association for Computing Machinery},
            issn = {0730-0301},
           author = {Leimk{\"u}hler, T. and Seidel, H.-P. and Ritschel, T.},
         abstract = {Simulating combinations of depth-of-field and motion blur is an important factor in the cinematic quality of synthetic images, but can take a long time to compute. Splatting the point-spread function (PSF) of every pixel is general and provides high quality, but requires prohibitive compute time. We accelerate this in two steps: In a pre-process, we optimize for sparse representations of the Laplacian of all possible PSFs, which we call spreadlets. At runtime, spreadlets can be splatted efficiently into the Laplacian of an image. Integrating this image produces the final result. Our approach scales faithfully to strong motion and large out-of-focus areas and compares favorably in speed and quality with off-line and interactive approaches. It is applicable both to synthesizing from pinhole images and to reconstructing from stochastic images, with or without layering.},
              doi = {10.1145/3197517.3201379},
              url = {https://doi.org/10.1145/3197517.3201379}
}
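
The abstract outlines the pipeline: precompute sparse Laplacian-of-PSF stamps ("spreadlets"), splat them into the Laplacian of the output image at runtime, and integrate that image to obtain the final result. Below is a minimal illustrative sketch of that idea, not the authors' implementation: it assumes a single shared disc PSF (uniform defocus), periodic boundaries, and an FFT-based Poisson solve for the integration step; the names disc_psf, laplacian_stamp, splat_laplacian, and integrate_poisson are hypothetical helpers introduced only for this example.

# Illustrative sketch only: splat per-pixel radiance through a Laplacian-of-PSF
# stamp, then recover the blurred image by inverting the discrete Laplacian.
# Assumptions (not from the paper): one shared disc PSF, periodic boundaries,
# dense stamps, FFT Poisson solve.
import numpy as np

def disc_psf(radius):
    """Toy circle-of-confusion PSF: a normalized disc of the given radius."""
    y, x = np.mgrid[-radius:radius + 1, -radius:radius + 1]
    disc = (x**2 + y**2 <= radius**2).astype(float)
    return disc / disc.sum()

def laplacian_stamp(psf):
    """Discrete 5-point Laplacian of a zero-padded PSF (one pixel larger per side).
    For a uniform disc, this is nonzero only near the disc boundary, which is
    the sparsity the paper's spreadlets exploit."""
    p = np.pad(psf, 2)
    return (p[:-2, 1:-1] + p[2:, 1:-1] + p[1:-1, :-2] + p[1:-1, 2:]
            - 4.0 * p[1:-1, 1:-1])

def splat_laplacian(image, radius):
    """Splat every pixel's radiance through its Laplacian-of-PSF stamp (periodic wrap)."""
    h, w = image.shape
    stamp = laplacian_stamp(disc_psf(radius))
    r = radius + 1                              # stamp half-size after the Laplacian
    ys, xs = np.nonzero(stamp)                  # only the nonzero taps are splat
    lap = np.zeros((h, w))
    for y in range(h):
        for x in range(w):
            v = image[y, x]
            for dy, dx in zip(ys, xs):
                lap[(y + dy - r) % h, (x + dx - r) % w] += v * stamp[dy, dx]
    return lap

def integrate_poisson(lap, mean):
    """Invert the periodic 5-point Laplacian via FFT; `mean` fixes the free constant."""
    h, w = lap.shape
    ky = np.fft.fftfreq(h)[:, None]
    kx = np.fft.fftfreq(w)[None, :]
    eig = 2 * np.cos(2 * np.pi * ky) + 2 * np.cos(2 * np.pi * kx) - 4
    eig[0, 0] = 1.0                             # avoid divide-by-zero at the DC term
    F = np.fft.fft2(lap) / eig
    F[0, 0] = mean * h * w                      # set the integration constant
    return np.real(np.fft.ifft2(F))

# Usage: uniform defocus blur of a test image, reconstructed from the Laplacian domain.
img = np.random.rand(64, 64)
blurred = integrate_poisson(splat_laplacian(img, radius=4), mean=img.mean())

In this toy setting the splat is equivalent to convolving the image with the Laplacian of the PSF, so the Poisson solve recovers the conventionally blurred image up to its mean; the paper's contribution is making the per-pixel, spatially varying case fast via optimized sparse spreadlets.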