xref: /aosp_15_r20/external/libdav1d/src/x86/looprestoration.h (revision c09093415860a1c2373dacd84c4fde00c507cdfd)
1 /*
2  * Copyright © 2018, VideoLAN and dav1d authors
3  * Copyright © 2018, Two Orioles, LLC
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions are met:
8  *
9  * 1. Redistributions of source code must retain the above copyright notice, this
10  *    list of conditions and the following disclaimer.
11  *
12  * 2. Redistributions in binary form must reproduce the above copyright notice,
13  *    this list of conditions and the following disclaimer in the documentation
14  *    and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19  * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
20  * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26  */
27 
28 #include "src/cpu.h"
29 #include "src/looprestoration.h"
30 
31 #include "common/intops.h"
32 
/* Declare the Wiener filter assembly entry points (7-tap and 5-tap) for a
 * given instruction-set extension; BF() adds the bitdepth suffix. */
#define decl_wiener_filter_fns(ext) \
decl_lr_filter_fn(BF(dav1d_wiener_filter7, ext)); \
decl_lr_filter_fn(BF(dav1d_wiener_filter5, ext))

/* Declare the self-guided restoration (SGR) assembly entry points for a
 * given instruction-set extension: 5x5, 3x3 and the mixed variant. */
#define decl_sgr_filter_fns(ext) \
decl_lr_filter_fn(BF(dav1d_sgr_filter_5x5, ext)); \
decl_lr_filter_fn(BF(dav1d_sgr_filter_3x3, ext)); \
decl_lr_filter_fn(BF(dav1d_sgr_filter_mix, ext))

/* Prototypes for every ISA level referenced below; x86-64-only variants are
 * still declared unconditionally — the init function guards their use. */
decl_wiener_filter_fns(sse2);
decl_wiener_filter_fns(ssse3);
decl_wiener_filter_fns(avx2);
decl_wiener_filter_fns(avx512icl);
decl_sgr_filter_fns(ssse3);
decl_sgr_filter_fns(avx2);
decl_sgr_filter_fns(avx512icl);
49 
/* Install the fastest available x86 SIMD implementations of the loop
 * restoration filters (Wiener and self-guided) into the DSP context.
 * Each ISA tier overwrites the pointers set by the previous one, so the
 * highest tier whose CPU-flag check passes wins; an early return leaves
 * the best-so-far (or the C fallbacks set by the caller) in place.
 *
 * c:   DSP context whose wiener[] and sgr[] function pointers are filled in.
 * bpc: bits per component of the stream (relevant only in the
 *      high-bitdepth build, where it distinguishes 10- from 12-bit).
 */
static ALWAYS_INLINE void loop_restoration_dsp_init_x86(Dav1dLoopRestorationDSPContext *const c, const int bpc) {
    const unsigned flags = dav1d_get_cpu_flags();

    if (!(flags & DAV1D_X86_CPU_FLAG_SSE2)) return;
#if BITDEPTH == 8
    /* SSE2 assembly exists only for 8-bit Wiener; no SSE2 SGR. */
    c->wiener[0] = BF(dav1d_wiener_filter7, sse2);
    c->wiener[1] = BF(dav1d_wiener_filter5, sse2);
#endif

    if (!(flags & DAV1D_X86_CPU_FLAG_SSSE3)) return;
    c->wiener[0] = BF(dav1d_wiener_filter7, ssse3);
    c->wiener[1] = BF(dav1d_wiener_filter5, ssse3);
    /* SGR assembly covers 8-bit always, and only 10-bit in the
     * high-bitdepth build (BITDEPTH is compile-time, bpc is runtime);
     * 12-bit keeps the C implementation. */
    if (BITDEPTH == 8 || bpc == 10) {
        c->sgr[0] = BF(dav1d_sgr_filter_5x5, ssse3);
        c->sgr[1] = BF(dav1d_sgr_filter_3x3, ssse3);
        c->sgr[2] = BF(dav1d_sgr_filter_mix, ssse3);
    }

#if ARCH_X86_64
    /* AVX2 and AVX-512 variants are only built for x86-64. */
    if (!(flags & DAV1D_X86_CPU_FLAG_AVX2)) return;

    c->wiener[0] = BF(dav1d_wiener_filter7, avx2);
    c->wiener[1] = BF(dav1d_wiener_filter5, avx2);
    if (BITDEPTH == 8 || bpc == 10) {
        c->sgr[0] = BF(dav1d_sgr_filter_5x5, avx2);
        c->sgr[1] = BF(dav1d_sgr_filter_3x3, avx2);
        c->sgr[2] = BF(dav1d_sgr_filter_mix, avx2);
    }

    if (!(flags & DAV1D_X86_CPU_FLAG_AVX512ICL)) return;

    c->wiener[0] = BF(dav1d_wiener_filter7, avx512icl);
#if BITDEPTH == 8
    /* With VNNI we don't need a 5-tap version. */
    c->wiener[1] = c->wiener[0];
#else
    c->wiener[1] = BF(dav1d_wiener_filter5, avx512icl);
#endif
    if (BITDEPTH == 8 || bpc == 10) {
        c->sgr[0] = BF(dav1d_sgr_filter_5x5, avx512icl);
        c->sgr[1] = BF(dav1d_sgr_filter_3x3, avx512icl);
        c->sgr[2] = BF(dav1d_sgr_filter_mix, avx512icl);
    }
#endif
}
95