Lines Matching +full:64 +full:m (matches from the Linux kernel's lib/siphash.c)
15 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
50 u64 m; in __siphash_aligned() local
53 m = le64_to_cpup(data); in __siphash_aligned()
54 v3 ^= m; in __siphash_aligned()
57 v0 ^= m; in __siphash_aligned()
59 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64 in __siphash_aligned()
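The matches at file lines 50-59 are the per-word absorption loop of __siphash_aligned(): each little-endian u64 of the input is XORed into v3, the state is mixed by two SIPROUNDs (SipHash-2-4), and the same word is XORed into v0; the trailing #if selects a word-at-a-time fast path for the leftover tail bytes. A minimal standalone sketch of that absorption step, with the round expanded from the SipHash paper rather than the kernel's SIPROUND macro (names here are illustrative, not kernel symbols):

#include <stdint.h>

/* One SipHash round (ARX mixing), constants per the SipHash paper. */
#define SIPROUND(v0, v1, v2, v3) do {					\
	v0 += v1; v1 = (v1 << 13) | (v1 >> 51); v1 ^= v0;	\
	v0 = (v0 << 32) | (v0 >> 32);				\
	v2 += v3; v3 = (v3 << 16) | (v3 >> 48); v3 ^= v2;	\
	v0 += v3; v3 = (v3 << 21) | (v3 >> 43); v3 ^= v0;	\
	v2 += v1; v1 = (v1 << 17) | (v1 >> 47); v1 ^= v2;	\
	v2 = (v2 << 32) | (v2 >> 32);				\
} while (0)

/* Absorption step mirrored by the matches above: XOR the word into v3,
 * run two rounds, XOR the same word into v0. */
static void siphash_absorb_u64(uint64_t v[4], uint64_t m)
{
	v[3] ^= m;
	SIPROUND(v[0], v[1], v[2], v[3]);
	SIPROUND(v[0], v[1], v[2], v[3]);
	v[0] ^= m;
}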
83 u64 m; in __siphash_unaligned() local
86 m = get_unaligned_le64(data); in __siphash_unaligned()
87 v3 ^= m; in __siphash_unaligned()
90 v0 ^= m; in __siphash_unaligned()
92 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64 in __siphash_unaligned()
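__siphash_unaligned() (file lines 83-92) runs the same loop; the only difference is the load, get_unaligned_le64() instead of le64_to_cpup(), so callers are not required to pass 8-byte-aligned data. For illustration, a portable stand-in for such an unaligned little-endian load (the kernel's real helper is arch-specific and may compile down to a single load; load_le64_any is not a kernel symbol):

#include <stdint.h>
#include <string.h>

/* Read 8 bytes at any alignment and assemble them little-endian. */
static uint64_t load_le64_any(const void *p)
{
	unsigned char b[8];
	uint64_t v = 0;
	int i;

	memcpy(b, p, sizeof(b));
	for (i = 7; i >= 0; i--)
		v = (v << 8) | b[i];
	return v;
}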
112 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
128 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
149 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
176 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
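File lines 112-176 are the kernel-doc headers for the fixed-length fast paths siphash_1u64() through siphash_4u64(), which hash exactly one to four u64 inputs without the generic length loop. A hedged usage sketch against the API declared in <linux/siphash.h> (the key variable and wrapper names below are invented for illustration):

#include <linux/siphash.h>
#include <linux/random.h>

static siphash_key_t flow_hash_key;	/* hypothetical key object */

static void flow_hash_init(void)
{
	/* The key must be secret and random for the PRF guarantee to hold. */
	get_random_bytes(&flow_hash_key, sizeof(flow_hash_key));
}

static u64 flow_hash(u64 saddr, u64 daddr, u64 ports)
{
	/* Exactly three u64 inputs, so the siphash_3u64() fast path fits. */
	return siphash_3u64(saddr, daddr, ports, &flow_hash_key);
}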
229 #if BITS_PER_LONG == 64
230 /* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
251 u64 m; in __hsiphash_aligned() local
254 m = le64_to_cpup(data); in __hsiphash_aligned()
255 v3 ^= m; in __hsiphash_aligned()
257 v0 ^= m; in __hsiphash_aligned()
259 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64 in __hsiphash_aligned()
284 u64 m; in __hsiphash_unaligned() local
287 m = get_unaligned_le64(data); in __hsiphash_unaligned()
288 v3 ^= m; in __hsiphash_unaligned()
290 v0 ^= m; in __hsiphash_unaligned()
292 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64 in __hsiphash_unaligned()
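File lines 229-292 are the 64-bit build of hsiphash: as the comment at line 230 says, HalfSipHash1-3 is implemented as SipHash1-3 there, so __hsiphash_aligned()/__hsiphash_unaligned() absorb u64 words exactly like the functions above, only with a single mixing round per word (one line separates v3 ^= m from v0 ^= m in these matches) and three rounds at finalization. A sketch of that pattern, reusing the SIPROUND macro from the earlier sketch (hsip64_absorb_u64 is an illustrative name):

/* SipHash1-3 absorption: one round per word instead of two. */
static void hsip64_absorb_u64(uint64_t v[4], uint64_t m)
{
	v[3] ^= m;
	SIPROUND(v[0], v[1], v[2], v[3]);
	v[0] ^= m;
}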
312 * hsiphash_1u32 - compute 64-bit hsiphash PRF value of a u32
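hsiphash_1u32() (file line 312) is the fixed-length entry point of the weaker, hash-table-oriented PRF; it returns a u32 and, per the comment above, is backed by SipHash1-3 on 64-bit kernels. A hedged usage sketch (key and helper names invented for illustration; the key must be filled with get_random_bytes() before first use):

#include <linux/siphash.h>
#include <linux/random.h>

static hsiphash_key_t bucket_key;	/* hypothetical key object */

static u32 bucket_of(u32 id, u32 nbuckets)
{
	/* hsiphash: fast keyed hash for table bucketing, not a MAC. */
	return hsiphash_1u32(id, &bucket_key) % nbuckets;
}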
413 u32 m; in __hsiphash_aligned() local
416 m = le32_to_cpup(data); in __hsiphash_aligned()
417 v3 ^= m; in __hsiphash_aligned()
419 v0 ^= m; in __hsiphash_aligned()
436 u32 m; in __hsiphash_unaligned() local
439 m = get_unaligned_le32(data); in __hsiphash_unaligned()
440 v3 ^= m; in __hsiphash_unaligned()
442 v0 ^= m; in __hsiphash_unaligned()
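File lines 413-442 are the genuine HalfSipHash1-3 path used when BITS_PER_LONG is not 64: state lanes and message words are u32, loaded with le32_to_cpup()/get_unaligned_le32(), and again a single round sits between v3 ^= m and v0 ^= m. A standalone sketch of the 32-bit round and absorption (rotation constants taken from the HalfSipHash reference implementation, not copied from the kernel's HSIPROUND macro; names are illustrative):

#include <stdint.h>

/* HalfSipHash round on 32-bit lanes. */
#define HSIPROUND(v0, v1, v2, v3) do {				\
	v0 += v1; v1 = (v1 << 5) | (v1 >> 27); v1 ^= v0;	\
	v0 = (v0 << 16) | (v0 >> 16);				\
	v2 += v3; v3 = (v3 << 8) | (v3 >> 24); v3 ^= v2;	\
	v0 += v3; v3 = (v3 << 7) | (v3 >> 25); v3 ^= v0;	\
	v2 += v1; v1 = (v1 << 13) | (v1 >> 19); v1 ^= v2;	\
	v2 = (v2 << 16) | (v2 >> 16);				\
} while (0)

/* Same absorption shape as above, with one round per u32 word. */
static void hsiphash32_absorb_u32(uint32_t v[4], uint32_t m)
{
	v[3] ^= m;
	HSIPROUND(v[0], v[1], v[2], v[3]);
	v[0] ^= m;
}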