Changes in 4.9.320 9p: missing chunk of "fs/9p: Don't update file type when updating file attributes" random: remove stale maybe_reseed_primary_crng random: remove stale urandom_init_wait random: remove variable limit random: fix comment for unused random_min_urandom_seed random: convert get_random_int/long into get_random_u32/u64 random: move random_min_urandom_seed into CONFIG_SYSCTL ifdef block random: invalidate batched entropy after crng init random: silence compiler warnings and fix race random: add wait_for_random_bytes() API random: add get_random_{bytes,u32,u64,int,long,once}_wait family random: warn when kernel uses unseeded randomness random: do not ignore early device randomness random: suppress spammy warnings about unseeded randomness random: reorder READ_ONCE() in get_random_uXX random: fix warning message on ia64 and parisc random: use a different mixing algorithm for add_device_randomness() random: set up the NUMA crng instances after the CRNG is fully initialized random: fix possible sleeping allocation from irq context random: rate limit unseeded randomness warnings random: add a spinlock_t to struct batched_entropy char/random: silence a lockdep splat with printk() Revert "char/random: silence a lockdep splat with printk()" random: always use batched entropy for get_random_u{32,64} random: fix data race on crng_node_pool crypto: chacha20 - Fix keystream alignment for chacha20_block() random: always fill buffer in get_random_bytes_wait random: optimize add_interrupt_randomness drivers/char/random.c: remove unused dont_count_entropy random: Fix whitespace pre random-bytes work random: Return nbytes filled from hw RNG random: add a config option to trust the CPU's hwrng random: remove preempt disabled region random: Make crng state queryable random: make CPU trust a boot parameter drivers/char/random.c: constify poolinfo_table drivers/char/random.c: remove unused stuct poolinfo::poolbits drivers/char/random.c: make primary_crng static random: only 
read from /dev/random after its pool has received 128 bits random: move rand_initialize() earlier random: document get_random_int() family latent_entropy: avoid build error when plugin cflags are not set random: fix soft lockup when trying to read from an uninitialized blocking pool random: Support freezable kthreads in add_hwgenerator_randomness() fdt: add support for rng-seed random: Use wait_event_freezable() in add_hwgenerator_randomness() char/random: Add a newline at the end of the file Revert "hwrng: core - Freeze khwrng thread during suspend" crypto: Deduplicate le32_to_cpu_array() and cpu_to_le32_array() crypto: blake2s - generic C library implementation and selftest lib/crypto: blake2s: move hmac construction into wireguard lib/crypto: sha1: re-roll loops to reduce code size random: Don't wake crng_init_wait when crng_init == 1 random: Add a urandom_read_nowait() for random APIs that don't warn random: add GRND_INSECURE to return best-effort non-cryptographic bytes random: ignore GRND_RANDOM in getentropy(2) random: make /dev/random be almost like /dev/urandom random: fix crash on multiple early calls to add_bootloader_randomness() random: remove the blocking pool random: delete code to pull data into pools random: remove kernel.random.read_wakeup_threshold random: remove unnecessary unlikely() random: convert to ENTROPY_BITS for better code readability random: Add and use pr_fmt() random: fix typo in add_timer_randomness() random: remove some dead code of poolinfo random: split primary/secondary crng init paths random: avoid warnings for !CONFIG_NUMA builds x86: Remove arch_has_random, arch_has_random_seed powerpc: Remove arch_has_random, arch_has_random_seed linux/random.h: Remove arch_has_random, arch_has_random_seed linux/random.h: Use false with bool linux/random.h: Mark CONFIG_ARCH_RANDOM functions __must_check powerpc: Use bool in archrandom.h random: add arch_get_random_*long_early() random: avoid arch_get_random_seed_long() when collecting IRQ 
randomness random: remove dead code left over from blocking pool MAINTAINERS: co-maintain random.c crypto: blake2s - include <linux/bug.h> instead of <asm/bug.h> crypto: blake2s - adjust include guard naming random: document add_hwgenerator_randomness() with other input functions random: remove unused irq_flags argument from add_interrupt_randomness() random: use BLAKE2s instead of SHA1 in extraction random: do not sign extend bytes for rotation when mixing random: do not re-init if crng_reseed completes before primary init random: mix bootloader randomness into pool random: harmonize "crng init done" messages random: use IS_ENABLED(CONFIG_NUMA) instead of ifdefs random: initialize ChaCha20 constants with correct endianness random: early initialization of ChaCha constants random: avoid superfluous call to RDRAND in CRNG extraction random: don't reset crng_init_cnt on urandom_read() random: fix typo in comments random: cleanup poolinfo abstraction crypto: chacha20 - Fix chacha20_block() keystream alignment (again) random: cleanup integer types random: remove incomplete last_data logic random: remove unused extract_entropy() reserved argument random: try to actively add entropy rather than passively wait for it random: rather than entropy_store abstraction, use global random: remove unused OUTPUT_POOL constants random: de-duplicate INPUT_POOL constants random: prepend remaining pool constants with POOL_ random: cleanup fractional entropy shift constants random: access input_pool_data directly rather than through pointer random: simplify arithmetic function flow in account() random: continually use hwgenerator randomness random: access primary_pool directly rather than through pointer random: only call crng_finalize_init() for primary_crng random: use computational hash for entropy extraction random: simplify entropy debiting random: use linear min-entropy accumulation crediting random: always wake up entropy writers after extraction random: make credit_entropy_bits() 
always safe random: remove use_input_pool parameter from crng_reseed() random: remove batched entropy locking random: fix locking in crng_fast_load() random: use RDSEED instead of RDRAND in entropy extraction random: inline leaves of rand_initialize() random: ensure early RDSEED goes through mixer on init random: do not xor RDRAND when writing into /dev/random random: absorb fast pool into input pool after fast load random: use hash function for crng_slow_load() random: remove outdated INT_MAX >> 6 check in urandom_read() random: zero buffer after reading entropy from userspace random: tie batched entropy generation to base_crng generation random: remove ifdef'd out interrupt bench random: remove unused tracepoints random: add proper SPDX header random: deobfuscate irq u32/u64 contributions random: introduce drain_entropy() helper to declutter crng_reseed() random: remove useless header comment random: remove whitespace and reorder includes random: group initialization wait functions random: group entropy extraction functions random: group entropy collection functions random: group userspace read/write functions random: group sysctl functions random: rewrite header introductory comment workqueue: make workqueue available early during boot random: defer fast pool mixing to worker random: do not take pool spinlock at boot random: unify early init crng load accounting random: check for crng_init == 0 in add_device_randomness() hwrng: core - do not use multiple blank lines hwrng: core - rewrite better comparison to NULL hwrng: core - Rewrite the header hwrng: core - Move hwrng miscdev minor number to include/linux/miscdevice.h hwrng: core - remove unused PFX macro hwrng: use rng source with best quality hwrng: remember rng chosen by user random: pull add_hwgenerator_randomness() declaration into random.h random: clear fast pool, crng, and batches in cpuhp bring up random: round-robin registers as ulong, not u32 random: only wake up writers after zap if threshold was 
passed random: cleanup UUID handling random: unify cycles_t and jiffies usage and types random: do crng pre-init loading in worker rather than irq random: give sysctl_random_min_urandom_seed a more sensible value random: don't let 644 read-only sysctls be written to random: replace custom notifier chain with standard one random: use SipHash as interrupt entropy accumulator random: make consistent usage of crng_ready() random: reseed more often immediately after booting random: check for signal and try earlier when generating entropy random: skip fast_init if hwrng provides large chunk of entropy random: treat bootloader trust toggle the same way as cpu trust toggle random: re-add removed comment about get_random_{u32,u64} reseeding random: mix build-time latent entropy into pool at init random: do not split fast init input in add_hwgenerator_randomness() random: do not allow user to keep crng key around on stack random: check for signal_pending() outside of need_resched() check random: check for signals every PAGE_SIZE chunk of /dev/[u]random random: make random_get_entropy() return an unsigned long random: document crng_fast_key_erasure() destination possibility random: fix sysctl documentation nits init: call time_init() before rand_initialize() ia64: define get_cycles macro for arch-override s390: define get_cycles macro for arch-override parisc: define get_cycles macro for arch-override alpha: define get_cycles macro for arch-override powerpc: define get_cycles macro for arch-override timekeeping: Add raw clock fallback for random_get_entropy() m68k: use fallback for random_get_entropy() instead of zero mips: use fallback for random_get_entropy() instead of just c0 random arm: use fallback for random_get_entropy() instead of zero nios2: use fallback for random_get_entropy() instead of zero x86/tsc: Use fallback for random_get_entropy() instead of zero um: use fallback for random_get_entropy() instead of zero sparc: use fallback for random_get_entropy() instead 
of zero xtensa: use fallback for random_get_entropy() instead of zero uapi: rename ext2_swab() to swab() and share globally in swab.h random: insist on random_get_entropy() existing in order to simplify random: do not use batches when !crng_ready() random: do not pretend to handle premature next security model random: order timer entropy functions below interrupt functions random: do not use input pool from hard IRQs random: help compiler out with fast_mix() by using simpler arguments siphash: use one source of truth for siphash permutations random: use symbolic constants for crng_init states random: avoid initializing twice in credit race random: remove ratelimiting for in-kernel unseeded randomness random: use proper jiffies comparison macro random: handle latent entropy and command line from random_init() random: credit architectural init the exact amount random: use static branch for crng_ready() random: remove extern from functions in header random: use proper return types on get_random_{int,long}_wait() random: move initialization functions out of hot pages random: move randomize_page() into mm where it belongs random: convert to using fops->write_iter() random: wire up fops->splice_{read,write}_iter() random: check for signals after page of pool writes Revert "random: use static branch for crng_ready()" crypto: drbg - add FIPS 140-2 CTRNG for noise source crypto: drbg - always seeded with SP800-90B compliant noise source crypto: drbg - prepare for more fine-grained tracking of seeding state crypto: drbg - track whether DRBG was seeded with !rng_is_initialized() crypto: drbg - move dynamic ->reseed_threshold adjustments to __drbg_seed() crypto: drbg - always try to free Jitter RNG instance crypto: drbg - make reseeding from get_random_bytes() synchronous random: avoid checking crng_ready() twice in random_init() random: mark bootloader randomness code as __init random: account for arch randomness in bits ASoC: cs42l52: Fix TLV scales for mixer controls ASoC: 
cs53l30: Correct number of volume levels on SX controls ASoC: cs42l52: Correct TLV for Bypass Volume ASoC: cs42l56: Correct typo in minimum level for SX volume controls ata: libata-core: fix NULL pointer deref in ata_host_alloc_pinfo() ASoC: wm8962: Fix suspend while playing music scsi: vmw_pvscsi: Expand vcpuHint to 16 bits scsi: lpfc: Fix port stuck in bypassed state after LIP in PT2PT topology virtio-mmio: fix missing put_device() when vm_cmdline_parent registration failed nfc: nfcmrvl: Fix memory leak in nfcmrvl_play_deferred ipv6: Fix signed integer overflow in l2tp_ip6_sendmsg net: ethernet: mtk_eth_soc: fix misuse of mem alloc interface netdev[napi]_alloc_frag random: credit cpu and bootloader seeds by default pNFS: Don't keep retrying if the server replied NFS4ERR_LAYOUTUNAVAILABLE misc: atmel-ssc: Fix IRQ check in ssc_probe irqchip/gic/realview: Fix refcount leak in realview_gic_of_init irqchip/gic-v3: Iterate over possible CPUs by for_each_possible_cpu() comedi: vmk80xx: fix expression for tx buffer size USB: serial: option: add support for Cinterion MV31 with new baseline USB: serial: io_ti: add Agilent E5805A support usb: gadget: lpc32xx_udc: Fix refcount leak in lpc32xx_udc_probe serial: 8250: Store to lsr_save_flags after lsr read ext4: fix bug_on ext4_mb_use_inode_pa ext4: make variable "count" signed ext4: add reserved GDT blocks check l2tp: don't use inet_shutdown on ppp session destroy l2tp: fix race in pppol2tp_release with session object destroy s390/mm: use non-quiescing sske for KVM switch to keyed guest xprtrdma: fix incorrect header size calculations swiotlb: fix info leak with DMA_FROM_DEVICE Reinstate some of "swiotlb: rework "fix info leak with DMA_FROM_DEVICE"" fuse: fix pipe buffer lifetime for direct_io tcp: change source port randomizarion at connect() time tcp: add some entropy in __inet_hash_connect() secure_seq: use the 64 bits of the siphash for port offset calculation tcp: use different parts of the port_offset for index and 
offset tcp: add small random increments to the source port tcp: dynamically allocate the perturb table used by source ports tcp: increase source port perturb table to 2^16 tcp: drop the hash_32() part from the index calculation Linux 4.9.320 Conflicts: crypto/chacha20_generic.c drivers/char/random.c drivers/of/fdt.c include/crypto/chacha20.h lib/chacha20.c Merge resolution notes: - Added CHACHA20_KEY_SIZE and CHACHA20_BLOCK_SIZE constants to chacha.h, to minimize changes from the 4.9.320 version of random.c - Updated lib/vsprintf.c for "random: replace custom notifier chain with standard one". Change-Id: Ia7a12d8883b808f88bbe807d6150552bb084f6b3 Signed-off-by: Eric Biggers <ebiggers@google.com>
540 lines
11 KiB
C
540 lines
11 KiB
C
/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
|
|
*
|
|
* This file is provided under a dual BSD/GPLv2 license.
|
|
*
|
|
* SipHash: a fast short-input PRF
|
|
* https://131002.net/siphash/
|
|
*
|
|
* This implementation is specifically for SipHash2-4 for a secure PRF
|
|
* and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
|
|
* hashtables.
|
|
*/
|
|
|
|
#include <linux/siphash.h>
|
|
#include <asm/unaligned.h>
|
|
|
|
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
|
|
#include <linux/dcache.h>
|
|
#include <asm/word-at-a-time.h>
|
|
#endif
|
|
|
|
/* One round of the SipHash permutation over the four 64-bit state words. */
#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)

/*
 * Common function prologue: initialize the four state words from the
 * SipHash constants, mix in the 128-bit key, and precompute "b" — the
 * final message block, which carries the input length in its top byte
 * (len << 56) per the SipHash specification.
 *
 * NOTE: expands to local declarations; expects "key" (const siphash_key_t *)
 * to be in scope at the point of use.
 */
#define PREAMBLE(len) \
	u64 v0 = SIPHASH_CONST_0; \
	u64 v1 = SIPHASH_CONST_1; \
	u64 v2 = SIPHASH_CONST_2; \
	u64 v3 = SIPHASH_CONST_3; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/*
 * Common function epilogue: absorb the final block "b" with the usual two
 * compression rounds, then flip v2 with 0xff and run the four finalization
 * rounds of SipHash-2-4. Expands to the function's return statement, so it
 * must be the last statement in the caller.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
|
|
|
|
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __siphash_aligned - SipHash-2-4 of an 8-byte-aligned buffer.
 * @data: input buffer (must be u64-aligned)
 * @len: number of input bytes
 * @key: the 128-bit siphash key
 *
 * Only built when the architecture lacks efficient unaligned access;
 * otherwise __siphash_unaligned below serves both cases (see siphash.h).
 */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* "end" marks the last whole 8-byte word; "left" is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	/* Compress each full little-endian 64-bit word with two rounds. */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/*
	 * Fold the tail into the low bytes of "b" in one word load:
	 * load_unaligned_zeropad() may read past the buffer but never
	 * faults, and bytemask_from_count() discards the overread bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;	/* fall through */
	case 6: b |= ((u64)end[5]) << 40;	/* fall through */
	case 5: b |= ((u64)end[4]) << 32;	/* fall through */
	case 4: b |= le32_to_cpup(data); break;	/* data == end here */
	case 3: b |= ((u64)end[2]) << 16;	/* fall through */
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif
|
|
|
|
/*
 * __siphash_unaligned - SipHash-2-4 of a buffer with no alignment requirement.
 * @data: input buffer (any alignment)
 * @len: number of input bytes
 * @key: the 128-bit siphash key
 *
 * Identical structure to __siphash_aligned, but uses get_unaligned_*
 * accessors so it is safe for arbitrarily aligned input.
 */
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* "end" marks the last whole 8-byte word; "left" is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	/* Compress each full little-endian 64-bit word with two rounds. */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single-word tail fold; see comment in __siphash_aligned. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;		/* fall through */
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
|
|
|
|
/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	/* One full word: a single two-round compression, then finalize. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);
|
|
|
|
/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	/* Compress the two words in order, two rounds each, then finalize. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);
|
|
|
|
/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	/* Compress the three words in order, two rounds each, then finalize. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);
|
|
|
|
/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: forth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	/* Compress the four words in order, two rounds each, then finalize. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);
|
|
|
|
/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	/*
	 * A lone u32 fits entirely in the final block "b" (low 32 bits);
	 * no full 8-byte word is compressed before finalization.
	 */
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);
|
|
|
|
/**
 * siphash_3u32 - compute 64-bit siphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the siphash key
 */
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	/* Pack the first two u32s into one little-endian u64 word. */
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	/* The trailing u32 rides in the final block "b". */
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);
|
|
|
|
#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */

/* On 64-bit, "half"-siphash rounds are full 64-bit SipHash rounds. */
#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
/*
 * SipHash-1-3 epilogue: one compression round for the final block "b",
 * then three finalization rounds. Expands to the return statement.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
|
|
|
|
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __hsiphash_aligned - SipHash-1-3 (64-bit build) of an aligned buffer,
 * truncated to a u32 result.
 * @data: input buffer (must be u64-aligned)
 * @len: number of input bytes
 * @key: the hsiphash key
 *
 * Only built when the architecture lacks efficient unaligned access.
 * Same layout as __siphash_aligned, but with one compression round
 * per word (SipHash-1-3 instead of SipHash-2-4).
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* "end" marks the last whole 8-byte word; "left" is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single-word tail fold; see comment in __siphash_aligned. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;	/* fall through */
	case 6: b |= ((u64)end[5]) << 40;	/* fall through */
	case 5: b |= ((u64)end[4]) << 32;	/* fall through */
	case 4: b |= le32_to_cpup(data); break;	/* data == end here */
	case 3: b |= ((u64)end[2]) << 16;	/* fall through */
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
|
|
|
|
/*
 * __hsiphash_unaligned - SipHash-1-3 (64-bit build) of a buffer with no
 * alignment requirement, truncated to a u32 result.
 * @data: input buffer (any alignment)
 * @len: number of input bytes
 * @key: the hsiphash key
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* "end" marks the last whole 8-byte word; "left" is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	/* One compression round per word (SipHash-1-3). */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single-word tail fold; see comment in __siphash_aligned. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;		/* fall through */
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
|
|
|
|
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* A lone u32 fits entirely in the final block "b"; finalize directly. */
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
|
|
|
|
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	/* Pack both u32s into one little-endian u64 word. */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
|
|
|
|
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	/* Pack the first two u32s into one little-endian u64 word. */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	/* The trailing u32 rides in the final block "b". */
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
|
|
|
|
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	/* Pack the four u32s pairwise into two little-endian u64 words. */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
|
|
#else
/* One round of the HalfSipHash permutation over the four 32-bit state words. */
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

/*
 * HalfSipHash prologue: initialize the 32-bit state words from the
 * HalfSipHash constants, mix in the 64-bit key, and precompute "b",
 * the final block carrying the input length in its top byte (len << 24).
 * Expands to local declarations; expects "key" to be in scope.
 */
#define HPREAMBLE(len) \
	u32 v0 = HSIPHASH_CONST_0; \
	u32 v1 = HSIPHASH_CONST_1; \
	u32 v2 = HSIPHASH_CONST_2; \
	u32 v3 = HSIPHASH_CONST_3; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/*
 * HalfSipHash-1-3 epilogue: one compression round for the final block,
 * then three finalization rounds; the 32-bit digest is v1 ^ v3.
 * Expands to the return statement.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;
|
|
|
|
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __hsiphash_aligned - HalfSipHash-1-3 (32-bit build) of an aligned buffer.
 * @data: input buffer (must be u32-aligned)
 * @len: number of input bytes
 * @key: the hsiphash key
 *
 * Only built when the architecture lacks efficient unaligned access.
 * Word size here is 32 bits, so the tail is at most 3 bytes.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* "end" marks the last whole 4-byte word; "left" is the 0..3 byte tail. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	/* One compression round per 32-bit word. */
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;	/* fall through */
	case 2: b |= le16_to_cpup(data); break;	/* data == end here */
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
|
|
|
|
/*
 * __hsiphash_unaligned - HalfSipHash-1-3 (32-bit build) of a buffer with no
 * alignment requirement.
 * @data: input buffer (any alignment)
 * @len: number of input bytes
 * @key: the hsiphash key
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* "end" marks the last whole 4-byte word; "left" is the 0..3 byte tail. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	/* One compression round per 32-bit word. */
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Byte-at-a-time tail handling; the case cascade is intentional. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
|
|
|
|
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* One full 32-bit word: a single compression round, then finalize. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
|
|
|
|
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	/* Compress the two words in order, one round each, then finalize. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
|
|
|
|
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	/* Compress the three words in order, one round each, then finalize. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
|
|
|
|
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	/* Compress the four words in order, one round each, then finalize. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif
|