path: root/mysql/mysys_ssl/my_murmur3.cpp
/* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; version 2 of the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */

/*
  Implementation of the 32-bit version of MurmurHash3, a fast non-cryptographic
  hash function with good statistical properties, based on public domain code
  by Austin Appleby.
*/

#include <my_murmur3.h>


/*
  Platform-specific implementations of ROTL32 helper.
*/

#if defined(_MSC_VER)

/* Microsoft Visual Studio supports intrinsic for rotate left. */

#include <stdlib.h>

#define ROTL32(x, y)	_rotl(x, y)

/*
  Force inlining of intrinsic even though /Oi option is turned off
  in release builds.
*/
#pragma intrinsic(_rotl)

#else // !defined(_MSC_VER)

/*
  Many other compilers should be able to optimize the function below
  to a single rotate instruction.
*/

inline uint32 rotl32 (uint32 x, char r)
{
  return (x << r) | (x >> (32 - r));
}

#define	ROTL32(x,y)	rotl32(x,y)

#endif // !defined(_MSC_VER)


/**
  Compute the 32-bit version of the MurmurHash3 hash for a key.

  @param key   Key for which the hash value is to be computed.
  @param len   Key length in bytes.
  @param seed  Seed for the hash computation.

  @note WARNING! Since MurmurHash3 is known to be susceptible to "hash DoS"
        attacks, it should not be used in any situation where an attacker
        controls the keys being hashed and thus can cause performance
        problems by degrading hash lookups into linear list searches.

  @returns Hash value for the key.
*/

uint32 murmur3_32(const uchar *key, size_t len, uint32 seed)
{
  /* Pointer just past the last complete 4-byte block in the key. */
  const uchar *tail= key + (len - len % 4);

  uint32 h1= seed;

  /* Constants for magic numbers that are used more than once. */
  const uint32 c1= 0xcc9e2d51;
  const uint32 c2= 0x1b873593;

  /* Body: process all 32-bit blocks in the key. */

  for (const uchar *data= key; data != tail; data+= 4)
  {
    /* uint4korr() reads four bytes from "data" as a little-endian uint32. */
    uint32 k1= uint4korr(data);

    k1*= c1;
    k1= ROTL32(k1, 15);
    k1*= c2;

    h1^= k1;
    h1= ROTL32(h1, 13);
    h1= h1 * 5 + 0xe6546b64;
  }

  /* Tail: handle remaining len % 4 bytes. */

  uint32 k1= 0;

  switch(len % 4)
  {
  case 3:
    k1^= static_cast<uint32>(tail[2]) << 16;
    /* Fall through. */
  case 2:
    k1^= static_cast<uint32>(tail[1]) << 8;
    /* Fall through. */
  case 1:
    k1^= tail[0];
    k1*= c1;
    k1= ROTL32(k1, 15);
    k1*= c2;
    h1^= k1;
  }

  /*
    Finalization mix:
    Add length and force all bits of a hash block to avalanche.
  */

  h1^= len;

  h1^= h1 >> 16;
  h1*= 0x85ebca6b;
  h1^= h1 >> 13;
  h1*= 0xc2b2ae35;
  h1^= h1 >> 16;

  return h1;
}
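
/*
  Usage sketch (not part of the upstream file): a minimal, hypothetical example
  of calling murmur3_32() on a byte buffer. The MURMUR3_USAGE_EXAMPLE guard and
  everything inside it are illustrative assumptions only; they rely on the
  uint32 and uchar typedefs pulled in via my_murmur3.h.
*/
#ifdef MURMUR3_USAGE_EXAMPLE

#include <stdio.h>
#include <string.h>

int main()
{
  const char *data= "example key";

  /* Hash the raw bytes of the string, excluding the terminating NUL. */
  uint32 h= murmur3_32(reinterpret_cast<const uchar *>(data),
                       strlen(data), /* seed= */ 0);

  printf("murmur3_32(\"%s\") = 0x%08lx\n", data, (unsigned long) h);
  return 0;
}

#endif /* MURMUR3_USAGE_EXAMPLE */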