Diffstat (limited to 'crypto')
 -rw-r--r--  crypto/crypto.go                              |  15
 -rw-r--r--  crypto/crypto_test.go                         |  10
 -rw-r--r--  crypto/key_store_passphrase.go                |   8
 -rw-r--r--  crypto/randentropy/rand_entropy.go            |  10
 -rw-r--r--  crypto/sha3/LICENSE                           |  27
 -rw-r--r--  crypto/sha3/PATENTS                           |  22
 -rw-r--r--  crypto/sha3/doc.go                            |  66
 -rw-r--r--  crypto/sha3/hashes.go                         |  68
 -rw-r--r--  crypto/sha3/keccakf.go                        |  28
 -rw-r--r--  crypto/sha3/register.go                       |  18
 -rw-r--r--  crypto/sha3/sha3.go                           | 358
 -rw-r--r--  crypto/sha3/sha3_test.go                      | 306
 -rw-r--r--  crypto/sha3/shake.go                          |  60
 -rw-r--r--  crypto/sha3/testdata/keccakKats.json.deflate  | bin 0 -> 521342 bytes
 -rw-r--r--  crypto/sha3/xor.go                            |  16
 -rw-r--r--  crypto/sha3/xor_generic.go                    |  28
 -rw-r--r--  crypto/sha3/xor_unaligned.go                  |  58
 17 files changed, 846 insertions(+), 252 deletions(-)
diff --git a/crypto/crypto.go b/crypto/crypto.go
index 850be4da6..f1f6affac 100644
--- a/crypto/crypto.go
+++ b/crypto/crypto.go
@@ -43,7 +43,7 @@ import (
"golang.org/x/crypto/ripemd160"
)
-func Sha3(data ...[]byte) []byte {
+func Keccak256(data ...[]byte) []byte {
d := sha3.NewKeccak256()
for _, b := range data {
d.Write(b)
@@ -51,7 +51,7 @@ func Sha3(data ...[]byte) []byte {
return d.Sum(nil)
}
-func Sha3Hash(data ...[]byte) (h common.Hash) {
+func Keccak256Hash(data ...[]byte) (h common.Hash) {
d := sha3.NewKeccak256()
for _, b := range data {
d.Write(b)
@@ -60,11 +60,14 @@ func Sha3Hash(data ...[]byte) (h common.Hash) {
return h
}
+// Deprecated: For backward compatibility as other packages depend on these
+func Sha3(data ...[]byte) []byte { return Keccak256(data...) }
+func Sha3Hash(data ...[]byte) common.Hash { return Keccak256Hash(data...) }
+
// Creates an ethereum address given the bytes and the nonce
func CreateAddress(b common.Address, nonce uint64) common.Address {
data, _ := rlp.EncodeToBytes([]interface{}{b, nonce})
- return common.BytesToAddress(Sha3(data)[12:])
- //return Sha3(common.NewValue([]interface{}{b, nonce}).Encode())[12:]
+ return common.BytesToAddress(Keccak256(data)[12:])
}
func Sha256(data []byte) []byte {
@@ -265,7 +268,7 @@ func decryptPreSaleKey(fileContent []byte, password string) (key *Key, err error
if err != nil {
return nil, err
}
- ethPriv := Sha3(plainText)
+ ethPriv := Keccak256(plainText)
ecKey := ToECDSA(ethPriv)
key = &Key{
Id: nil,
@@ -330,7 +333,7 @@ func PKCS7Unpad(in []byte) []byte {
func PubkeyToAddress(p ecdsa.PublicKey) common.Address {
pubBytes := FromECDSAPub(&p)
- return common.BytesToAddress(Sha3(pubBytes[1:])[12:])
+ return common.BytesToAddress(Keccak256(pubBytes[1:])[12:])
}
func zeroBytes(bytes []byte) {
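
Hashing with the renamed API — a minimal, hypothetical caller (not part of this change), assuming the import paths used elsewhere in this tree; the deprecated Sha3/Sha3Hash aliases keep returning byte-identical results:

    package main

    import (
        "bytes"
        "fmt"

        "github.com/ethereum/go-ethereum/common"
        "github.com/ethereum/go-ethereum/crypto"
    )

    func main() {
        digest := crypto.Keccak256([]byte("abc"))
        var h common.Hash = crypto.Keccak256Hash([]byte("abc"))
        fmt.Printf("%x\n%x\n", digest, h[:])

        // The deprecated alias forwards to Keccak256, so legacy callers
        // observe no change in output.
        fmt.Println(bytes.Equal(digest, crypto.Sha3([]byte("abc")))) // true
    }
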
diff --git a/crypto/crypto_test.go b/crypto/crypto_test.go
index 1681c7fef..58b29da49 100644
--- a/crypto/crypto_test.go
+++ b/crypto/crypto_test.go
@@ -40,13 +40,13 @@ var testPrivHex = "289c2857d4598e37fb9647507e47a309d6133539bf21a8b9cb6df88fd5232
func TestSha3(t *testing.T) {
msg := []byte("abc")
exp, _ := hex.DecodeString("4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45")
- checkhash(t, "Sha3-256", func(in []byte) []byte { return Sha3(in) }, msg, exp)
+ checkhash(t, "Sha3-256", func(in []byte) []byte { return Keccak256(in) }, msg, exp)
}
func TestSha3Hash(t *testing.T) {
msg := []byte("abc")
exp, _ := hex.DecodeString("4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45")
- checkhash(t, "Sha3-256-array", func(in []byte) []byte { h := Sha3Hash(in); return h[:] }, msg, exp)
+ checkhash(t, "Sha3-256-array", func(in []byte) []byte { h := Keccak256Hash(in); return h[:] }, msg, exp)
}
func TestSha256(t *testing.T) {
@@ -66,7 +66,7 @@ func BenchmarkSha3(b *testing.B) {
amount := 1000000
start := time.Now()
for i := 0; i < amount; i++ {
- Sha3(a)
+ Keccak256(a)
}
fmt.Println(amount, ":", time.Since(start))
@@ -84,7 +84,7 @@ func TestSign(t *testing.T) {
key, _ := HexToECDSA(testPrivHex)
addr := common.HexToAddress(testAddrHex)
- msg := Sha3([]byte("foo"))
+ msg := Keccak256([]byte("foo"))
sig, err := Sign(msg, key)
if err != nil {
t.Errorf("Sign error: %s", err)
@@ -238,7 +238,7 @@ func TestPythonIntegration(t *testing.T) {
k0, _ := HexToECDSA(kh)
k1 := FromECDSA(k0)
- msg0 := Sha3([]byte("foo"))
+ msg0 := Keccak256([]byte("foo"))
sig0, _ := secp256k1.Sign(msg0, k1)
msg1 := common.FromHex("00000000000000000000000000000000")
diff --git a/crypto/key_store_passphrase.go b/crypto/key_store_passphrase.go
index 94411d2f9..b7ae9e1de 100644
--- a/crypto/key_store_passphrase.go
+++ b/crypto/key_store_passphrase.go
@@ -110,7 +110,7 @@ func (ks keyStorePassphrase) StoreKey(key *Key, auth string) (err error) {
return err
}
- mac := Sha3(derivedKey[16:32], cipherText)
+ mac := Keccak256(derivedKey[16:32], cipherText)
scryptParamsJSON := make(map[string]interface{}, 5)
scryptParamsJSON["n"] = ks.scryptN
@@ -210,7 +210,7 @@ func decryptKeyV3(keyProtected *encryptedKeyJSONV3, auth string) (keyBytes []byt
return nil, nil, err
}
- calculatedMAC := Sha3(derivedKey[16:32], cipherText)
+ calculatedMAC := Keccak256(derivedKey[16:32], cipherText)
if !bytes.Equal(calculatedMAC, mac) {
return nil, nil, errors.New("Decryption failed: MAC mismatch")
}
@@ -244,12 +244,12 @@ func decryptKeyV1(keyProtected *encryptedKeyJSONV1, auth string) (keyBytes []byt
return nil, nil, err
}
- calculatedMAC := Sha3(derivedKey[16:32], cipherText)
+ calculatedMAC := Keccak256(derivedKey[16:32], cipherText)
if !bytes.Equal(calculatedMAC, mac) {
return nil, nil, errors.New("Decryption failed: MAC mismatch")
}
- plainText, err := aesCBCDecrypt(Sha3(derivedKey[:16])[:16], cipherText, iv)
+ plainText, err := aesCBCDecrypt(Keccak256(derivedKey[:16])[:16], cipherText, iv)
if err != nil {
return nil, nil, err
}
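
For reference, the keystore MAC check touched in the hunks above, shown in isolation — a sketch with a hypothetical helper name, relying on this package's own Keccak256 and the file's existing "bytes" and "errors" imports:

    // checkKeystoreMAC is illustrative only; the keystore code above inlines this.
    func checkKeystoreMAC(derivedKey, cipherText, mac []byte) error {
        // MAC = Keccak-256 over the second half of the scrypt/PBKDF2-derived
        // key concatenated with the ciphertext.
        calculated := Keccak256(derivedKey[16:32], cipherText)
        if !bytes.Equal(calculated, mac) {
            return errors.New("Decryption failed: MAC mismatch")
        }
        return nil
    }
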
diff --git a/crypto/randentropy/rand_entropy.go b/crypto/randentropy/rand_entropy.go
index 0c2e3c051..539d3ac89 100644
--- a/crypto/randentropy/rand_entropy.go
+++ b/crypto/randentropy/rand_entropy.go
@@ -19,8 +19,6 @@ package randentropy
import (
crand "crypto/rand"
"io"
-
- "github.com/ethereum/go-ethereum/crypto/sha3"
)
var Reader io.Reader = &randEntropy{}
@@ -34,14 +32,6 @@ func (*randEntropy) Read(bytes []byte) (n int, err error) {
return len(bytes), nil
}
-// TODO: copied from crypto.go , move to sha3 package?
-func Sha3(data []byte) []byte {
- d := sha3.NewKeccak256()
- d.Write(data)
-
- return d.Sum(nil)
-}
-
func GetEntropyCSPRNG(n int) []byte {
mainBuff := make([]byte, n)
_, err := io.ReadFull(crand.Reader, mainBuff)
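
With the local Sha3 helper removed, a caller that previously hashed fresh entropy inside randentropy would now combine GetEntropyCSPRNG with crypto.Keccak256 — a hypothetical sketch assuming those import paths:

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/crypto"
        "github.com/ethereum/go-ethereum/crypto/randentropy"
    )

    func main() {
        salt := randentropy.GetEntropyCSPRNG(32) // 32 bytes from crypto/rand
        seed := crypto.Keccak256(salt)           // replaces the removed randentropy.Sha3
        fmt.Printf("%x\n", seed)
    }
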
diff --git a/crypto/sha3/LICENSE b/crypto/sha3/LICENSE
new file mode 100644
index 000000000..6a66aea5e
--- /dev/null
+++ b/crypto/sha3/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/crypto/sha3/PATENTS b/crypto/sha3/PATENTS
new file mode 100644
index 000000000..733099041
--- /dev/null
+++ b/crypto/sha3/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/crypto/sha3/doc.go b/crypto/sha3/doc.go
new file mode 100644
index 000000000..3dab530f8
--- /dev/null
+++ b/crypto/sha3/doc.go
@@ -0,0 +1,66 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package sha3 implements the SHA-3 fixed-output-length hash functions and
+// the SHAKE variable-output-length hash functions defined by FIPS-202.
+//
+// Both types of hash function use the "sponge" construction and the Keccak
+// permutation. For a detailed specification see http://keccak.noekeon.org/
+//
+//
+// Guidance
+//
+// If you aren't sure what function you need, use SHAKE256 with at least 64
+// bytes of output. The SHAKE instances are faster than the SHA3 instances;
+// the latter have to allocate memory to conform to the hash.Hash interface.
+//
+// If you need a secret-key MAC (message authentication code), prepend the
+// secret key to the input, hash with SHAKE256 and read at least 32 bytes of
+// output.
+//
+//
+// Security strengths
+//
+// The SHA3-x (x equals 224, 256, 384, or 512) functions have a security
+// strength against preimage attacks of x bits. Since they only produce "x"
+// bits of output, their collision-resistance is only "x/2" bits.
+//
+// The SHAKE-256 and -128 functions have a generic security strength of 256 and
+// 128 bits against all attacks, provided that at least 2x bits of their output
+// is used. Requesting more than 64 or 32 bytes of output, respectively, does
+// not increase the collision-resistance of the SHAKE functions.
+//
+//
+// The sponge construction
+//
+// A sponge builds a pseudo-random function from a public pseudo-random
+// permutation, by applying the permutation to a state of "rate + capacity"
+// bytes, but hiding "capacity" of the bytes.
+//
+// A sponge starts out with a zero state. To hash an input using a sponge, up
+// to "rate" bytes of the input are XORed into the sponge's state. The sponge
+// is then "full" and the permutation is applied to "empty" it. This process is
+// repeated until all the input has been "absorbed". The input is then padded.
+// The digest is "squeezed" from the sponge in the same way, except that
+// output is copied out instead of input being XORed in.
+//
+// A sponge is parameterized by its generic security strength, which is equal
+// to half its capacity; capacity + rate is equal to the permutation's width.
+// Since the KeccakF-1600 permutation is 1600 bits (200 bytes) wide, this means
+// that the security strength of a sponge instance is equal to (1600 - bitrate) / 2.
+//
+//
+// Recommendations
+//
+// The SHAKE functions are recommended for most new uses. They can produce
+// output of arbitrary length. SHAKE256, with an output length of at least
+// 64 bytes, provides 256-bit security against all attacks. The Keccak team
+// recommends it for most applications upgrading from SHA2-512. (NIST chose a
+// much stronger, but much slower, sponge instance for SHA3-512.)
+//
+// The SHA-3 functions are "drop-in" replacements for the SHA-2 functions.
+// They produce output of the same length, with the same security strengths
+// against all attacks. This means, in particular, that SHA3-256 only has
+// 128-bit collision resistance, because its output length is 32 bytes.
+package sha3
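
A sketch of the guidance above (SHAKE256 with at least 64 bytes of output), using only functions added in this patch; the import path is this repository's vendored copy of the package:

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/crypto/sha3"
    )

    func main() {
        out := make([]byte, 64) // at least 64 bytes for 256-bit strength
        sha3.ShakeSum256(out, []byte("some input"))
        fmt.Printf("%x\n", out)
    }
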
diff --git a/crypto/sha3/hashes.go b/crypto/sha3/hashes.go
new file mode 100644
index 000000000..669cd5543
--- /dev/null
+++ b/crypto/sha3/hashes.go
@@ -0,0 +1,68 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// This file provides functions for creating instances of the SHA-3
+// and SHAKE hash functions, as well as utility functions for hashing
+// bytes.
+
+import (
+ "hash"
+)
+
+// NewKeccak256 creates a new Keccak-256 hash.
+func NewKeccak256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x01} }
+
+// New224 creates a new SHA3-224 hash.
+// Its generic security strength is 224 bits against preimage attacks,
+// and 112 bits against collision attacks.
+func New224() hash.Hash { return &state{rate: 144, outputLen: 28, dsbyte: 0x06} }
+
+// New256 creates a new SHA3-256 hash.
+// Its generic security strength is 256 bits against preimage attacks,
+// and 128 bits against collision attacks.
+func New256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x06} }
+
+// New384 creates a new SHA3-384 hash.
+// Its generic security strength is 384 bits against preimage attacks,
+// and 192 bits against collision attacks.
+func New384() hash.Hash { return &state{rate: 104, outputLen: 48, dsbyte: 0x06} }
+
+// New512 creates a new SHA3-512 hash.
+// Its generic security strength is 512 bits against preimage attacks,
+// and 256 bits against collision attacks.
+func New512() hash.Hash { return &state{rate: 72, outputLen: 64, dsbyte: 0x06} }
+
+// Sum224 returns the SHA3-224 digest of the data.
+func Sum224(data []byte) (digest [28]byte) {
+ h := New224()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum256 returns the SHA3-256 digest of the data.
+func Sum256(data []byte) (digest [32]byte) {
+ h := New256()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum384 returns the SHA3-384 digest of the data.
+func Sum384(data []byte) (digest [48]byte) {
+ h := New384()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum512 returns the SHA3-512 digest of the data.
+func Sum512(data []byte) (digest [64]byte) {
+ h := New512()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
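
A minimal sketch of why go-ethereum keeps NewKeccak256 alongside the standardized constructors: the legacy Keccak-256 padding (dsbyte 0x01, used by Ethereum) and SHA3-256 (dsbyte 0x06) produce different digests for the same input. The "abc" vector in crypto_test.go above expects the Keccak-256 value.

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/crypto/sha3"
    )

    func main() {
        k := sha3.NewKeccak256()
        k.Write([]byte("abc"))
        fmt.Printf("Keccak-256: %x\n", k.Sum(nil)) // 4e03657a... per crypto_test.go
        fmt.Printf("SHA3-256:   %x\n", sha3.Sum256([]byte("abc"))) // differs
    }
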
diff --git a/crypto/sha3/keccakf.go b/crypto/sha3/keccakf.go
index 5fb98cbeb..13e7058fa 100644
--- a/crypto/sha3/keccakf.go
+++ b/crypto/sha3/keccakf.go
@@ -1,30 +1,6 @@
// Copyright 2014 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
package sha3
diff --git a/crypto/sha3/register.go b/crypto/sha3/register.go
new file mode 100644
index 000000000..3cf6a22e0
--- /dev/null
+++ b/crypto/sha3/register.go
@@ -0,0 +1,18 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.4
+
+package sha3
+
+import (
+ "crypto"
+)
+
+func init() {
+ crypto.RegisterHash(crypto.SHA3_224, New224)
+ crypto.RegisterHash(crypto.SHA3_256, New256)
+ crypto.RegisterHash(crypto.SHA3_384, New384)
+ crypto.RegisterHash(crypto.SHA3_512, New512)
+}
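
With the go1.4+ registration above in place, the hashes can also be obtained through the standard library's crypto package — a sketch assuming a blank import of this vendored package:

    package main

    import (
        "crypto"
        "fmt"

        _ "github.com/ethereum/go-ethereum/crypto/sha3" // init() registers SHA3_* hashes
    )

    func main() {
        h := crypto.SHA3_256.New()
        h.Write([]byte("abc"))
        fmt.Printf("%x\n", h.Sum(nil))
    }
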
diff --git a/crypto/sha3/sha3.go b/crypto/sha3/sha3.go
index ee24df5c9..c8fd31cb0 100644
--- a/crypto/sha3/sha3.go
+++ b/crypto/sha3/sha3.go
@@ -1,237 +1,193 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Package sha3 implements the SHA3 hash algorithm (formerly called Keccak) chosen by NIST in 2012.
-// This file provides a SHA3 implementation which implements the standard hash.Hash interface.
-// Writing input data, including padding, and reading output data are computed in this file.
-// Note that the current implementation can compute the hash of an integral number of bytes only.
-// This is a consequence of the hash interface in which a buffer of bytes is passed in.
-// The internals of the Keccak-f function are computed in keccakf.go.
-// For the detailed specification, refer to the Keccak web site (http://keccak.noekeon.org/).
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
package sha3
-import (
- "encoding/binary"
- "hash"
+// spongeDirection indicates the direction bytes are flowing through the sponge.
+type spongeDirection int
+
+const (
+ // spongeAbsorbing indicates that the sponge is absorbing input.
+ spongeAbsorbing spongeDirection = iota
+ // spongeSqueezing indicates that the sponge is being squeezed.
+ spongeSqueezing
)
-// laneSize is the size in bytes of each "lane" of the internal state of SHA3 (5 * 5 * 8).
-// Note that changing this size would requires using a type other than uint64 to store each lane.
-const laneSize = 8
-
-// sliceSize represents the dimensions of the internal state, a square matrix of
-// sliceSize ** 2 lanes. This is the size of both the "rows" and "columns" dimensions in the
-// terminology of the SHA3 specification.
-const sliceSize = 5
-
-// numLanes represents the total number of lanes in the state.
-const numLanes = sliceSize * sliceSize
-
-// stateSize is the size in bytes of the internal state of SHA3 (5 * 5 * WSize).
-const stateSize = laneSize * numLanes
-
-// digest represents the partial evaluation of a checksum.
-// Note that capacity, and not outputSize, is the critical security parameter, as SHA3 can output
-// an arbitrary number of bytes for any given capacity. The Keccak proposal recommends that
-// capacity = 2*outputSize to ensure that finding a collision of size outputSize requires
-// O(2^{outputSize/2}) computations (the birthday lower bound). Future standards may modify the
-// capacity/outputSize ratio to allow for more output with lower cryptographic security.
-type digest struct {
- a [numLanes]uint64 // main state of the hash
- outputSize int // desired output size in bytes
- capacity int // number of bytes to leave untouched during squeeze/absorb
- absorbed int // number of bytes absorbed thus far
-}
+const (
+ // maxRate is the maximum size of the internal buffer. SHAKE-256
+ // currently needs the largest buffer.
+ maxRate = 168
+)
-// minInt returns the lesser of two integer arguments, to simplify the absorption routine.
-func minInt(v1, v2 int) int {
- if v1 <= v2 {
- return v1
- }
- return v2
+type state struct {
+ // Generic sponge components.
+ a [25]uint64 // main state of the hash
+ buf []byte // points into storage
+ rate int // the number of bytes of state to use
+
+ // dsbyte contains the "domain separation" bits and the first bit of
+ // the padding. Sections 6.1 and 6.2 of [1] separate the outputs of the
+ // SHA-3 and SHAKE functions by appending bitstrings to the message.
+ // Using a little-endian bit-ordering convention, these are "01" for SHA-3
+ // and "1111" for SHAKE, or 00000010b and 00001111b, respectively. Then the
+ // padding rule from section 5.1 is applied to pad the message to a multiple
+ // of the rate, which involves adding a "1" bit, zero or more "0" bits, and
+ // a final "1" bit. We merge the first "1" bit from the padding into dsbyte,
+ // giving 00000110b (0x06) and 00011111b (0x1f).
+ // [1] http://csrc.nist.gov/publications/drafts/fips-202/fips_202_draft.pdf
+ // "Draft FIPS 202: SHA-3 Standard: Permutation-Based Hash and
+ // Extendable-Output Functions (May 2014)"
+ dsbyte byte
+ storage [maxRate]byte
+
+ // Specific to SHA-3 and SHAKE.
+ fixedOutput bool // whether this is a fixed-output-length instance
+ outputLen int // the default output size in bytes
+ state spongeDirection // whether the sponge is absorbing or squeezing
}
-// rate returns the number of bytes of the internal state which can be absorbed or squeezed
-// in between calls to the permutation function.
-func (d *digest) rate() int {
- return stateSize - d.capacity
-}
+// BlockSize returns the rate of sponge underlying this hash function.
+func (d *state) BlockSize() int { return d.rate }
+
+// Size returns the output size of the hash function in bytes.
+func (d *state) Size() int { return d.outputLen }
-// Reset clears the internal state by zeroing bytes in the state buffer.
-// This can be skipped for a newly-created hash state; the default zero-allocated state is correct.
-func (d *digest) Reset() {
- d.absorbed = 0
+// Reset clears the internal state by zeroing the sponge state and
+// the byte buffer, and setting Sponge.state to absorbing.
+func (d *state) Reset() {
+ // Zero the permutation's state.
for i := range d.a {
d.a[i] = 0
}
+ d.state = spongeAbsorbing
+ d.buf = d.storage[:0]
}
-// BlockSize, required by the hash.Hash interface, does not have a standard intepretation
-// for a sponge-based construction like SHA3. We return the data rate: the number of bytes which
-// can be absorbed per invocation of the permutation function. For Merkle-Damgård based hashes
-// (ie SHA1, SHA2, MD5) the output size of the internal compression function is returned.
-// We consider this to be roughly equivalent because it represents the number of bytes of output
-// produced per cryptographic operation.
-func (d *digest) BlockSize() int { return d.rate() }
+func (d *state) clone() *state {
+ ret := *d
+ if ret.state == spongeAbsorbing {
+ ret.buf = ret.storage[:len(ret.buf)]
+ } else {
+ ret.buf = ret.storage[d.rate-cap(d.buf) : d.rate]
+ }
-// Size returns the output size of the hash function in bytes.
-func (d *digest) Size() int {
- return d.outputSize
+ return &ret
}
-// unalignedAbsorb is a helper function for Write, which absorbs data that isn't aligned with an
-// 8-byte lane. This requires shifting the individual bytes into position in a uint64.
-func (d *digest) unalignedAbsorb(p []byte) {
- var t uint64
- for i := len(p) - 1; i >= 0; i-- {
- t <<= 8
- t |= uint64(p[i])
+// permute applies the KeccakF-1600 permutation. It handles
+// any input-output buffering.
+func (d *state) permute() {
+ switch d.state {
+ case spongeAbsorbing:
+ // If we're absorbing, we need to xor the input into the state
+ // before applying the permutation.
+ xorIn(d, d.buf)
+ d.buf = d.storage[:0]
+ keccakF1600(&d.a)
+ case spongeSqueezing:
+ // If we're squeezing, we need to apply the permutation before
+ // copying more output.
+ keccakF1600(&d.a)
+ d.buf = d.storage[:d.rate]
+ copyOut(d, d.buf)
}
- offset := (d.absorbed) % d.rate()
- t <<= 8 * uint(offset%laneSize)
- d.a[offset/laneSize] ^= t
- d.absorbed += len(p)
}
-// Write "absorbs" bytes into the state of the SHA3 hash, updating as needed when the sponge
-// "fills up" with rate() bytes. Since lanes are stored internally as type uint64, this requires
-// converting the incoming bytes into uint64s using a little endian interpretation. This
-// implementation is optimized for large, aligned writes of multiples of 8 bytes (laneSize).
-// Non-aligned or uneven numbers of bytes require shifting and are slower.
-func (d *digest) Write(p []byte) (int, error) {
- // An initial offset is needed if the we aren't absorbing to the first lane initially.
- offset := d.absorbed % d.rate()
- toWrite := len(p)
-
- // The first lane may need to absorb unaligned and/or incomplete data.
- if (offset%laneSize != 0 || len(p) < 8) && len(p) > 0 {
- toAbsorb := minInt(laneSize-(offset%laneSize), len(p))
- d.unalignedAbsorb(p[:toAbsorb])
- p = p[toAbsorb:]
- offset = (d.absorbed) % d.rate()
-
- // For every rate() bytes absorbed, the state must be permuted via the F Function.
- if (d.absorbed)%d.rate() == 0 {
- keccakF1600(&d.a)
- }
+// padAndPermute appends the domain separation bits in dsbyte, applies
+// the multi-bitrate 10..1 padding rule, and permutes the state.
+func (d *state) padAndPermute(dsbyte byte) {
+ if d.buf == nil {
+ d.buf = d.storage[:0]
}
+ // Pad with this instance's domain-separator bits. We know that there's
+ // at least one byte of space in d.buf because, if it were full,
+ // permute would have been called to empty it. dsbyte also contains the
+ // first one bit for the padding. See the comment in the state struct.
+ d.buf = append(d.buf, dsbyte)
+ zerosStart := len(d.buf)
+ d.buf = d.storage[:d.rate]
+ for i := zerosStart; i < d.rate; i++ {
+ d.buf[i] = 0
+ }
+ // This adds the final one bit for the padding. Because of the way that
+ // bits are numbered from the LSB upwards, the final bit is the MSB of
+ // the last byte.
+ d.buf[d.rate-1] ^= 0x80
+ // Apply the permutation
+ d.permute()
+ d.state = spongeSqueezing
+ d.buf = d.storage[:d.rate]
+ copyOut(d, d.buf)
+}
- // This loop should absorb the bulk of the data into full, aligned lanes.
- // It will call the update function as necessary.
- for len(p) > 7 {
- firstLane := offset / laneSize
- lastLane := minInt(d.rate()/laneSize, firstLane+len(p)/laneSize)
+// Write absorbs more data into the hash's state. It panics if more data
+// is written to the sponge after output has been read from it.
+func (d *state) Write(p []byte) (written int, err error) {
+ if d.state != spongeAbsorbing {
+ panic("sha3: write to sponge after read")
+ }
+ if d.buf == nil {
+ d.buf = d.storage[:0]
+ }
+ written = len(p)
- // This inner loop absorbs input bytes into the state in groups of 8, converted to uint64s.
- for lane := firstLane; lane < lastLane; lane++ {
- d.a[lane] ^= binary.LittleEndian.Uint64(p[:laneSize])
- p = p[laneSize:]
- }
- d.absorbed += (lastLane - firstLane) * laneSize
- // For every rate() bytes absorbed, the state must be permuted via the F Function.
- if (d.absorbed)%d.rate() == 0 {
+ for len(p) > 0 {
+ if len(d.buf) == 0 && len(p) >= d.rate {
+ // The fast path; absorb a full "rate" bytes of input and apply the permutation.
+ xorIn(d, p[:d.rate])
+ p = p[d.rate:]
keccakF1600(&d.a)
+ } else {
+ // The slow path; buffer the input until we can fill the sponge, and then xor it in.
+ todo := d.rate - len(d.buf)
+ if todo > len(p) {
+ todo = len(p)
+ }
+ d.buf = append(d.buf, p[:todo]...)
+ p = p[todo:]
+
+ // If the sponge is full, apply the permutation.
+ if len(d.buf) == d.rate {
+ d.permute()
+ }
}
-
- offset = 0
- }
-
- // If there are insufficient bytes to fill the final lane, an unaligned absorption.
- // This should always start at a correct lane boundary though, or else it would be caught
- // by the uneven opening lane case above.
- if len(p) > 0 {
- d.unalignedAbsorb(p)
}
- return toWrite, nil
-}
-
-// pad computes the SHA3 padding scheme based on the number of bytes absorbed.
-// The padding is a 1 bit, followed by an arbitrary number of 0s and then a final 1 bit, such that
-// the input bits plus padding bits are a multiple of rate(). Adding the padding simply requires
-// xoring an opening and closing bit into the appropriate lanes.
-func (d *digest) pad() {
- offset := d.absorbed % d.rate()
- // The opening pad bit must be shifted into position based on the number of bytes absorbed
- padOpenLane := offset / laneSize
- d.a[padOpenLane] ^= 0x0000000000000001 << uint(8*(offset%laneSize))
- // The closing padding bit is always in the last position
- padCloseLane := (d.rate() / laneSize) - 1
- d.a[padCloseLane] ^= 0x8000000000000000
+ return
}
-// finalize prepares the hash to output data by padding and one final permutation of the state.
-func (d *digest) finalize() {
- d.pad()
- keccakF1600(&d.a)
-}
-
-// squeeze outputs an arbitrary number of bytes from the hash state.
-// Squeezing can require multiple calls to the F function (one per rate() bytes squeezed),
-// although this is not the case for standard SHA3 parameters. This implementation only supports
-// squeezing a single time, subsequent squeezes may lose alignment. Future implementations
-// may wish to support multiple squeeze calls, for example to support use as a PRNG.
-func (d *digest) squeeze(in []byte, toSqueeze int) []byte {
- // Because we read in blocks of laneSize, we need enough room to read
- // an integral number of lanes
- needed := toSqueeze + (laneSize-toSqueeze%laneSize)%laneSize
- if cap(in)-len(in) < needed {
- newIn := make([]byte, len(in), len(in)+needed)
- copy(newIn, in)
- in = newIn
+// Read squeezes an arbitrary number of bytes from the sponge.
+func (d *state) Read(out []byte) (n int, err error) {
+ // If we're still absorbing, pad and apply the permutation.
+ if d.state == spongeAbsorbing {
+ d.padAndPermute(d.dsbyte)
}
- out := in[len(in) : len(in)+needed]
+ n = len(out)
+
+ // Now, do the squeezing.
for len(out) > 0 {
- for i := 0; i < d.rate() && len(out) > 0; i += laneSize {
- binary.LittleEndian.PutUint64(out[:], d.a[i/laneSize])
- out = out[laneSize:]
- }
- if len(out) > 0 {
- keccakF1600(&d.a)
+ n := copy(out, d.buf)
+ d.buf = d.buf[n:]
+ out = out[n:]
+
+ // Apply the permutation if we've squeezed the sponge dry.
+ if len(d.buf) == 0 {
+ d.permute()
}
}
- return in[:len(in)+toSqueeze] // Re-slice in case we wrote extra data.
-}
-// Sum applies padding to the hash state and then squeezes out the desired nubmer of output bytes.
-func (d *digest) Sum(in []byte) []byte {
- // Make a copy of the original hash so that caller can keep writing and summing.
- dup := *d
- dup.finalize()
- return dup.squeeze(in, dup.outputSize)
+ return
}
-// The NewKeccakX constructors enable initializing a hash in any of the four recommend sizes
-// from the Keccak specification, all of which set capacity=2*outputSize. Note that the final
-// NIST standard for SHA3 may specify different input/output lengths.
-// The output size is indicated in bits but converted into bytes internally.
-func NewKeccak224() hash.Hash { return &digest{outputSize: 224 / 8, capacity: 2 * 224 / 8} }
-func NewKeccak256() hash.Hash { return &digest{outputSize: 256 / 8, capacity: 2 * 256 / 8} }
-func NewKeccak384() hash.Hash { return &digest{outputSize: 384 / 8, capacity: 2 * 384 / 8} }
-func NewKeccak512() hash.Hash { return &digest{outputSize: 512 / 8, capacity: 2 * 512 / 8} }
+// Sum applies padding to the hash state and then squeezes out the desired
+// number of output bytes.
+func (d *state) Sum(in []byte) []byte {
+ // Make a copy of the original hash so that caller can keep writing
+ // and summing.
+ dup := d.clone()
+ hash := make([]byte, dup.outputLen)
+ dup.Read(hash)
+ return append(in, hash...)
+}
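
A sketch of the clone-on-Sum behaviour implemented above: Sum works on a copy of the sponge, so the caller can keep writing and summing incrementally (assumes this repository's vendored import path):

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/crypto/sha3"
    )

    func main() {
        h := sha3.New256()
        h.Write([]byte("part one"))
        first := h.Sum(nil) // digest of "part one"; the sponge keeps absorbing
        h.Write([]byte(" and part two"))
        second := h.Sum(nil) // digest of "part one and part two"
        fmt.Printf("%x\n%x\n", first, second)
    }
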
diff --git a/crypto/sha3/sha3_test.go b/crypto/sha3/sha3_test.go
new file mode 100644
index 000000000..caf72f279
--- /dev/null
+++ b/crypto/sha3/sha3_test.go
@@ -0,0 +1,306 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// Tests include all the ShortMsgKATs provided by the Keccak team at
+// https://github.com/gvanas/KeccakCodePackage
+//
+// They only include the zero-bit case of the bitwise testvectors
+// published by NIST in the draft of FIPS-202.
+
+import (
+ "bytes"
+ "compress/flate"
+ "encoding/hex"
+ "encoding/json"
+ "hash"
+ "os"
+ "strings"
+ "testing"
+)
+
+const (
+ testString = "brekeccakkeccak koax koax"
+ katFilename = "testdata/keccakKats.json.deflate"
+)
+
+// Internal-use instances of SHAKE used to test against KATs.
+func newHashShake128() hash.Hash {
+ return &state{rate: 168, dsbyte: 0x1f, outputLen: 512}
+}
+func newHashShake256() hash.Hash {
+ return &state{rate: 136, dsbyte: 0x1f, outputLen: 512}
+}
+
+// testDigests contains functions returning hash.Hash instances
+// with output-length equal to the KAT length for both SHA-3 and
+// SHAKE instances.
+var testDigests = map[string]func() hash.Hash{
+ "SHA3-224": New224,
+ "SHA3-256": New256,
+ "SHA3-384": New384,
+ "SHA3-512": New512,
+ "SHAKE128": newHashShake128,
+ "SHAKE256": newHashShake256,
+}
+
+// testShakes contains functions that return ShakeHash instances for
+// testing the ShakeHash-specific interface.
+var testShakes = map[string]func() ShakeHash{
+ "SHAKE128": NewShake128,
+ "SHAKE256": NewShake256,
+}
+
+// decodeHex converts a hex-encoded string into a raw byte string.
+func decodeHex(s string) []byte {
+ b, err := hex.DecodeString(s)
+ if err != nil {
+ panic(err)
+ }
+ return b
+}
+
+// structs used to marshal JSON test-cases.
+type KeccakKats struct {
+ Kats map[string][]struct {
+ Digest string `json:"digest"`
+ Length int64 `json:"length"`
+ Message string `json:"message"`
+ }
+}
+
+func testUnalignedAndGeneric(t *testing.T, testf func(impl string)) {
+ xorInOrig, copyOutOrig := xorIn, copyOut
+ xorIn, copyOut = xorInGeneric, copyOutGeneric
+ testf("generic")
+ if xorImplementationUnaligned != "generic" {
+ xorIn, copyOut = xorInUnaligned, copyOutUnaligned
+ testf("unaligned")
+ }
+ xorIn, copyOut = xorInOrig, copyOutOrig
+}
+
+// TestKeccakKats tests the SHA-3 and Shake implementations against all the
+// ShortMsgKATs from https://github.com/gvanas/KeccakCodePackage
+// (The testvectors are stored in keccakKats.json.deflate due to their length.)
+func TestKeccakKats(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ // Read the KATs.
+ deflated, err := os.Open(katFilename)
+ if err != nil {
+ t.Errorf("error opening %s: %s", katFilename, err)
+ }
+ file := flate.NewReader(deflated)
+ dec := json.NewDecoder(file)
+ var katSet KeccakKats
+ err = dec.Decode(&katSet)
+ if err != nil {
+ t.Errorf("error decoding KATs: %s", err)
+ }
+
+ // Do the KATs.
+ for functionName, kats := range katSet.Kats {
+ d := testDigests[functionName]()
+ for _, kat := range kats {
+ d.Reset()
+ in, err := hex.DecodeString(kat.Message)
+ if err != nil {
+ t.Errorf("error decoding KAT: %s", err)
+ }
+ d.Write(in[:kat.Length/8])
+ got := strings.ToUpper(hex.EncodeToString(d.Sum(nil)))
+ if got != kat.Digest {
+ t.Errorf("function=%s, implementation=%s, length=%d\nmessage:\n %s\ngot:\n %s\nwanted:\n %s",
+ functionName, impl, kat.Length, kat.Message, got, kat.Digest)
+ t.Logf("wanted %+v", kat)
+ t.FailNow()
+ }
+ continue
+ }
+ }
+ })
+}
+
+// TestUnalignedWrite tests that writing data in an arbitrary pattern of
+// small input buffers produces the same digest as a single aligned write.
+func TestUnalignedWrite(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ buf := sequentialBytes(0x10000)
+ for alg, df := range testDigests {
+ d := df()
+ d.Reset()
+ d.Write(buf)
+ want := d.Sum(nil)
+ d.Reset()
+ for i := 0; i < len(buf); {
+ // Cycle through offsets which make a 137 byte sequence.
+ // Because 137 is prime this sequence should exercise all corner cases.
+ offsets := [17]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 1}
+ for _, j := range offsets {
+ if v := len(buf) - i; v < j {
+ j = v
+ }
+ d.Write(buf[i : i+j])
+ i += j
+ }
+ }
+ got := d.Sum(nil)
+ if !bytes.Equal(got, want) {
+ t.Errorf("Unaligned writes, implementation=%s, alg=%s\ngot %q, want %q", impl, alg, got, want)
+ }
+ }
+ })
+}
+
+// TestAppend checks that appending works when reallocation is necessary.
+func TestAppend(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ d := New224()
+
+ for capacity := 2; capacity <= 66; capacity += 64 {
+ // The first time around the loop, Sum will have to reallocate.
+ // The second time, it will not.
+ buf := make([]byte, 2, capacity)
+ d.Reset()
+ d.Write([]byte{0xcc})
+ buf = d.Sum(buf)
+ expected := "0000DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
+ if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
+ t.Errorf("got %s, want %s", got, expected)
+ }
+ }
+ })
+}
+
+// TestAppendNoRealloc tests that appending works when no reallocation is necessary.
+func TestAppendNoRealloc(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ buf := make([]byte, 1, 200)
+ d := New224()
+ d.Write([]byte{0xcc})
+ buf = d.Sum(buf)
+ expected := "00DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
+ if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
+ t.Errorf("%s: got %s, want %s", impl, got, expected)
+ }
+ })
+}
+
+// TestSqueezing checks that squeezing the full output a single time produces
+// the same output as repeatedly squeezing the instance.
+func TestSqueezing(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ for functionName, newShakeHash := range testShakes {
+ d0 := newShakeHash()
+ d0.Write([]byte(testString))
+ ref := make([]byte, 32)
+ d0.Read(ref)
+
+ d1 := newShakeHash()
+ d1.Write([]byte(testString))
+ var multiple []byte
+ for _ = range ref {
+ one := make([]byte, 1)
+ d1.Read(one)
+ multiple = append(multiple, one...)
+ }
+ if !bytes.Equal(ref, multiple) {
+ t.Errorf("%s (%s): squeezing %d bytes one at a time failed", functionName, impl, len(ref))
+ }
+ }
+ })
+}
+
+// sequentialBytes produces a buffer of size consecutive bytes 0x00, 0x01, ..., used for testing.
+func sequentialBytes(size int) []byte {
+ result := make([]byte, size)
+ for i := range result {
+ result[i] = byte(i)
+ }
+ return result
+}
+
+// BenchmarkPermutationFunction measures the speed of the permutation function
+// with no input data.
+func BenchmarkPermutationFunction(b *testing.B) {
+ b.SetBytes(int64(200))
+ var lanes [25]uint64
+ for i := 0; i < b.N; i++ {
+ keccakF1600(&lanes)
+ }
+}
+
+// benchmarkHash measures the speed of hashing num buffers of size bytes each.
+func benchmarkHash(b *testing.B, h hash.Hash, size, num int) {
+ b.StopTimer()
+ h.Reset()
+ data := sequentialBytes(size)
+ b.SetBytes(int64(size * num))
+ b.StartTimer()
+
+ var state []byte
+ for i := 0; i < b.N; i++ {
+ for j := 0; j < num; j++ {
+ h.Write(data)
+ }
+ state = h.Sum(state[:0])
+ }
+ b.StopTimer()
+ h.Reset()
+}
+
+// benchmarkShake is specialized to the Shake instances, which don't
+// require a copy on reading output.
+func benchmarkShake(b *testing.B, h ShakeHash, size, num int) {
+ b.StopTimer()
+ h.Reset()
+ data := sequentialBytes(size)
+ d := make([]byte, 32)
+
+ b.SetBytes(int64(size * num))
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ h.Reset()
+ for j := 0; j < num; j++ {
+ h.Write(data)
+ }
+ h.Read(d)
+ }
+}
+
+func BenchmarkSha3_512_MTU(b *testing.B) { benchmarkHash(b, New512(), 1350, 1) }
+func BenchmarkSha3_384_MTU(b *testing.B) { benchmarkHash(b, New384(), 1350, 1) }
+func BenchmarkSha3_256_MTU(b *testing.B) { benchmarkHash(b, New256(), 1350, 1) }
+func BenchmarkSha3_224_MTU(b *testing.B) { benchmarkHash(b, New224(), 1350, 1) }
+
+func BenchmarkShake128_MTU(b *testing.B) { benchmarkShake(b, NewShake128(), 1350, 1) }
+func BenchmarkShake256_MTU(b *testing.B) { benchmarkShake(b, NewShake256(), 1350, 1) }
+func BenchmarkShake256_16x(b *testing.B) { benchmarkShake(b, NewShake256(), 16, 1024) }
+func BenchmarkShake256_1MiB(b *testing.B) { benchmarkShake(b, NewShake256(), 1024, 1024) }
+
+func BenchmarkSha3_512_1MiB(b *testing.B) { benchmarkHash(b, New512(), 1024, 1024) }
+
+func Example_sum() {
+ buf := []byte("some data to hash")
+ // A hash needs to be 64 bytes long to have 256-bit collision resistance.
+ h := make([]byte, 64)
+ // Compute a 64-byte hash of buf and put it in h.
+ ShakeSum256(h, buf)
+}
+
+func Example_mac() {
+ k := []byte("this is a secret key; you should generate a strong random key that's at least 32 bytes long")
+ buf := []byte("and this is some data to authenticate")
+ // A MAC with 32 bytes of output has 256-bit security strength -- if you use at least a 32-byte-long key.
+ h := make([]byte, 32)
+ d := NewShake256()
+ // Write the key into the hash.
+ d.Write(k)
+ // Now write the data.
+ d.Write(buf)
+ // Read 32 bytes of output from the hash into h.
+ d.Read(h)
+}
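
To exercise the vendored package (the KATs, unaligned writes, and squeezing tests) and the MTU/1MiB benchmarks from a go-ethereum checkout, something like the following should work:

    go test ./crypto/sha3/
    go test ./crypto/sha3/ -run NONE -bench .
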
diff --git a/crypto/sha3/shake.go b/crypto/sha3/shake.go
new file mode 100644
index 000000000..841f9860f
--- /dev/null
+++ b/crypto/sha3/shake.go
@@ -0,0 +1,60 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// This file defines the ShakeHash interface, and provides
+// functions for creating SHAKE instances, as well as utility
+// functions for hashing bytes to arbitrary-length output.
+
+import (
+ "io"
+)
+
+// ShakeHash defines the interface to hash functions that
+// support arbitrary-length output.
+type ShakeHash interface {
+ // Write absorbs more data into the hash's state. It panics if input is
+ // written to it after output has been read from it.
+ io.Writer
+
+ // Read reads more output from the hash; reading affects the hash's
+ // state. (ShakeHash.Read is thus very different from Hash.Sum)
+ // It never returns an error.
+ io.Reader
+
+ // Clone returns a copy of the ShakeHash in its current state.
+ Clone() ShakeHash
+
+ // Reset resets the ShakeHash to its initial state.
+ Reset()
+}
+
+func (d *state) Clone() ShakeHash {
+ return d.clone()
+}
+
+// NewShake128 creates a new SHAKE128 variable-output-length ShakeHash.
+// Its generic security strength is 128 bits against all attacks if at
+// least 32 bytes of its output are used.
+func NewShake128() ShakeHash { return &state{rate: 168, dsbyte: 0x1f} }
+
+// NewShake256 creates a new SHAKE256 variable-output-length ShakeHash.
+// Its generic security strength is 256 bits against all attacks if
+// at least 64 bytes of its output are used.
+func NewShake256() ShakeHash { return &state{rate: 136, dsbyte: 0x1f} }
+
+// ShakeSum128 writes an arbitrary-length digest of data into hash.
+func ShakeSum128(hash, data []byte) {
+ h := NewShake128()
+ h.Write(data)
+ h.Read(hash)
+}
+
+// ShakeSum256 writes an arbitrary-length digest of data into hash.
+func ShakeSum256(hash, data []byte) {
+ h := NewShake256()
+ h.Write(data)
+ h.Read(hash)
+}
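
A sketch of the variable-length output these helpers provide: ShakeSum256 is equivalent to creating a ShakeHash, writing the data, and reading the requested number of bytes (cf. TestSqueezing above).

    package main

    import (
        "bytes"
        "fmt"

        "github.com/ethereum/go-ethereum/crypto/sha3"
    )

    func main() {
        // Output length is chosen by the caller: 96 bytes here.
        out := make([]byte, 96)
        sha3.ShakeSum256(out, []byte("input"))

        // Squeezing the same 96 bytes incrementally yields the same stream.
        d := sha3.NewShake256()
        d.Write([]byte("input"))
        incr := make([]byte, 96)
        d.Read(incr)
        fmt.Println(bytes.Equal(out, incr)) // true
    }
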
diff --git a/crypto/sha3/testdata/keccakKats.json.deflate b/crypto/sha3/testdata/keccakKats.json.deflate
new file mode 100644
index 000000000..62e85ae24
--- /dev/null
+++ b/crypto/sha3/testdata/keccakKats.json.deflate
Binary files differ
diff --git a/crypto/sha3/xor.go b/crypto/sha3/xor.go
new file mode 100644
index 000000000..d622979c1
--- /dev/null
+++ b/crypto/sha3/xor.go
@@ -0,0 +1,16 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !amd64,!386 appengine
+
+package sha3
+
+var (
+ xorIn = xorInGeneric
+ copyOut = copyOutGeneric
+ xorInUnaligned = xorInGeneric
+ copyOutUnaligned = copyOutGeneric
+)
+
+const xorImplementationUnaligned = "generic"
diff --git a/crypto/sha3/xor_generic.go b/crypto/sha3/xor_generic.go
new file mode 100644
index 000000000..fd35f02ef
--- /dev/null
+++ b/crypto/sha3/xor_generic.go
@@ -0,0 +1,28 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+import "encoding/binary"
+
+// xorInGeneric xors the bytes in buf into the state; it
+// makes no non-portable assumptions about memory layout
+// or alignment.
+func xorInGeneric(d *state, buf []byte) {
+ n := len(buf) / 8
+
+ for i := 0; i < n; i++ {
+ a := binary.LittleEndian.Uint64(buf)
+ d.a[i] ^= a
+ buf = buf[8:]
+ }
+}
+
+// copyOutGeneric copies uint64s to a byte buffer.
+func copyOutGeneric(d *state, b []byte) {
+ for i := 0; len(b) >= 8; i++ {
+ binary.LittleEndian.PutUint64(b, d.a[i])
+ b = b[8:]
+ }
+}
diff --git a/crypto/sha3/xor_unaligned.go b/crypto/sha3/xor_unaligned.go
new file mode 100644
index 000000000..c7851a1d8
--- /dev/null
+++ b/crypto/sha3/xor_unaligned.go
@@ -0,0 +1,58 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64 386
+// +build !appengine
+
+package sha3
+
+import "unsafe"
+
+func xorInUnaligned(d *state, buf []byte) {
+ bw := (*[maxRate / 8]uint64)(unsafe.Pointer(&buf[0]))
+ n := len(buf)
+ if n >= 72 {
+ d.a[0] ^= bw[0]
+ d.a[1] ^= bw[1]
+ d.a[2] ^= bw[2]
+ d.a[3] ^= bw[3]
+ d.a[4] ^= bw[4]
+ d.a[5] ^= bw[5]
+ d.a[6] ^= bw[6]
+ d.a[7] ^= bw[7]
+ d.a[8] ^= bw[8]
+ }
+ if n >= 104 {
+ d.a[9] ^= bw[9]
+ d.a[10] ^= bw[10]
+ d.a[11] ^= bw[11]
+ d.a[12] ^= bw[12]
+ }
+ if n >= 136 {
+ d.a[13] ^= bw[13]
+ d.a[14] ^= bw[14]
+ d.a[15] ^= bw[15]
+ d.a[16] ^= bw[16]
+ }
+ if n >= 144 {
+ d.a[17] ^= bw[17]
+ }
+ if n >= 168 {
+ d.a[18] ^= bw[18]
+ d.a[19] ^= bw[19]
+ d.a[20] ^= bw[20]
+ }
+}
+
+func copyOutUnaligned(d *state, buf []byte) {
+ ab := (*[maxRate]uint8)(unsafe.Pointer(&d.a[0]))
+ copy(buf, ab[:])
+}
+
+var (
+ xorIn = xorInUnaligned
+ copyOut = copyOutUnaligned
+)
+
+const xorImplementationUnaligned = "unaligned"