path: root/contracts/utils
author     Leonid Logvinov <logvinov.leon@gmail.com>  2018-12-04 21:41:18 +0800
committer  Leonid Logvinov <logvinov.leon@gmail.com>  2018-12-04 21:41:18 +0800
commit     e8d0aff333c33ead5da0878fa776aa0f42abd567 (patch)
tree       28dc73fd2e3b524cb2a32465d88ad30cfb6799d4 /contracts/utils
parent     79f5e36edbd8a4483aac46032092dece95bb0b4c (diff)
Refactor @0x/contracts-utils from @0x/contracts-core
Diffstat (limited to 'contracts/utils')
-rw-r--r--  contracts/utils/.solhint.json  |  20
-rw-r--r--  contracts/utils/README.md  |  70
-rw-r--r--  contracts/utils/compiler.json  |  22
-rw-r--r--  contracts/utils/contracts/test/TestConstants/TestConstants.sol  |  57
-rw-r--r--  contracts/utils/contracts/test/TestLibBytes/TestLibBytes.sol  |  269
-rw-r--r--  contracts/utils/contracts/utils/LibBytes/LibBytes.sol  |  567
-rw-r--r--  contracts/utils/contracts/utils/Ownable/IOwnable.sol  |  8
-rw-r--r--  contracts/utils/contracts/utils/Ownable/Ownable.sol  |  33
-rw-r--r--  contracts/utils/contracts/utils/ReentrancyGuard/ReentrancyGuard.sol  |  45
-rw-r--r--  contracts/utils/contracts/utils/SafeMath/SafeMath.sol  |  87
-rw-r--r--  contracts/utils/package.json  |  89
-rw-r--r--  contracts/utils/src/artifacts/index.ts  |  19
-rw-r--r--  contracts/utils/src/index.ts  |  2
-rw-r--r--  contracts/utils/src/wrappers/index.ts  |  2
-rw-r--r--  contracts/utils/test/global_hooks.ts  |  17
-rw-r--r--  contracts/utils/test/lib_bytes.ts  |  875
-rw-r--r--  contracts/utils/test/libs.ts  |  34
-rw-r--r--  contracts/utils/tsconfig.json  |  19
-rw-r--r--  contracts/utils/tslint.json  |  6
19 files changed, 2241 insertions, 0 deletions
diff --git a/contracts/utils/.solhint.json b/contracts/utils/.solhint.json
new file mode 100644
index 000000000..076afe9f3
--- /dev/null
+++ b/contracts/utils/.solhint.json
@@ -0,0 +1,20 @@
+{
+ "extends": "default",
+ "rules": {
+ "avoid-low-level-calls": false,
+ "avoid-tx-origin": "warn",
+ "bracket-align": false,
+ "code-complexity": false,
+ "const-name-snakecase": "error",
+ "expression-indent": "error",
+ "function-max-lines": false,
+ "func-order": "error",
+ "indent": ["error", 4],
+ "max-line-length": ["warn", 160],
+ "no-inline-assembly": false,
+ "quotes": ["error", "double"],
+ "separate-by-one-line-in-contract": "error",
+ "space-after-comma": "error",
+ "statement-indent": "error"
+ }
+}
diff --git a/contracts/utils/README.md b/contracts/utils/README.md
new file mode 100644
index 000000000..e7c7b49ff
--- /dev/null
+++ b/contracts/utils/README.md
@@ -0,0 +1,70 @@
+## Contracts utils
+
+Smart contract utilities used in the 0x protocol.
+
+## Usage
+
+Contracts can be found in the [contracts](./contracts) directory. The contents of this directory are broken down into the following subdirectories:
+
+* [utils](./contracts/utils)
+ * This directory contains libraries and utils.
+* [test](./contracts/test)
+ * This directory contains mocks and other contracts that are used solely for testing contracts within the other directories.
+
+## Contributing
+
+We strongly recommend that the community help us make improvements and determine the future direction of the protocol. To report bugs within this package, please create an issue in this repository.
+
+For proposals regarding the 0x protocol's smart contract architecture, message format, or additional functionality, go to the [0x Improvement Proposals (ZEIPs)](https://github.com/0xProject/ZEIPs) repository and follow the contribution guidelines provided therein.
+
+Please read our [contribution guidelines](../../CONTRIBUTING.md) before getting started.
+
+### Install Dependencies
+
+If you don't have Yarn workspaces enabled (Yarn < v1.0), enable them:
+
+```bash
+yarn config set workspaces-experimental true
+```
+
+Then install dependencies:
+
+```bash
+yarn install
+```
+
+### Build
+
+To build this package and all other monorepo packages that it depends on, run the following from the monorepo root directory:
+
+```bash
+PKG=@0x/contracts-utils yarn build
+```
+
+Or continuously rebuild on change:
+
+```bash
+PKG=@0x/contracts-utils yarn watch
+```
+
+### Clean
+
+```bash
+yarn clean
+```
+
+### Lint
+
+```bash
+yarn lint
+```
+
+### Run Tests
+
+```bash
+yarn test
+```
+
+#### Testing options
+
+Contract testing options such as coverage, profiling, revert traces, and choice of backing node are described [here](../TESTING.md).
diff --git a/contracts/utils/compiler.json b/contracts/utils/compiler.json
new file mode 100644
index 000000000..1524c1eaa
--- /dev/null
+++ b/contracts/utils/compiler.json
@@ -0,0 +1,22 @@
+{
+ "artifactsDir": "./generated-artifacts",
+ "contractsDir": "./contracts",
+ "compilerSettings": {
+ "optimizer": {
+ "enabled": true,
+ "runs": 1000000
+ },
+ "outputSelection": {
+ "*": {
+ "*": [
+ "abi",
+ "evm.bytecode.object",
+ "evm.bytecode.sourceMap",
+ "evm.deployedBytecode.object",
+ "evm.deployedBytecode.sourceMap"
+ ]
+ }
+ }
+ },
+ "contracts": ["TestConstants", "TestLibBytes", "LibBytes", "Ownable", "IOwnable", "ReentrancyGuard", "SafeMath"]
+}
diff --git a/contracts/utils/contracts/test/TestConstants/TestConstants.sol b/contracts/utils/contracts/test/TestConstants/TestConstants.sol
new file mode 100644
index 000000000..3c852173b
--- /dev/null
+++ b/contracts/utils/contracts/test/TestConstants/TestConstants.sol
@@ -0,0 +1,57 @@
+/*
+
+ Copyright 2018 ZeroEx Intl.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/
+
+pragma solidity 0.4.24;
+
+import "@0x/contracts-utils/contracts/utils/LibBytes/LibBytes.sol";
+
+
+// solhint-disable max-line-length
+contract TestConstants {
+
+ using LibBytes for bytes;
+
+ bytes4 constant internal ERC20_PROXY_ID = bytes4(keccak256("ERC20Token(address)"));
+
+ address constant internal KOVAN_ZRX_ADDRESS = 0x6Ff6C0Ff1d68b964901F986d4C9FA3ac68346570;
+ bytes constant internal KOVAN_ZRX_ASSET_DATA = "\xf4\x72\x61\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6f\xf6\xc0\xff\x1d\x68\xb9\x64\x90\x1f\x98\x6d\x4c\x9f\xa3\xac\x68\x34\x65\x70";
+
+ address constant internal MAINNET_ZRX_ADDRESS = 0xE41d2489571d322189246DaFA5ebDe1F4699F498;
+ bytes constant public MAINNET_ZRX_ASSET_DATA = "\xf4\x72\x61\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe4\x1d\x24\x89\x57\x1d\x32\x21\x89\x24\x6d\xaf\xa5\xeb\xde\x1f\x46\x99\xf4\x98";
+
+ function assertValidZrxAssetData()
+ public
+ pure
+ returns (bool)
+ {
+ bytes memory kovanZrxAssetData = abi.encodeWithSelector(ERC20_PROXY_ID, KOVAN_ZRX_ADDRESS);
+ require(
+ kovanZrxAssetData.equals(KOVAN_ZRX_ASSET_DATA),
+ "INVALID_KOVAN_ZRX_ASSET_DATA"
+ );
+
+ bytes memory mainnetZrxAssetData = abi.encodeWithSelector(ERC20_PROXY_ID, MAINNET_ZRX_ADDRESS);
+ require(
+ mainnetZrxAssetData.equals(MAINNET_ZRX_ASSET_DATA),
+ "INVALID_MAINNET_ZRX_ASSET_DATA"
+ );
+
+ return true;
+ }
+}
+// solhint-enable max-line-length
\ No newline at end of file
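The hard-coded `KOVAN_ZRX_ASSET_DATA` and `MAINNET_ZRX_ASSET_DATA` constants above are simply the ABI encoding that `assertValidZrxAssetData` recomputes: the 4-byte selector of `ERC20Token(address)` (`0xf47261b0`) followed by the token address left-padded to a 32-byte word, 36 bytes in total. A minimal sketch of that layout follows (illustrative only, not part of this commit; the contract and function names are made up):

```solidity
pragma solidity 0.4.24;


// Illustrative sketch: how the 36-byte asset data constants in TestConstants
// are laid out. Not part of this commit.
contract AssetDataLayoutExample {

    // bytes4(keccak256("ERC20Token(address)")) == 0xf47261b0
    bytes4 constant internal ERC20_PROXY_ID = bytes4(keccak256("ERC20Token(address)"));

    function encodeErc20AssetData(address token)
        internal
        pure
        returns (bytes memory assetData)
    {
        // 4-byte selector + token address left-padded to 32 bytes = 36 bytes,
        // exactly the byte strings hard-coded in TestConstants.
        assetData = abi.encodeWithSelector(ERC20_PROXY_ID, token);
        require(
            assetData.length == 36,
            "UNEXPECTED_ASSET_DATA_LENGTH"
        );
        return assetData;
    }
}
```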
diff --git a/contracts/utils/contracts/test/TestLibBytes/TestLibBytes.sol b/contracts/utils/contracts/test/TestLibBytes/TestLibBytes.sol
new file mode 100644
index 000000000..444a3e717
--- /dev/null
+++ b/contracts/utils/contracts/test/TestLibBytes/TestLibBytes.sol
@@ -0,0 +1,269 @@
+/*
+
+ Copyright 2018 ZeroEx Intl.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/
+
+pragma solidity 0.4.24;
+
+import "@0x/contracts-utils/contracts/utils/LibBytes/LibBytes.sol";
+
+
+contract TestLibBytes {
+
+ using LibBytes for bytes;
+
+ /// @dev Pops the last byte off of a byte array by modifying its length.
+ /// @param b Byte array that will be modified.
+ /// @return The byte that was popped off.
+ function publicPopLastByte(bytes memory b)
+ public
+ pure
+ returns (bytes memory, bytes1 result)
+ {
+ result = b.popLastByte();
+ return (b, result);
+ }
+
+ /// @dev Pops the last 20 bytes off of a byte array by modifying its length.
+ /// @param b Byte array that will be modified.
+ /// @return The 20 byte address that was popped off.
+ function publicPopLast20Bytes(bytes memory b)
+ public
+ pure
+ returns (bytes memory, address result)
+ {
+ result = b.popLast20Bytes();
+ return (b, result);
+ }
+
+ /// @dev Tests equality of two byte arrays.
+ /// @param lhs First byte array to compare.
+ /// @param rhs Second byte array to compare.
+ /// @return True if arrays are the same. False otherwise.
+ function publicEquals(bytes memory lhs, bytes memory rhs)
+ public
+ pure
+ returns (bool equal)
+ {
+ equal = lhs.equals(rhs);
+ return equal;
+ }
+
+ function publicEqualsPop1(bytes memory lhs, bytes memory rhs)
+ public
+ pure
+ returns (bool equal)
+ {
+ lhs.popLastByte();
+ rhs.popLastByte();
+ equal = lhs.equals(rhs);
+ return equal;
+ }
+
+ /// @dev Performs a deep copy of a byte array onto another byte array of greater than or equal length.
+ /// @param dest Byte array that will be overwritten with source bytes.
+ /// @param source Byte array to copy onto dest bytes.
+ function publicDeepCopyBytes(
+ bytes memory dest,
+ bytes memory source
+ )
+ public
+ pure
+ returns (bytes memory)
+ {
+ LibBytes.deepCopyBytes(dest, source);
+ return dest;
+ }
+
+ /// @dev Reads an address from a position in a byte array.
+ /// @param b Byte array containing an address.
+ /// @param index Index in byte array of address.
+ /// @return address from byte array.
+ function publicReadAddress(
+ bytes memory b,
+ uint256 index
+ )
+ public
+ pure
+ returns (address result)
+ {
+ result = b.readAddress(index);
+ return result;
+ }
+
+ /// @dev Writes an address into a specific position in a byte array.
+ /// @param b Byte array to insert address into.
+ /// @param index Index in byte array of address.
+ /// @param input Address to put into byte array.
+ function publicWriteAddress(
+ bytes memory b,
+ uint256 index,
+ address input
+ )
+ public
+ pure
+ returns (bytes memory)
+ {
+ b.writeAddress(index, input);
+ return b;
+ }
+
+ /// @dev Reads a bytes32 value from a position in a byte array.
+ /// @param b Byte array containing a bytes32 value.
+ /// @param index Index in byte array of bytes32 value.
+ /// @return bytes32 value from byte array.
+ function publicReadBytes32(
+ bytes memory b,
+ uint256 index
+ )
+ public
+ pure
+ returns (bytes32 result)
+ {
+ result = b.readBytes32(index);
+ return result;
+ }
+
+ /// @dev Writes a bytes32 into a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input bytes32 to put into byte array.
+ function publicWriteBytes32(
+ bytes memory b,
+ uint256 index,
+ bytes32 input
+ )
+ public
+ pure
+ returns (bytes memory)
+ {
+ b.writeBytes32(index, input);
+ return b;
+ }
+
+ /// @dev Reads a uint256 value from a position in a byte array.
+ /// @param b Byte array containing a uint256 value.
+ /// @param index Index in byte array of uint256 value.
+ /// @return uint256 value from byte array.
+ function publicReadUint256(
+ bytes memory b,
+ uint256 index
+ )
+ public
+ pure
+ returns (uint256 result)
+ {
+ result = b.readUint256(index);
+ return result;
+ }
+
+ /// @dev Writes a uint256 into a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input uint256 to put into byte array.
+ function publicWriteUint256(
+ bytes memory b,
+ uint256 index,
+ uint256 input
+ )
+ public
+ pure
+ returns (bytes memory)
+ {
+ b.writeUint256(index, input);
+ return b;
+ }
+
+ /// @dev Reads an unpadded bytes4 value from a position in a byte array.
+ /// @param b Byte array containing a bytes4 value.
+ /// @param index Index in byte array of bytes4 value.
+ /// @return bytes4 value from byte array.
+ function publicReadBytes4(
+ bytes memory b,
+ uint256 index
+ )
+ public
+ pure
+ returns (bytes4 result)
+ {
+ result = b.readBytes4(index);
+ return result;
+ }
+
+ /// @dev Reads nested bytes from a specific position.
+ /// @param b Byte array containing nested bytes.
+ /// @param index Index of nested bytes.
+ /// @return result Nested bytes.
+ function publicReadBytesWithLength(
+ bytes memory b,
+ uint256 index
+ )
+ public
+ pure
+ returns (bytes memory result)
+ {
+ result = b.readBytesWithLength(index);
+ return result;
+ }
+
+ /// @dev Inserts bytes at a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input bytes to insert.
+ /// @return b Updated input byte array
+ function publicWriteBytesWithLength(
+ bytes memory b,
+ uint256 index,
+ bytes memory input
+ )
+ public
+ pure
+ returns (bytes memory)
+ {
+ b.writeBytesWithLength(index, input);
+ return b;
+ }
+
+ /// @dev Copies a block of memory from one location to another.
+ /// @param mem Memory contents we want to apply memCopy to
+ /// @param dest Destination offset into <mem>.
+ /// @param source Source offset into <mem>.
+ /// @param length Length of bytes to copy from <source> to <dest>
+ /// @return mem Memory contents after calling memCopy.
+ function testMemcpy(
+ bytes mem,
+ uint256 dest,
+ uint256 source,
+ uint256 length
+ )
+ public // not external, we need input in memory
+ pure
+ returns (bytes)
+ {
+ // Sanity check. Overflows are not checked.
+ require(source + length <= mem.length);
+ require(dest + length <= mem.length);
+
+ // Get pointer to memory contents
+ uint256 offset = mem.contentAddress();
+
+ // Execute memCopy adjusted for memory array location
+ LibBytes.memCopy(offset + dest, offset + source, length);
+
+ // Return modified memory contents
+ return mem;
+ }
+}
diff --git a/contracts/utils/contracts/utils/LibBytes/LibBytes.sol b/contracts/utils/contracts/utils/LibBytes/LibBytes.sol
new file mode 100644
index 000000000..369f588ad
--- /dev/null
+++ b/contracts/utils/contracts/utils/LibBytes/LibBytes.sol
@@ -0,0 +1,567 @@
+/*
+
+ Copyright 2018 ZeroEx Intl.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/
+
+pragma solidity 0.4.24;
+
+
+library LibBytes {
+
+ using LibBytes for bytes;
+
+ /// @dev Gets the memory address for a byte array.
+ /// @param input Byte array to lookup.
+ /// @return memoryAddress Memory address of byte array. This
+ /// points to the header of the byte array which contains
+ /// the length.
+ function rawAddress(bytes memory input)
+ internal
+ pure
+ returns (uint256 memoryAddress)
+ {
+ assembly {
+ memoryAddress := input
+ }
+ return memoryAddress;
+ }
+
+ /// @dev Gets the memory address for the contents of a byte array.
+ /// @param input Byte array to lookup.
+ /// @return memoryAddress Memory address of the contents of the byte array.
+ function contentAddress(bytes memory input)
+ internal
+ pure
+ returns (uint256 memoryAddress)
+ {
+ assembly {
+ memoryAddress := add(input, 32)
+ }
+ return memoryAddress;
+ }
+
+ /// @dev Copies `length` bytes from memory location `source` to `dest`.
+ /// @param dest memory address to copy bytes to.
+ /// @param source memory address to copy bytes from.
+ /// @param length number of bytes to copy.
+ function memCopy(
+ uint256 dest,
+ uint256 source,
+ uint256 length
+ )
+ internal
+ pure
+ {
+ if (length < 32) {
+ // Handle a partial word by reading destination and masking
+ // off the bits we are interested in.
+ // This correctly handles overlap, zero lengths and source == dest
+ assembly {
+ let mask := sub(exp(256, sub(32, length)), 1)
+ let s := and(mload(source), not(mask))
+ let d := and(mload(dest), mask)
+ mstore(dest, or(s, d))
+ }
+ } else {
+ // Skip the O(length) loop when source == dest.
+ if (source == dest) {
+ return;
+ }
+
+ // For large copies we copy whole words at a time. The final
+ // word is aligned to the end of the range (instead of after the
+ // previous) to handle partial words. So a copy will look like this:
+ //
+ // ####
+ // ####
+ // ####
+ // ####
+ //
+ // We handle overlap in the source and destination range by
+ // changing the copying direction. This prevents us from
+ // overwriting parts of source that we still need to copy.
+ //
+ // This correctly handles source == dest
+ //
+ if (source > dest) {
+ assembly {
+ // We subtract 32 from `sEnd` and `dEnd` because it
+ // is easier to compare with in the loop, and these
+ // are also the addresses we need for copying the
+ // last bytes.
+ length := sub(length, 32)
+ let sEnd := add(source, length)
+ let dEnd := add(dest, length)
+
+ // Remember the last 32 bytes of source
+ // This needs to be done here and not after the loop
+ // because we may have overwritten the last bytes in
+ // source already due to overlap.
+ let last := mload(sEnd)
+
+ // Copy whole words front to back
+ // Note: the first check is always true,
+ // this could have been a do-while loop.
+ // solhint-disable-next-line no-empty-blocks
+ for {} lt(source, sEnd) {} {
+ mstore(dest, mload(source))
+ source := add(source, 32)
+ dest := add(dest, 32)
+ }
+
+ // Write the last 32 bytes
+ mstore(dEnd, last)
+ }
+ } else {
+ assembly {
+ // We subtract 32 from `sEnd` and `dEnd` because those
+ // are the starting points when copying a word at the end.
+ length := sub(length, 32)
+ let sEnd := add(source, length)
+ let dEnd := add(dest, length)
+
+ // Remember the first 32 bytes of source
+ // This needs to be done here and not after the loop
+ // because we may have overwritten the first bytes in
+ // source already due to overlap.
+ let first := mload(source)
+
+ // Copy whole words back to front
+ // We use a signed comparison here to allow dEnd to become
+ // negative (happens when source and dest < 32). Valid
+ // addresses in local memory will never be larger than
+ // 2**255, so they can be safely re-interpreted as signed.
+ // Note: the first check is always true,
+ // this could have been a do-while loop.
+ // solhint-disable-next-line no-empty-blocks
+ for {} slt(dest, dEnd) {} {
+ mstore(dEnd, mload(sEnd))
+ sEnd := sub(sEnd, 32)
+ dEnd := sub(dEnd, 32)
+ }
+
+ // Write the first 32 bytes
+ mstore(dest, first)
+ }
+ }
+ }
+ }
+
+ /// @dev Returns a slice from a byte array.
+ /// @param b The byte array to take a slice from.
+ /// @param from The starting index for the slice (inclusive).
+ /// @param to The final index for the slice (exclusive).
+ /// @return result The slice containing bytes at indices [from, to)
+ function slice(
+ bytes memory b,
+ uint256 from,
+ uint256 to
+ )
+ internal
+ pure
+ returns (bytes memory result)
+ {
+ require(
+ from <= to,
+ "FROM_LESS_THAN_TO_REQUIRED"
+ );
+ require(
+ to < b.length,
+ "TO_LESS_THAN_LENGTH_REQUIRED"
+ );
+
+ // Create a new bytes structure and copy contents
+ result = new bytes(to - from);
+ memCopy(
+ result.contentAddress(),
+ b.contentAddress() + from,
+ result.length
+ );
+ return result;
+ }
+
+ /// @dev Returns a slice from a byte array without preserving the input.
+ /// @param b The byte array to take a slice from. Will be destroyed in the process.
+ /// @param from The starting index for the slice (inclusive).
+ /// @param to The final index for the slice (exclusive).
+ /// @return result The slice containing bytes at indices [from, to)
+ /// @dev When `from == 0`, the original array will match the slice. In other cases its state will be corrupted.
+ function sliceDestructive(
+ bytes memory b,
+ uint256 from,
+ uint256 to
+ )
+ internal
+ pure
+ returns (bytes memory result)
+ {
+ require(
+ from <= to,
+ "FROM_LESS_THAN_TO_REQUIRED"
+ );
+ require(
+ to < b.length,
+ "TO_LESS_THAN_LENGTH_REQUIRED"
+ );
+
+ // Create a new bytes structure around [from, to) in-place.
+ assembly {
+ result := add(b, from)
+ mstore(result, sub(to, from))
+ }
+ return result;
+ }
+
+ /// @dev Pops the last byte off of a byte array by modifying its length.
+ /// @param b Byte array that will be modified.
+ /// @return The byte that was popped off.
+ function popLastByte(bytes memory b)
+ internal
+ pure
+ returns (bytes1 result)
+ {
+ require(
+ b.length > 0,
+ "GREATER_THAN_ZERO_LENGTH_REQUIRED"
+ );
+
+ // Store last byte.
+ result = b[b.length - 1];
+
+ assembly {
+ // Decrement length of byte array.
+ let newLen := sub(mload(b), 1)
+ mstore(b, newLen)
+ }
+ return result;
+ }
+
+ /// @dev Pops the last 20 bytes off of a byte array by modifying its length.
+ /// @param b Byte array that will be modified.
+ /// @return The 20 byte address that was popped off.
+ function popLast20Bytes(bytes memory b)
+ internal
+ pure
+ returns (address result)
+ {
+ require(
+ b.length >= 20,
+ "GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED"
+ );
+
+ // Store last 20 bytes.
+ result = readAddress(b, b.length - 20);
+
+ assembly {
+ // Subtract 20 from byte array length.
+ let newLen := sub(mload(b), 20)
+ mstore(b, newLen)
+ }
+ return result;
+ }
+
+ /// @dev Tests equality of two byte arrays.
+ /// @param lhs First byte array to compare.
+ /// @param rhs Second byte array to compare.
+ /// @return True if arrays are the same. False otherwise.
+ function equals(
+ bytes memory lhs,
+ bytes memory rhs
+ )
+ internal
+ pure
+ returns (bool equal)
+ {
+ // Keccak gas cost is 30 + numWords * 6. This is a cheap way to compare.
+ // We early exit on unequal lengths, but keccak would also correctly
+ // handle this.
+ return lhs.length == rhs.length && keccak256(lhs) == keccak256(rhs);
+ }
+
+ /// @dev Reads an address from a position in a byte array.
+ /// @param b Byte array containing an address.
+ /// @param index Index in byte array of address.
+ /// @return address from byte array.
+ function readAddress(
+ bytes memory b,
+ uint256 index
+ )
+ internal
+ pure
+ returns (address result)
+ {
+ require(
+ b.length >= index + 20, // 20 is length of address
+ "GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED"
+ );
+
+ // Add offset to index:
+ // 1. Arrays are prefixed by 32-byte length parameter (add 32 to index)
+ // 2. Account for size difference between address length and 32-byte storage word (subtract 12 from index)
+ index += 20;
+
+ // Read address from array memory
+ assembly {
+ // 1. Add index to address of bytes array
+ // 2. Load 32-byte word from memory
+ // 3. Apply 20-byte mask to obtain address
+ result := and(mload(add(b, index)), 0xffffffffffffffffffffffffffffffffffffffff)
+ }
+ return result;
+ }
+
+ /// @dev Writes an address into a specific position in a byte array.
+ /// @param b Byte array to insert address into.
+ /// @param index Index in byte array of address.
+ /// @param input Address to put into byte array.
+ function writeAddress(
+ bytes memory b,
+ uint256 index,
+ address input
+ )
+ internal
+ pure
+ {
+ require(
+ b.length >= index + 20, // 20 is length of address
+ "GREATER_OR_EQUAL_TO_20_LENGTH_REQUIRED"
+ );
+
+ // Add offset to index:
+ // 1. Arrays are prefixed by 32-byte length parameter (add 32 to index)
+ // 2. Account for size difference between address length and 32-byte storage word (subtract 12 from index)
+ index += 20;
+
+ // Store address into array memory
+ assembly {
+ // The address occupies 20 bytes and mstore stores 32 bytes.
+ // First fetch the 32-byte word where we'll be storing the address, then
+ // apply a mask so we have only the bytes in the word that the address will not occupy.
+ // Then combine these bytes with the address and store the 32 bytes back to memory with mstore.
+
+ // 1. Add index to address of bytes array
+ // 2. Load 32-byte word from memory
+ // 3. Apply 12-byte mask to obtain extra bytes occupying word of memory where we'll store the address
+ let neighbors := and(
+ mload(add(b, index)),
+ 0xffffffffffffffffffffffff0000000000000000000000000000000000000000
+ )
+
+ // Make sure input address is clean.
+ // (Solidity does not guarantee this)
+ input := and(input, 0xffffffffffffffffffffffffffffffffffffffff)
+
+ // Store the neighbors and address into memory
+ mstore(add(b, index), xor(input, neighbors))
+ }
+ }
+
+ /// @dev Reads a bytes32 value from a position in a byte array.
+ /// @param b Byte array containing a bytes32 value.
+ /// @param index Index in byte array of bytes32 value.
+ /// @return bytes32 value from byte array.
+ function readBytes32(
+ bytes memory b,
+ uint256 index
+ )
+ internal
+ pure
+ returns (bytes32 result)
+ {
+ require(
+ b.length >= index + 32,
+ "GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED"
+ );
+
+ // Arrays are prefixed by a 256 bit length parameter
+ index += 32;
+
+ // Read the bytes32 from array memory
+ assembly {
+ result := mload(add(b, index))
+ }
+ return result;
+ }
+
+ /// @dev Writes a bytes32 into a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input bytes32 to put into byte array.
+ function writeBytes32(
+ bytes memory b,
+ uint256 index,
+ bytes32 input
+ )
+ internal
+ pure
+ {
+ require(
+ b.length >= index + 32,
+ "GREATER_OR_EQUAL_TO_32_LENGTH_REQUIRED"
+ );
+
+ // Arrays are prefixed by a 256 bit length parameter
+ index += 32;
+
+ // Write the bytes32 into array memory
+ assembly {
+ mstore(add(b, index), input)
+ }
+ }
+
+ /// @dev Reads a uint256 value from a position in a byte array.
+ /// @param b Byte array containing a uint256 value.
+ /// @param index Index in byte array of uint256 value.
+ /// @return uint256 value from byte array.
+ function readUint256(
+ bytes memory b,
+ uint256 index
+ )
+ internal
+ pure
+ returns (uint256 result)
+ {
+ result = uint256(readBytes32(b, index));
+ return result;
+ }
+
+ /// @dev Writes a uint256 into a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input uint256 to put into byte array.
+ function writeUint256(
+ bytes memory b,
+ uint256 index,
+ uint256 input
+ )
+ internal
+ pure
+ {
+ writeBytes32(b, index, bytes32(input));
+ }
+
+ /// @dev Reads an unpadded bytes4 value from a position in a byte array.
+ /// @param b Byte array containing a bytes4 value.
+ /// @param index Index in byte array of bytes4 value.
+ /// @return bytes4 value from byte array.
+ function readBytes4(
+ bytes memory b,
+ uint256 index
+ )
+ internal
+ pure
+ returns (bytes4 result)
+ {
+ require(
+ b.length >= index + 4,
+ "GREATER_OR_EQUAL_TO_4_LENGTH_REQUIRED"
+ );
+
+ // Arrays are prefixed by a 32 byte length field
+ index += 32;
+
+ // Read the bytes4 from array memory
+ assembly {
+ result := mload(add(b, index))
+ // Solidity does not require us to clean the trailing bytes.
+ // We do it anyway
+ result := and(result, 0xFFFFFFFF00000000000000000000000000000000000000000000000000000000)
+ }
+ return result;
+ }
+
+ /// @dev Reads nested bytes from a specific position.
+ /// @dev NOTE: the returned value overlaps with the input value.
+ /// Both should be treated as immutable.
+ /// @param b Byte array containing nested bytes.
+ /// @param index Index of nested bytes.
+ /// @return result Nested bytes.
+ function readBytesWithLength(
+ bytes memory b,
+ uint256 index
+ )
+ internal
+ pure
+ returns (bytes memory result)
+ {
+ // Read length of nested bytes
+ uint256 nestedBytesLength = readUint256(b, index);
+ index += 32;
+
+ // Assert length of <b> is valid, given
+ // length of nested bytes
+ require(
+ b.length >= index + nestedBytesLength,
+ "GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED"
+ );
+
+ // Return a pointer to the byte array as it exists inside `b`
+ assembly {
+ result := add(b, index)
+ }
+ return result;
+ }
+
+ /// @dev Inserts bytes at a specific position in a byte array.
+ /// @param b Byte array to insert <input> into.
+ /// @param index Index in byte array of <input>.
+ /// @param input bytes to insert.
+ function writeBytesWithLength(
+ bytes memory b,
+ uint256 index,
+ bytes memory input
+ )
+ internal
+ pure
+ {
+ // Assert length of <b> is valid, given
+ // length of input
+ require(
+ b.length >= index + 32 + input.length, // 32 bytes to store length
+ "GREATER_OR_EQUAL_TO_NESTED_BYTES_LENGTH_REQUIRED"
+ );
+
+ // Copy <input> into <b>
+ memCopy(
+ b.contentAddress() + index,
+ input.rawAddress(), // includes length of <input>
+ input.length + 32 // +32 bytes to store <input> length
+ );
+ }
+
+ /// @dev Performs a deep copy of a byte array onto another byte array of greater than or equal length.
+ /// @param dest Byte array that will be overwritten with source bytes.
+ /// @param source Byte array to copy onto dest bytes.
+ function deepCopyBytes(
+ bytes memory dest,
+ bytes memory source
+ )
+ internal
+ pure
+ {
+ uint256 sourceLen = source.length;
+ // Dest length must be >= source length, or some bytes would not be copied.
+ require(
+ dest.length >= sourceLen,
+ "GREATER_OR_EQUAL_TO_SOURCE_BYTES_LENGTH_REQUIRED"
+ );
+ memCopy(
+ dest.contentAddress(),
+ source.contentAddress(),
+ sourceLen
+ );
+ }
+}
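LibBytes is meant to be attached to the `bytes` type with `using LibBytes for bytes`, as TestLibBytes does above, so that callers can read and write typed values at byte offsets inside an in-memory array. Below is a minimal usage sketch (illustrative only, not part of this commit; the contract and function names are made up) that splits ERC20 asset data back into its selector and token address:

```solidity
pragma solidity 0.4.24;

import "@0x/contracts-utils/contracts/utils/LibBytes/LibBytes.sol";


// Illustrative sketch: a typical consumer attaches LibBytes to `bytes`
// and reads typed values out of an encoded byte array. Not part of this commit.
contract LibBytesConsumerExample {

    using LibBytes for bytes;

    /// @dev Splits ERC20 asset data (selector + padded token address) back apart.
    /// @param assetData 36-byte array: 4-byte proxy id followed by a 32-byte word.
    function decodeErc20AssetData(bytes memory assetData)
        internal
        pure
        returns (bytes4 proxyId, address token)
    {
        proxyId = assetData.readBytes4(0);  // unpadded 4-byte selector at offset 0
        token = assetData.readAddress(16);  // address occupies bytes [16..35] of the array
        return (proxyId, token);
    }
}
```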
diff --git a/contracts/utils/contracts/utils/Ownable/IOwnable.sol b/contracts/utils/contracts/utils/Ownable/IOwnable.sol
new file mode 100644
index 000000000..5deb13497
--- /dev/null
+++ b/contracts/utils/contracts/utils/Ownable/IOwnable.sol
@@ -0,0 +1,8 @@
+pragma solidity 0.4.24;
+
+
+contract IOwnable {
+
+ function transferOwnership(address newOwner)
+ public;
+}
diff --git a/contracts/utils/contracts/utils/Ownable/Ownable.sol b/contracts/utils/contracts/utils/Ownable/Ownable.sol
new file mode 100644
index 000000000..0c830be68
--- /dev/null
+++ b/contracts/utils/contracts/utils/Ownable/Ownable.sol
@@ -0,0 +1,33 @@
+pragma solidity 0.4.24;
+
+import "./IOwnable.sol";
+
+
+contract Ownable is
+ IOwnable
+{
+ address public owner;
+
+ constructor ()
+ public
+ {
+ owner = msg.sender;
+ }
+
+ modifier onlyOwner() {
+ require(
+ msg.sender == owner,
+ "ONLY_CONTRACT_OWNER"
+ );
+ _;
+ }
+
+ function transferOwnership(address newOwner)
+ public
+ onlyOwner
+ {
+ if (newOwner != address(0)) {
+ owner = newOwner;
+ }
+ }
+}
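Ownable stores the deployer as `owner` in its constructor and exposes the `onlyOwner` modifier; note that `transferOwnership` silently ignores a zero `newOwner` instead of reverting. A minimal sketch of a deriving contract (illustrative only, not part of this commit; `FeeCollectorExample` and `setFeeBps` are made-up names):

```solidity
pragma solidity 0.4.24;

import "@0x/contracts-utils/contracts/utils/Ownable/Ownable.sol";


// Illustrative sketch: deriving contracts get `owner` set to the deployer
// and can guard admin functions with `onlyOwner`. Not part of this commit.
contract FeeCollectorExample is
    Ownable
{
    uint256 public feeBps;

    function setFeeBps(uint256 newFeeBps)
        external
        onlyOwner
    {
        feeBps = newFeeBps;
    }
}
```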
diff --git a/contracts/utils/contracts/utils/ReentrancyGuard/ReentrancyGuard.sol b/contracts/utils/contracts/utils/ReentrancyGuard/ReentrancyGuard.sol
new file mode 100644
index 000000000..9f98a7a16
--- /dev/null
+++ b/contracts/utils/contracts/utils/ReentrancyGuard/ReentrancyGuard.sol
@@ -0,0 +1,45 @@
+/*
+
+ Copyright 2018 ZeroEx Intl.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/
+
+pragma solidity 0.4.24;
+
+
+contract ReentrancyGuard {
+
+ // Locked state of mutex
+ bool private locked = false;
+
+ /// @dev Functions with this modifier cannot be reentered. The mutex will be locked
+ /// before function execution and unlocked after.
+ modifier nonReentrant() {
+ // Ensure mutex is unlocked
+ require(
+ !locked,
+ "REENTRANCY_ILLEGAL"
+ );
+
+ // Lock mutex before function call
+ locked = true;
+
+ // Perform function call
+ _;
+
+ // Unlock mutex after function call
+ locked = false;
+ }
+}
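ReentrancyGuard keeps a single `locked` flag that `nonReentrant` sets before the guarded body runs and clears afterwards, so any nested call back into a `nonReentrant` function of the same contract reverts with `REENTRANCY_ILLEGAL`. A minimal sketch of a withdrawal-style function using it (illustrative only, not part of this commit; the contract and function names are made up):

```solidity
pragma solidity 0.4.24;

import "@0x/contracts-utils/contracts/utils/ReentrancyGuard/ReentrancyGuard.sol";


// Illustrative sketch: a function that makes an external call can be
// protected against re-entry with `nonReentrant`. Not part of this commit.
contract VaultExample is
    ReentrancyGuard
{
    mapping (address => uint256) public balances;

    function deposit()
        external
        payable
    {
        balances[msg.sender] += msg.value;
    }

    function withdraw(uint256 amount)
        external
        nonReentrant
    {
        require(
            balances[msg.sender] >= amount,
            "INSUFFICIENT_BALANCE"
        );
        balances[msg.sender] -= amount;

        // A re-entrant call back into withdraw() during this external call
        // would hit the mutex and revert with REENTRANCY_ILLEGAL.
        require(
            msg.sender.call.value(amount)(),
            "TRANSFER_FAILED"
        );
    }
}
```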
diff --git a/contracts/utils/contracts/utils/SafeMath/SafeMath.sol b/contracts/utils/contracts/utils/SafeMath/SafeMath.sol
new file mode 100644
index 000000000..2855edb9d
--- /dev/null
+++ b/contracts/utils/contracts/utils/SafeMath/SafeMath.sol
@@ -0,0 +1,87 @@
+pragma solidity 0.4.24;
+
+
+contract SafeMath {
+
+ function safeMul(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ if (a == 0) {
+ return 0;
+ }
+ uint256 c = a * b;
+ require(
+ c / a == b,
+ "UINT256_OVERFLOW"
+ );
+ return c;
+ }
+
+ function safeDiv(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ uint256 c = a / b;
+ return c;
+ }
+
+ function safeSub(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ require(
+ b <= a,
+ "UINT256_UNDERFLOW"
+ );
+ return a - b;
+ }
+
+ function safeAdd(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ uint256 c = a + b;
+ require(
+ c >= a,
+ "UINT256_OVERFLOW"
+ );
+ return c;
+ }
+
+ function max64(uint64 a, uint64 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ return a >= b ? a : b;
+ }
+
+ function min64(uint64 a, uint64 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ return a < b ? a : b;
+ }
+
+ function max256(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ return a >= b ? a : b;
+ }
+
+ function min256(uint256 a, uint256 b)
+ internal
+ pure
+ returns (uint256)
+ {
+ return a < b ? a : b;
+ }
+}
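Unlike library-style SafeMath implementations, this one is a plain contract, so consumers inherit it and call `safeAdd`/`safeSub`/`safeMul`/`safeDiv` as internal functions. A minimal sketch (illustrative only, not part of this commit; the contract and function names are made up):

```solidity
pragma solidity 0.4.24;

import "@0x/contracts-utils/contracts/utils/SafeMath/SafeMath.sol";


// Illustrative sketch: SafeMath here is a base contract, not a library,
// so consumers inherit it. Not part of this commit.
contract TokenBalancesExample is
    SafeMath
{
    mapping (address => uint256) internal balances;

    function credit(address account, uint256 amount)
        internal
    {
        // Reverts with UINT256_OVERFLOW instead of silently wrapping.
        balances[account] = safeAdd(balances[account], amount);
    }

    function debit(address account, uint256 amount)
        internal
    {
        // Reverts with UINT256_UNDERFLOW if amount exceeds the balance.
        balances[account] = safeSub(balances[account], amount);
    }
}
```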
diff --git a/contracts/utils/package.json b/contracts/utils/package.json
new file mode 100644
index 000000000..94b3cb7c7
--- /dev/null
+++ b/contracts/utils/package.json
@@ -0,0 +1,89 @@
+{
+ "private": true,
+ "name": "@0x/contracts-utils",
+ "version": "1.0.0",
+ "engines": {
+ "node": ">=6.12"
+ },
+ "description": "Smart contract utils of 0x protocol",
+ "main": "lib/src/index.js",
+ "directories": {
+ "test": "test"
+ },
+ "scripts": {
+ "build": "yarn pre_build && tsc -b",
+ "build:ci": "yarn build",
+ "pre_build": "run-s compile generate_contract_wrappers",
+ "test": "yarn run_mocha",
+ "rebuild_and_test": "run-s build test",
+ "test:coverage": "SOLIDITY_COVERAGE=true run-s build run_mocha coverage:report:text coverage:report:lcov",
+ "test:profiler": "SOLIDITY_PROFILER=true run-s build run_mocha profiler:report:html",
+ "test:trace": "SOLIDITY_REVERT_TRACE=true run-s build run_mocha",
+ "run_mocha":
+ "mocha --require source-map-support/register --require make-promises-safe 'lib/test/**/*.js' --timeout 100000 --bail --exit",
+ "compile": "sol-compiler --contracts-dir contracts",
+ "clean": "shx rm -rf lib generated-artifacts generated-wrappers",
+ "generate_contract_wrappers": "abi-gen --abis ${npm_package_config_abis} --template ../../node_modules/@0x/abi-gen-templates/contract.handlebars --partials '../../node_modules/@0x/abi-gen-templates/partials/**/*.handlebars' --output generated-wrappers --backend ethers",
+ "lint": "tslint --format stylish --project . --exclude ./generated-wrappers/**/* --exclude ./generated-artifacts/**/* --exclude **/lib/**/* && yarn lint-contracts",
+ "coverage:report:text": "istanbul report text",
+ "coverage:report:html": "istanbul report html && open coverage/index.html",
+ "profiler:report:html": "istanbul report html && open coverage/index.html",
+ "coverage:report:lcov": "istanbul report lcov",
+ "test:circleci": "yarn test",
+ "lint-contracts": "solhint contracts/**/**/**/**/*.sol"
+ },
+ "config": {
+ "abis": "generated-artifacts/@(IOwnable|Ownable|LibBytes|ReentrancyGuard|SafeMath|TestConstants|TestLibBytes).json"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/0xProject/0x-monorepo.git"
+ },
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/0xProject/0x-monorepo/issues"
+ },
+ "homepage": "https://github.com/0xProject/0x-monorepo/contracts/core/README.md",
+ "devDependencies": {
+ "@0x/contracts-test-utils": "^1.0.0",
+ "@0x/abi-gen": "^1.0.17",
+ "@0x/dev-utils": "^1.0.19",
+ "@0x/sol-compiler": "^1.1.14",
+ "@0x/sol-cov": "^2.1.14",
+ "@0x/subproviders": "^2.1.6",
+ "@0x/tslint-config": "^1.0.10",
+ "@types/bn.js": "^4.11.0",
+ "@types/lodash": "4.14.104",
+ "@types/node": "*",
+ "@types/yargs": "^10.0.0",
+ "chai": "^4.0.1",
+ "chai-as-promised": "^7.1.0",
+ "chai-bignumber": "^2.0.1",
+ "dirty-chai": "^2.0.1",
+ "make-promises-safe": "^1.1.0",
+ "ethereumjs-abi": "0.6.5",
+ "mocha": "^4.1.0",
+ "npm-run-all": "^4.1.2",
+ "shx": "^0.2.2",
+ "solc": "^0.4.24",
+ "solhint": "^1.2.1",
+ "tslint": "5.11.0",
+ "typescript": "3.0.1",
+ "yargs": "^10.0.3"
+ },
+ "dependencies": {
+ "@0x/base-contract": "^3.0.8",
+ "@0x/order-utils": "^3.0.4",
+ "@0x/contracts-multisig": "^1.0.0",
+ "@0x/types": "^1.3.0",
+ "@0x/typescript-typings": "^3.0.4",
+ "@0x/utils": "^2.0.6",
+ "@0x/web3-wrapper": "^3.1.6",
+ "ethereum-types": "^1.1.2",
+ "ethereumjs-util": "^5.1.1",
+ "lodash": "^4.17.5"
+ },
+ "publishConfig": {
+ "access": "public"
+ }
+}
diff --git a/contracts/utils/src/artifacts/index.ts b/contracts/utils/src/artifacts/index.ts
new file mode 100644
index 000000000..a5c2b215c
--- /dev/null
+++ b/contracts/utils/src/artifacts/index.ts
@@ -0,0 +1,19 @@
+import { ContractArtifact } from 'ethereum-types';
+
+import * as IOwnable from '../../generated-artifacts/IOwnable.json';
+import * as LibBytes from '../../generated-artifacts/LibBytes.json';
+import * as Ownable from '../../generated-artifacts/Ownable.json';
+import * as ReentrancyGuard from '../../generated-artifacts/ReentrancyGuard.json';
+import * as SafeMath from '../../generated-artifacts/SafeMath.json';
+import * as TestConstants from '../../generated-artifacts/TestConstants.json';
+import * as TestLibBytes from '../../generated-artifacts/TestLibBytes.json';
+
+export const artifacts = {
+ TestConstants: TestConstants as ContractArtifact,
+ TestLibBytes: TestLibBytes as ContractArtifact,
+ IOwnable: IOwnable as ContractArtifact,
+ LibBytes: LibBytes as ContractArtifact,
+ Ownable: Ownable as ContractArtifact,
+ SafeMath: SafeMath as ContractArtifact,
+ ReentrancyGuard: ReentrancyGuard as ContractArtifact,
+};
diff --git a/contracts/utils/src/index.ts b/contracts/utils/src/index.ts
new file mode 100644
index 000000000..d55f08ea2
--- /dev/null
+++ b/contracts/utils/src/index.ts
@@ -0,0 +1,2 @@
+export * from './artifacts';
+export * from './wrappers';
diff --git a/contracts/utils/src/wrappers/index.ts b/contracts/utils/src/wrappers/index.ts
new file mode 100644
index 000000000..823b7fa4b
--- /dev/null
+++ b/contracts/utils/src/wrappers/index.ts
@@ -0,0 +1,2 @@
+export * from '../../generated-wrappers/test_constants';
+export * from '../../generated-wrappers/test_lib_bytes';
diff --git a/contracts/utils/test/global_hooks.ts b/contracts/utils/test/global_hooks.ts
new file mode 100644
index 000000000..f8ace376a
--- /dev/null
+++ b/contracts/utils/test/global_hooks.ts
@@ -0,0 +1,17 @@
+import { env, EnvVars } from '@0x/dev-utils';
+
+import { coverage, profiler, provider } from '@0x/contracts-test-utils';
+before('start web3 provider', () => {
+ provider.start();
+});
+after('generate coverage report', async () => {
+ if (env.parseBoolean(EnvVars.SolidityCoverage)) {
+ const coverageSubprovider = coverage.getCoverageSubproviderSingleton();
+ await coverageSubprovider.writeCoverageAsync();
+ }
+ if (env.parseBoolean(EnvVars.SolidityProfiler)) {
+ const profilerSubprovider = profiler.getProfilerSubproviderSingleton();
+ await profilerSubprovider.writeProfilerOutputAsync();
+ }
+ provider.stop();
+});
diff --git a/contracts/utils/test/lib_bytes.ts b/contracts/utils/test/lib_bytes.ts
new file mode 100644
index 000000000..985a98943
--- /dev/null
+++ b/contracts/utils/test/lib_bytes.ts
@@ -0,0 +1,875 @@
+import {
+ chaiSetup,
+ constants,
+ expectContractCallFailedAsync,
+ provider,
+ txDefaults,
+ typeEncodingUtils,
+ web3Wrapper,
+} from '@0x/contracts-test-utils';
+import { BlockchainLifecycle } from '@0x/dev-utils';
+import { generatePseudoRandomSalt } from '@0x/order-utils';
+import { RevertReason } from '@0x/types';
+import { BigNumber } from '@0x/utils';
+import BN = require('bn.js');
+import * as chai from 'chai';
+import ethUtil = require('ethereumjs-util');
+import * as _ from 'lodash';
+
+import { TestLibBytesContract } from '../generated-wrappers/test_lib_bytes';
+import { artifacts } from '../src';
+
+chaiSetup.configure();
+const expect = chai.expect;
+const blockchainLifecycle = new BlockchainLifecycle(web3Wrapper);
+
+// BUG: Ideally we would use Buffer.from(memory).toString('hex')
+// https://github.com/Microsoft/TypeScript/issues/23155
+const toHex = (buf: Uint8Array): string => buf.reduce((a, v) => a + ('00' + v.toString(16)).slice(-2), '0x');
+
+const fromHex = (str: string): Uint8Array => Uint8Array.from(Buffer.from(str.slice(2), 'hex'));
+
+describe('LibBytes', () => {
+ let libBytes: TestLibBytesContract;
+ const byteArrayShorterThan32Bytes = '0x012345';
+ const byteArrayShorterThan20Bytes = byteArrayShorterThan32Bytes;
+ const byteArrayLongerThan32Bytes =
+ '0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef';
+ const byteArrayLongerThan32BytesFirstBytesSwapped =
+ '0x2301456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef';
+ const byteArrayLongerThan32BytesLastBytesSwapped =
+ '0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abefcd';
+ let testAddress: string;
+ let testAddressB: string;
+ const testBytes32 = '0x102030405060708090a0b0c0d0e0f0102030405060708090a0b0c0d0e0f01020';
+ const testBytes32B = '0x534877abd8443578526845cdfef020047528759477fedef87346527659aced32';
+ const testUint256 = new BigNumber(testBytes32, 16);
+ const testUint256B = new BigNumber(testBytes32B, 16);
+ const testBytes4 = '0xabcdef12';
+ const testByte = '0xab';
+ let shortData: string;
+ let shortTestBytes: string;
+ let shortTestBytesAsBuffer: Buffer;
+ let wordOfData: string;
+ let wordOfTestBytes: string;
+ let wordOfTestBytesAsBuffer: Buffer;
+ let longData: string;
+ let longTestBytes: string;
+ let longTestBytesAsBuffer: Buffer;
+
+ before(async () => {
+ await blockchainLifecycle.startAsync();
+ });
+ after(async () => {
+ await blockchainLifecycle.revertAsync();
+ });
+ before(async () => {
+ // Setup accounts & addresses
+ const accounts = await web3Wrapper.getAvailableAddressesAsync();
+ testAddress = accounts[1];
+ testAddressB = accounts[2];
+ // Deploy LibBytes
+ libBytes = await TestLibBytesContract.deployFrom0xArtifactAsync(artifacts.TestLibBytes, provider, txDefaults);
+ // Verify lengths of test data
+ const byteArrayShorterThan32BytesLength = ethUtil.toBuffer(byteArrayShorterThan32Bytes).byteLength;
+ expect(byteArrayShorterThan32BytesLength).to.be.lessThan(32);
+ const byteArrayLongerThan32BytesLength = ethUtil.toBuffer(byteArrayLongerThan32Bytes).byteLength;
+ expect(byteArrayLongerThan32BytesLength).to.be.greaterThan(32);
+ const testBytes32Length = ethUtil.toBuffer(testBytes32).byteLength;
+ expect(testBytes32Length).to.be.equal(32);
+ // Create short test bytes
+ shortData = '0xffffaa';
+ const encodedShortData = ethUtil.toBuffer(shortData);
+ const shortDataLength = new BigNumber(encodedShortData.byteLength);
+ const encodedShortDataLength = typeEncodingUtils.encodeUint256(shortDataLength);
+ shortTestBytesAsBuffer = Buffer.concat([encodedShortDataLength, encodedShortData]);
+ shortTestBytes = ethUtil.bufferToHex(shortTestBytesAsBuffer);
+ // Create test bytes one word in length
+ wordOfData = ethUtil.bufferToHex(typeEncodingUtils.encodeUint256(generatePseudoRandomSalt()));
+ const encodedWordOfData = ethUtil.toBuffer(wordOfData);
+ const wordOfDataLength = new BigNumber(encodedWordOfData.byteLength);
+ const encodedWordOfDataLength = typeEncodingUtils.encodeUint256(wordOfDataLength);
+ wordOfTestBytesAsBuffer = Buffer.concat([encodedWordOfDataLength, encodedWordOfData]);
+ wordOfTestBytes = ethUtil.bufferToHex(wordOfTestBytesAsBuffer);
+ // Create long test bytes (combines short test bytes with word of test bytes)
+ longData = ethUtil.bufferToHex(Buffer.concat([encodedShortData, encodedWordOfData]));
+ const longDataLength = new BigNumber(encodedShortData.byteLength + encodedWordOfData.byteLength);
+ const encodedLongDataLength = typeEncodingUtils.encodeUint256(longDataLength);
+ longTestBytesAsBuffer = Buffer.concat([encodedLongDataLength, encodedShortData, encodedWordOfData]);
+ longTestBytes = ethUtil.bufferToHex(longTestBytesAsBuffer);
+ });
+ beforeEach(async () => {
+ await blockchainLifecycle.startAsync();
+ });
+ afterEach(async () => {
+ await blockchainLifecycle.revertAsync();
+ });
+
+ describe('popLastByte', () => {
+ it('should revert if length is 0', async () => {
+ return expectContractCallFailedAsync(
+ libBytes.publicPopLastByte.callAsync(constants.NULL_BYTES),
+ RevertReason.LibBytesGreaterThanZeroLengthRequired,
+ );
+ });
+ it('should pop the last byte from the input and return it when array holds more than 1 byte', async () => {
+ const [newBytes, poppedByte] = await libBytes.publicPopLastByte.callAsync(byteArrayLongerThan32Bytes);
+ const expectedNewBytes = byteArrayLongerThan32Bytes.slice(0, -2);
+ const expectedPoppedByte = `0x${byteArrayLongerThan32Bytes.slice(-2)}`;
+ expect(newBytes).to.equal(expectedNewBytes);
+ expect(poppedByte).to.equal(expectedPoppedByte);
+ });
+ it('should pop the last byte from the input and return it when array is exactly 1 byte', async () => {
+ const [newBytes, poppedByte] = await libBytes.publicPopLastByte.callAsync(testByte);
+ const expectedNewBytes = '0x';
+ expect(newBytes).to.equal(expectedNewBytes);
+ return expect(poppedByte).to.be.equal(testByte);
+ });
+ });
+
+ describe('popLast20Bytes', () => {
+ it('should revert if length is less than 20', async () => {
+ return expectContractCallFailedAsync(
+ libBytes.publicPopLast20Bytes.callAsync(byteArrayShorterThan20Bytes),
+ RevertReason.LibBytesGreaterOrEqualTo20LengthRequired,
+ );
+ });
+ it('should pop the last 20 bytes from the input and return it when array holds more than 20 bytes', async () => {
+ const [newBytes, poppedAddress] = await libBytes.publicPopLast20Bytes.callAsync(byteArrayLongerThan32Bytes);
+ const expectedNewBytes = byteArrayLongerThan32Bytes.slice(0, -40);
+ const expectedPoppedAddress = `0x${byteArrayLongerThan32Bytes.slice(-40)}`;
+ expect(newBytes).to.equal(expectedNewBytes);
+ expect(poppedAddress).to.equal(expectedPoppedAddress);
+ });
+ it('should pop the last 20 bytes from the input and return it when array is exactly 20 bytes', async () => {
+ const [newBytes, poppedAddress] = await libBytes.publicPopLast20Bytes.callAsync(testAddress);
+ const expectedNewBytes = '0x';
+ const expectedPoppedAddress = testAddress;
+ expect(newBytes).to.equal(expectedNewBytes);
+ expect(poppedAddress).to.equal(expectedPoppedAddress);
+ });
+ });
+
+ describe('equals', () => {
+ it('should return true if byte arrays are equal (both arrays < 32 bytes)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayShorterThan32Bytes,
+ byteArrayShorterThan32Bytes,
+ );
+ return expect(isEqual).to.be.true();
+ });
+ it('should return true if byte arrays are equal (both arrays > 32 bytes)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(isEqual).to.be.true();
+ });
+ it('should return false if byte arrays are not equal (first array < 32 bytes, second array > 32 bytes)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayShorterThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(isEqual).to.be.false();
+ });
+ it('should return false if byte arrays are not equal (first array > 32 bytes, second array < 32 bytes)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayLongerThan32Bytes,
+ byteArrayShorterThan32Bytes,
+ );
+ return expect(isEqual).to.be.false();
+ });
+ it('should return false if byte arrays are not equal (same length, but a byte in first word differs)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayLongerThan32BytesFirstBytesSwapped,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(isEqual).to.be.false();
+ });
+ it('should return false if byte arrays are not equal (same length, but a byte in last word differs)', async () => {
+ const isEqual = await libBytes.publicEquals.callAsync(
+ byteArrayLongerThan32BytesLastBytesSwapped,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(isEqual).to.be.false();
+ });
+
+ describe('should ignore trailing data', () => {
+ it('should return true when both < 32 bytes', async () => {
+ const isEqual = await libBytes.publicEqualsPop1.callAsync('0x0102', '0x0103');
+ return expect(isEqual).to.be.true();
+ });
+ });
+ });
+
+ describe('deepCopyBytes', () => {
+ it('should revert if dest is shorter than source', async () => {
+ return expectContractCallFailedAsync(
+ libBytes.publicDeepCopyBytes.callAsync(byteArrayShorterThan32Bytes, byteArrayLongerThan32Bytes),
+ RevertReason.LibBytesGreaterOrEqualToSourceBytesLengthRequired,
+ );
+ });
+ it('should overwrite dest with source if source and dest have equal length', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length - 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ return expect(zeroedBytesAfterCopy).to.be.equal(byteArrayLongerThan32Bytes);
+ });
+ it('should overwrite the leftmost len(source) bytes of dest if dest is larger than source', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length * 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ const copiedBytes = zeroedBytesAfterCopy.slice(0, byteArrayLongerThan32Bytes.length);
+ return expect(copiedBytes).to.be.equal(byteArrayLongerThan32Bytes);
+ });
+ it('should not overwrite the rightmost bytes of dest if dest is larger than source', async () => {
+ const zeroedByteArrayLongerThan32Bytes = `0x${_.repeat('0', byteArrayLongerThan32Bytes.length * 2)}`;
+ const zeroedBytesAfterCopy = await libBytes.publicDeepCopyBytes.callAsync(
+ zeroedByteArrayLongerThan32Bytes,
+ byteArrayLongerThan32Bytes,
+ );
+ const expectedNotCopiedBytes = zeroedByteArrayLongerThan32Bytes.slice(byteArrayLongerThan32Bytes.length);
+ const notCopiedBytes = zeroedBytesAfterCopy.slice(byteArrayLongerThan32Bytes.length);
+ return expect(notCopiedBytes).to.be.equal(expectedNotCopiedBytes);
+ });
+ });
+
+ describe('readAddress', () => {
+ it('should successfully read address when the address takes up the whole array', async () => {
+ const byteArray = ethUtil.addHexPrefix(testAddress);
+ const testAddressOffset = new BigNumber(0);
+ const address = await libBytes.publicReadAddress.callAsync(byteArray, testAddressOffset);
+ return expect(address).to.be.equal(testAddress);
+ });
+ it('should successfully read address when it is offset in the array', async () => {
+ const addressByteArrayBuffer = ethUtil.toBuffer(testAddress);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, addressByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testAddressOffset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const address = await libBytes.publicReadAddress.callAsync(combinedByteArray, testAddressOffset);
+ return expect(address).to.be.equal(testAddress);
+ });
+ it('should fail if the byte array is too short to hold an address', async () => {
+ const shortByteArray = '0xabcdef';
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadAddress.callAsync(shortByteArray, offset),
+ RevertReason.LibBytesGreaterOrEqualTo20LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold an address', async () => {
+ const byteArray = testAddress;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadAddress.callAsync(byteArray, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo20LengthRequired,
+ );
+ });
+ });
+
+ describe('writeAddress', () => {
+ it('should successfully write address when the address takes up the whole array', async () => {
+ const byteArray = testAddress;
+ const testAddressOffset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteAddress.callAsync(
+ byteArray,
+ testAddressOffset,
+ testAddressB,
+ );
+ return expect(newByteArray).to.be.equal(testAddressB);
+ });
+ it('should successfully write address when it is offset in the array', async () => {
+ const addressByteArrayBuffer = ethUtil.toBuffer(testAddress);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, addressByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testAddressOffset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteAddress.callAsync(
+ combinedByteArray,
+ testAddressOffset,
+ testAddressB,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const addressFromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const addressFromOffset = ethUtil.addHexPrefix(ethUtil.bufferToHex(addressFromOffsetBuffer));
+ return expect(addressFromOffset).to.be.equal(testAddressB);
+ });
+ it('should fail if the byte array is too short to hold an address', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteAddress.callAsync(byteArrayShorterThan20Bytes, offset, testAddress),
+ RevertReason.LibBytesGreaterOrEqualTo20LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold an address', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteAddress.callAsync(byteArray, badOffset, testAddress),
+ RevertReason.LibBytesGreaterOrEqualTo20LengthRequired,
+ );
+ });
+ });
+
+ describe('readBytes32', () => {
+ it('should successfully read bytes32 when the bytes32 takes up the whole array', async () => {
+ const testBytes32Offset = new BigNumber(0);
+ const bytes32 = await libBytes.publicReadBytes32.callAsync(testBytes32, testBytes32Offset);
+ return expect(bytes32).to.be.equal(testBytes32);
+ });
+ it('should successfully read bytes32 when it is offset in the array', async () => {
+ const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testBytes32Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes32 = await libBytes.publicReadBytes32.callAsync(combinedByteArray, testBytes32Offset);
+ return expect(bytes32).to.be.equal(testBytes32);
+ });
+ it('should fail if the byte array is too short to hold a bytes32', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytes32.callAsync(byteArrayShorterThan32Bytes, offset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytes32.callAsync(testBytes32, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ });
+
+ describe('writeBytes32', () => {
+ it('should successfully write bytes32 when the address takes up the whole array', async () => {
+ const byteArray = testBytes32;
+ const testBytes32Offset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteBytes32.callAsync(
+ byteArray,
+ testBytes32Offset,
+ testBytes32B,
+ );
+ return expect(newByteArray).to.be.equal(testBytes32B);
+ });
+ it('should successfully write bytes32 when it is offset in the array', async () => {
+ const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testBytes32Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteBytes32.callAsync(
+ combinedByteArray,
+ testBytes32Offset,
+ testBytes32B,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const bytes32FromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const bytes32FromOffset = ethUtil.addHexPrefix(ethUtil.bufferToHex(bytes32FromOffsetBuffer));
+ return expect(bytes32FromOffset).to.be.equal(testBytes32B);
+ });
+ it('should fail if the byte array is too short to hold a bytes32', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteBytes32.callAsync(byteArrayShorterThan32Bytes, offset, testBytes32),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a bytes32', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteBytes32.callAsync(byteArray, badOffset, testBytes32),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ });
+
+ describe('readUint256', () => {
+ it('should successfully read uint256 when the uint256 takes up the whole array', async () => {
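+            // Convert through bn.js so that ethUtil.toBuffer can serialize the value as big-endian bytes.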
+ const formattedTestUint256 = new BN(testUint256.toString(10));
+ const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
+ const byteArray = ethUtil.bufferToHex(testUint256AsBuffer);
+ const testUint256Offset = new BigNumber(0);
+ const uint256 = await libBytes.publicReadUint256.callAsync(byteArray, testUint256Offset);
+ return expect(uint256).to.bignumber.equal(testUint256);
+ });
+ it('should successfully read uint256 when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const formattedTestUint256 = new BN(testUint256.toString(10));
+ const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, testUint256AsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const uint256 = await libBytes.publicReadUint256.callAsync(combinedByteArray, testUint256Offset);
+ return expect(uint256).to.bignumber.equal(testUint256);
+ });
+ it('should fail if the byte array is too short to hold a uint256', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadUint256.callAsync(byteArrayShorterThan32Bytes, offset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a uint256', async () => {
+ const formattedTestUint256 = new BN(testUint256.toString(10));
+ const testUint256AsBuffer = ethUtil.toBuffer(formattedTestUint256);
+ const byteArray = ethUtil.bufferToHex(testUint256AsBuffer);
+ const badOffset = new BigNumber(testUint256AsBuffer.byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadUint256.callAsync(byteArray, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ });
+
+ describe('writeUint256', () => {
+        it('should successfully write uint256 when the uint256 takes up the whole array', async () => {
+ const byteArray = testBytes32;
+ const testUint256Offset = new BigNumber(0);
+ const newByteArray = await libBytes.publicWriteUint256.callAsync(
+ byteArray,
+ testUint256Offset,
+ testUint256B,
+ );
+ const newByteArrayAsUint256 = new BigNumber(newByteArray, 16);
+ return expect(newByteArrayAsUint256).to.be.bignumber.equal(testUint256B);
+ });
+ it('should successfully write uint256 when it is offset in the array', async () => {
+ const bytes32ByteArrayBuffer = ethUtil.toBuffer(testBytes32);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes32ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const newByteArray = await libBytes.publicWriteUint256.callAsync(
+ combinedByteArray,
+ testUint256Offset,
+ testUint256B,
+ );
+ const newByteArrayBuffer = ethUtil.toBuffer(newByteArray);
+ const uint256FromOffsetBuffer = newByteArrayBuffer.slice(prefixByteArrayBuffer.byteLength);
+ const uint256FromOffset = new BigNumber(
+ ethUtil.addHexPrefix(ethUtil.bufferToHex(uint256FromOffsetBuffer)),
+ 16,
+ );
+ return expect(uint256FromOffset).to.be.bignumber.equal(testUint256B);
+ });
+ it('should fail if the byte array is too short to hold a uint256', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteUint256.callAsync(byteArrayShorterThan32Bytes, offset, testUint256),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a uint256', async () => {
+ const byteArray = byteArrayLongerThan32Bytes;
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArray).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteUint256.callAsync(byteArray, badOffset, testUint256),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ });
+
+ describe('readBytes4', () => {
+ it('should revert if byte array has a length < 4', async () => {
+ const byteArrayLessThan4Bytes = '0x010101';
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytes4.callAsync(byteArrayLessThan4Bytes, offset),
+ RevertReason.LibBytesGreaterOrEqualTo4LengthRequired,
+ );
+ });
+ it('should return the first 4 bytes of a byte array of arbitrary length', async () => {
+ const first4Bytes = await libBytes.publicReadBytes4.callAsync(byteArrayLongerThan32Bytes, new BigNumber(0));
+ const expectedFirst4Bytes = byteArrayLongerThan32Bytes.slice(0, 10);
+ expect(first4Bytes).to.equal(expectedFirst4Bytes);
+ });
+ it('should successfully read bytes4 when the bytes4 takes up the whole array', async () => {
+ const testBytes4Offset = new BigNumber(0);
+ const bytes4 = await libBytes.publicReadBytes4.callAsync(testBytes4, testBytes4Offset);
+ return expect(bytes4).to.be.equal(testBytes4);
+ });
+ it('should successfully read bytes4 when it is offset in the array', async () => {
+ const bytes4ByteArrayBuffer = ethUtil.toBuffer(testBytes4);
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, bytes4ByteArrayBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testBytes4Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes4 = await libBytes.publicReadBytes4.callAsync(combinedByteArray, testBytes4Offset);
+ return expect(bytes4).to.be.equal(testBytes4);
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold a bytes4', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(testBytes4).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytes4.callAsync(testBytes4, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo4LengthRequired,
+ );
+ });
+ });
+
+ describe('readBytesWithLength', () => {
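+        // Nested bytes are length-prefixed: a 32-byte length word followed by the payload.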
+ it('should successfully read short, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(shortTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully read short, nested array of bytes when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, shortTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully read a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(wordOfTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+ it('should successfully read a nested array of bytes - one word in length - when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, wordOfTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+ it('should successfully read long, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(longTestBytes, testBytesOffset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should successfully read long, nested array of bytes when it is offset in the array', async () => {
+ const prefixByteArrayBuffer = ethUtil.toBuffer('0xabcdef');
+ const combinedByteArrayBuffer = Buffer.concat([prefixByteArrayBuffer, longTestBytesAsBuffer]);
+ const combinedByteArray = ethUtil.bufferToHex(combinedByteArrayBuffer);
+ const testUint256Offset = new BigNumber(prefixByteArrayBuffer.byteLength);
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(combinedByteArray, testUint256Offset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+ // The length of the nested array is 32 bytes. By storing less than 32 bytes, a length cannot be read.
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytesWithLength.callAsync(byteArrayShorterThan32Bytes, offset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+        it('should fail if we store a nested byte array length without a nested byte array', async () => {
+ const offset = new BigNumber(0);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytesWithLength.callAsync(testBytes32, offset),
+ RevertReason.LibBytesGreaterOrEqualToNestedBytesLengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(byteArrayShorterThan32Bytes).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytesWithLength.callAsync(byteArrayShorterThan32Bytes, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the nested byte array', async () => {
+ const badOffset = new BigNumber(ethUtil.toBuffer(testBytes32).byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicReadBytesWithLength.callAsync(testBytes32, badOffset),
+ RevertReason.LibBytesGreaterOrEqualTo32LengthRequired,
+ );
+ });
+ });
+
+ describe('writeBytesWithLength', () => {
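+        // writeBytesWithLength writes the 32-byte length word followed by the contents, so the destination array
+        // must already be large enough to hold both.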
+ it('should successfully write short, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
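+            // Allocate a zero-filled destination array the same size as the length-prefixed fixture.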
+            const emptyByteArray = ethUtil.bufferToHex(Buffer.alloc(shortTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ testBytesOffset,
+ shortData,
+ );
+ const bytesRead = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(shortData);
+ });
+ it('should successfully write short, nested array of bytes when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+                Buffer.alloc(prefixDataAsBuffer.byteLength + shortTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ prefixOffset,
+ prefixData,
+ );
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ bytesWritten,
+ testBytesOffset,
+ shortData,
+ );
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(shortData);
+ });
+ it('should successfully write a nested array of bytes - one word in length - when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(Buffer.alloc(wordOfTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ testBytesOffset,
+ wordOfData,
+ );
+ const bytesRead = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(wordOfData);
+ });
+ it('should successfully write a nested array of bytes - one word in length - when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+                Buffer.alloc(prefixDataAsBuffer.byteLength + wordOfTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ prefixOffset,
+ prefixData,
+ );
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ bytesWritten,
+ testBytesOffset,
+ wordOfData,
+ );
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(wordOfData);
+ });
+        it('should successfully write long, nested array of bytes when it takes up the whole array', async () => {
+ const testBytesOffset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(Buffer.alloc(longTestBytesAsBuffer.byteLength));
+ const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ testBytesOffset,
+ longData,
+ );
+ const bytesRead = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytesRead).to.be.equal(longData);
+ });
+ it('should successfully write long, nested array of bytes when it is offset in the array', async () => {
+ // Write a prefix to the array
+ const prefixData = '0xabcdef';
+ const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
+ const prefixOffset = new BigNumber(0);
+ const emptyByteArray = ethUtil.bufferToHex(
+                Buffer.alloc(prefixDataAsBuffer.byteLength + longTestBytesAsBuffer.byteLength),
+ );
+ let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
+ emptyByteArray,
+ prefixOffset,
+ prefixData,
+ );
+ // Write data after prefix
+ const testBytesOffset = new BigNumber(prefixDataAsBuffer.byteLength);
+ bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(bytesWritten, testBytesOffset, longData);
+ // Read data after prefix and validate
+ const bytes = await libBytes.publicReadBytesWithLength.callAsync(bytesWritten, testBytesOffset);
+ return expect(bytes).to.be.equal(longData);
+ });
+ it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
+ const offset = new BigNumber(0);
+            const emptyByteArray = ethUtil.bufferToHex(Buffer.alloc(1));
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteBytesWithLength.callAsync(emptyByteArray, offset, longData),
+ RevertReason.LibBytesGreaterOrEqualToNestedBytesLengthRequired,
+ );
+ });
+ it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array', async () => {
+            const emptyByteArray = ethUtil.bufferToHex(Buffer.alloc(shortTestBytesAsBuffer.byteLength));
+            const badOffset = new BigNumber(shortTestBytesAsBuffer.byteLength);
+ return expectContractCallFailedAsync(
+ libBytes.publicWriteBytesWithLength.callAsync(emptyByteArray, badOffset, shortData),
+ RevertReason.LibBytesGreaterOrEqualToNestedBytesLengthRequired,
+ );
+ });
+ });
+
+ describe('memCopy', () => {
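+        // Exercises memCopy (via testMemcpy) against the refMemcpy reference implementation below, covering
+        // forward, backward, and overlapping copies.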
+ // Create memory 0x000102...FF
+ const memSize = 256;
+ // tslint:disable:no-shadowed-variable
+ const memory = new Uint8Array(memSize).map((_, i) => i);
+ const memHex = toHex(memory);
+
+ // Reference implementation to test against
+ const refMemcpy = (mem: Uint8Array, dest: number, source: number, length: number): Uint8Array =>
+ Uint8Array.from(mem).copyWithin(dest, source, source + length);
+
+ // Test vectors: destination, source, length, job description
+ type Tests = Array<[number, number, number, string]>;
+
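+        // Turns each [dest, source, length] vector into its own test case: run memCopy over the 256-byte buffer
+        // on-chain and compare the full result against refMemcpy.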
+ const test = (tests: Tests) =>
+ tests.forEach(([dest, source, length, job]) =>
+ it(job, async () => {
+ const expected = refMemcpy(memory, dest, source, length);
+ const resultStr = await libBytes.testMemcpy.callAsync(
+ memHex,
+ new BigNumber(dest),
+ new BigNumber(source),
+ new BigNumber(length),
+ );
+ const result = fromHex(resultStr);
+ expect(result).to.deep.equal(expected);
+ }),
+ );
+
+ test([[0, 0, 0, 'copies zero bytes with overlap']]);
+
+ describe('copies forward', () =>
+ test([
+ [128, 0, 0, 'zero bytes'],
+ [128, 0, 1, 'one byte'],
+ [128, 0, 11, 'eleven bytes'],
+ [128, 0, 31, 'thirty-one bytes'],
+ [128, 0, 32, 'one word'],
+ [128, 0, 64, 'two words'],
+ [128, 0, 96, 'three words'],
+ [128, 0, 33, 'one word and one byte'],
+ [128, 0, 72, 'two words and eight bytes'],
+ [128, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward within one word', () =>
+ test([
+ [16, 0, 0, 'zero bytes'],
+ [16, 0, 1, 'one byte'],
+ [16, 0, 11, 'eleven bytes'],
+ [16, 0, 16, 'sixteen bytes'],
+ ]));
+
+ describe('copies forward with one byte overlap', () =>
+ test([
+ [0, 0, 1, 'one byte'],
+ [10, 0, 11, 'eleven bytes'],
+ [30, 0, 31, 'thirty-one bytes'],
+ [31, 0, 32, 'one word'],
+ [32, 0, 33, 'one word and one byte'],
+ [71, 0, 72, 'two words and eight bytes'],
+ [99, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with thirty-one bytes overlap', () =>
+ test([
+ [0, 0, 31, 'thirty-one bytes'],
+ [1, 0, 32, 'one word'],
+ [2, 0, 33, 'one word and one byte'],
+ [41, 0, 72, 'two words and eight bytes'],
+ [69, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with one word overlap', () =>
+ test([
+ [0, 0, 32, 'one word'],
+ [1, 0, 33, 'one word and one byte'],
+ [41, 0, 72, 'two words and eight bytes'],
+ [69, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with one word and one byte overlap', () =>
+ test([
+ [0, 0, 33, 'one word and one byte'],
+ [40, 0, 72, 'two words and eight bytes'],
+ [68, 0, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies forward with two words overlap', () =>
+ test([
+ [0, 0, 64, 'two words'],
+ [8, 0, 72, 'two words and eight bytes'],
+ [36, 0, 100, 'three words and four bytes'],
+ ]));
+
+        describe('copies forward within one word with one byte overlap', () =>
+ test([[0, 0, 1, 'one byte'], [10, 0, 11, 'eleven bytes'], [15, 0, 16, 'sixteen bytes']]));
+
+ describe('copies backward', () =>
+ test([
+ [0, 128, 0, 'zero bytes'],
+ [0, 128, 1, 'one byte'],
+ [0, 128, 11, 'eleven bytes'],
+ [0, 128, 31, 'thirty-one bytes'],
+ [0, 128, 32, 'one word'],
+ [0, 128, 64, 'two words'],
+ [0, 128, 96, 'three words'],
+ [0, 128, 33, 'one word and one byte'],
+ [0, 128, 72, 'two words and eight bytes'],
+ [0, 128, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward within one word', () =>
+ test([
+ [0, 16, 0, 'zero bytes'],
+ [0, 16, 1, 'one byte'],
+ [0, 16, 11, 'eleven bytes'],
+ [0, 16, 16, 'sixteen bytes'],
+ ]));
+
+ describe('copies backward with one byte overlap', () =>
+ test([
+ [0, 0, 1, 'one byte'],
+ [0, 10, 11, 'eleven bytes'],
+ [0, 30, 31, 'thirty-one bytes'],
+ [0, 31, 32, 'one word'],
+ [0, 32, 33, 'one word and one byte'],
+ [0, 71, 72, 'two words and eight bytes'],
+ [0, 99, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with thirty-one bytes overlap', () =>
+ test([
+ [0, 0, 31, 'thirty-one bytes'],
+ [0, 1, 32, 'one word'],
+ [0, 2, 33, 'one word and one byte'],
+ [0, 41, 72, 'two words and eight bytes'],
+ [0, 69, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with one word overlap', () =>
+ test([
+ [0, 0, 32, 'one word'],
+ [0, 1, 33, 'one word and one byte'],
+ [0, 41, 72, 'two words and eight bytes'],
+ [0, 69, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with one word and one byte overlap', () =>
+ test([
+ [0, 0, 33, 'one word and one byte'],
+ [0, 40, 72, 'two words and eight bytes'],
+ [0, 68, 100, 'three words and four bytes'],
+ ]));
+
+ describe('copies backward with two words overlap', () =>
+ test([
+ [0, 0, 64, 'two words'],
+ [0, 8, 72, 'two words and eight bytes'],
+ [0, 36, 100, 'three words and four bytes'],
+ ]));
+
+        describe('copies backward within one word with one byte overlap', () =>
+ test([[0, 0, 1, 'one byte'], [0, 10, 11, 'eleven bytes'], [0, 15, 16, 'sixteen bytes']]));
+ });
+});
+// tslint:disable:max-file-line-count
diff --git a/contracts/utils/test/libs.ts b/contracts/utils/test/libs.ts
new file mode 100644
index 000000000..81596b2e4
--- /dev/null
+++ b/contracts/utils/test/libs.ts
@@ -0,0 +1,34 @@
+import { chaiSetup, provider, txDefaults, web3Wrapper } from '@0x/contracts-test-utils';
+import { BlockchainLifecycle } from '@0x/dev-utils';
+import * as chai from 'chai';
+
+import { TestConstantsContract } from '../generated-wrappers/test_constants';
+import { artifacts } from '../src';
+
+chaiSetup.configure();
+const expect = chai.expect;
+
+const blockchainLifecycle = new BlockchainLifecycle(web3Wrapper);
+
+describe('Libs', () => {
+ beforeEach(async () => {
+ await blockchainLifecycle.startAsync();
+ });
+ afterEach(async () => {
+ await blockchainLifecycle.revertAsync();
+ });
+
+ describe('LibConstants', () => {
+ describe('ZRX_ASSET_DATA', () => {
+ it('should have the correct ZRX_ASSET_DATA', async () => {
+ const testConstants = await TestConstantsContract.deployFrom0xArtifactAsync(
+ artifacts.TestConstants,
+ provider,
+ txDefaults,
+ );
+ const isValid = await testConstants.assertValidZrxAssetData.callAsync();
+ expect(isValid).to.be.equal(true);
+ });
+ });
+ });
+});
diff --git a/contracts/utils/tsconfig.json b/contracts/utils/tsconfig.json
new file mode 100644
index 000000000..68251e6b0
--- /dev/null
+++ b/contracts/utils/tsconfig.json
@@ -0,0 +1,19 @@
+{
+ "extends": "../../tsconfig",
+ "compilerOptions": {
+ "outDir": "lib",
+ "rootDir": ".",
+ "resolveJsonModule": true
+ },
+ "include": ["./src/**/*", "./test/**/*", "./generated-wrappers/**/*"],
+ "files": [
+ "./generated-artifacts/TestConstants.json",
+ "./generated-artifacts/TestLibBytes.json",
+ "./generated-artifacts/IOwnable.json",
+ "./generated-artifacts/Ownable.json",
+ "./generated-artifacts/LibBytes.json",
+ "./generated-artifacts/SafeMath.json",
+ "./generated-artifacts/ReentrancyGuard.json"
+ ],
+ "exclude": ["./deploy/solc/solc_bin"]
+}
diff --git a/contracts/utils/tslint.json b/contracts/utils/tslint.json
new file mode 100644
index 000000000..1bb3ac2a2
--- /dev/null
+++ b/contracts/utils/tslint.json
@@ -0,0 +1,6 @@
+{
+ "extends": ["@0x/tslint-config"],
+ "rules": {
+ "custom-no-magic-numbers": false
+ }
+}