author     Fabio Berger <me@fabioberger.com>    2018-06-22 16:39:07 +0800
committer  Fabio Berger <me@fabioberger.com>    2018-06-22 16:39:07 +0800
commit     3ce295a2af17feef6cd4e3140196501805493719 (patch)
tree       964df912ce86d98a211f81f3d6159d797a37c3b3 /packages/sol-cov/src
parent     a30107ab867964d371b2d5fc6791c7b1963f1c7b (diff)
parent     0515c6acded983bba05320895ea2c2891f37055c (diff)
Merge branch 'v2-prototype' into refactor/check-revert-reasons
* v2-prototype: (40 commits)
  Use make-promises-safe as a preloader instead of manually importing
  Updated compiler runs to be 1,000,000
  Add event to setSignatureValidatorApproval, rename signer => signerAddress accross all contracts
  Add senderAddress to Fill and Cancel logs, add comments to events and types
  Fix Island component
  Add missing image assets for Chris and Mel
  Fix some bugs in sol-cov
  Remove unreachable PreSigned check
  Fix linting
  Buttons look hella disabled now
  Remove border radius, fix width issue for unlock step
  Add Chris and Mel to about page
  fix linter issues
  only call getLocationByOffset if source if defined
  Set settleOrder and settleMatchedOrders to private
  Prevent prettier issue
  Support mobile friendly onboarding flows
  Removed MixinSettlement. Moved `settleOrder` into `MixinExchangeCore` and `settleMatchedOrders` into `MixinMatchOrders`
  Migrations after rebasing
  Linter
  ...
Diffstat (limited to 'packages/sol-cov/src')
-rw-r--r--  packages/sol-cov/src/ast_visitor.ts                |   5
-rw-r--r--  packages/sol-cov/src/collect_coverage_entries.ts   |   2
-rw-r--r--  packages/sol-cov/src/coverage_subprovider.ts       |   6
-rw-r--r--  packages/sol-cov/src/get_source_range_snippet.ts   | 180
-rw-r--r--  packages/sol-cov/src/revert_trace_subprovider.ts   |  81
-rw-r--r--  packages/sol-cov/src/source_maps.ts                |   4
-rw-r--r--  packages/sol-cov/src/types.ts                      |  10
-rw-r--r--  packages/sol-cov/src/utils.ts                      |  18
8 files changed, 285 insertions, 21 deletions
diff --git a/packages/sol-cov/src/ast_visitor.ts b/packages/sol-cov/src/ast_visitor.ts
index 16984b5ec..564f0f7d2 100644
--- a/packages/sol-cov/src/ast_visitor.ts
+++ b/packages/sol-cov/src/ast_visitor.ts
@@ -116,8 +116,9 @@ export class ASTVisitor {
this._statementMap[this._entryId++] = this._getExpressionRange(ast);
}
private _getExpressionRange(ast: Parser.ASTNode): SingleFileSourceRange {
- const start = this._locationByOffset[ast.range[0]];
- const end = this._locationByOffset[ast.range[1] + 1];
+ const astRange = ast.range as [number, number];
+ const start = this._locationByOffset[astRange[0]];
+ const end = this._locationByOffset[astRange[1] + 1];
const range = {
start,
end,
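Note on the hunk above: the cast to [number, number] works around solidity-parser-antlr typing `range` as optional, and the start/end character offsets are then resolved through a precomputed offset-to-location map. Below is a minimal, illustrative sketch of how such a map can be built; it is an assumption about the general technique, not sol-cov's actual getLocationByOffset implementation.

// Illustration only: build an offset -> { line, column } map for a source string.
interface LineColumn {
    line: number;
    column: number;
}
function buildLocationByOffset(source: string): { [offset: number]: LineColumn } {
    const locationByOffset: { [offset: number]: LineColumn } = {};
    let line = 1;
    let column = 0;
    // Include one slot past the end so lookups at range[1] + 1 still resolve.
    for (let offset = 0; offset <= source.length; offset++) {
        locationByOffset[offset] = { line, column };
        if (source[offset] === '\n') {
            line++;
            column = 0;
        } else {
            column++;
        }
    }
    return locationByOffset;
}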
diff --git a/packages/sol-cov/src/collect_coverage_entries.ts b/packages/sol-cov/src/collect_coverage_entries.ts
index b145f044e..3fc85008c 100644
--- a/packages/sol-cov/src/collect_coverage_entries.ts
+++ b/packages/sol-cov/src/collect_coverage_entries.ts
@@ -10,7 +10,7 @@ const coverageEntriesBySourceHash: { [sourceHash: string]: CoverageEntriesDescri
export const collectCoverageEntries = (contractSource: string) => {
const sourceHash = ethUtil.sha3(contractSource).toString('hex');
- if (_.isUndefined(coverageEntriesBySourceHash[sourceHash])) {
+ if (_.isUndefined(coverageEntriesBySourceHash[sourceHash]) && !_.isUndefined(contractSource)) {
const ast = parser.parse(contractSource, { range: true });
const locationByOffset = getLocationByOffset(contractSource);
const visitor = new ASTVisitor(locationByOffset);
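The new guard above preserves the existing memoization: coverage entries are computed once per unique source, keyed by the sha3 hash of the source text. A small self-contained sketch of the same caching pattern follows; expensiveLineCount is a made-up stand-in for the real AST walk.

import * as ethUtil from 'ethereumjs-util';
import * as _ from 'lodash';

// Cache keyed by a content hash so identical inputs are only processed once.
const resultBySourceHash: { [sourceHash: string]: number } = {};
function expensiveLineCount(source: string): number {
    const sourceHash = ethUtil.sha3(source).toString('hex');
    if (_.isUndefined(resultBySourceHash[sourceHash])) {
        resultBySourceHash[sourceHash] = source.split('\n').length; // stand-in for the expensive work
    }
    return resultBySourceHash[sourceHash];
}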
diff --git a/packages/sol-cov/src/coverage_subprovider.ts b/packages/sol-cov/src/coverage_subprovider.ts
index 065a48434..45843bc96 100644
--- a/packages/sol-cov/src/coverage_subprovider.ts
+++ b/packages/sol-cov/src/coverage_subprovider.ts
@@ -66,6 +66,12 @@ export const coverageHandler: SingleFileSubtraceHandler = (
): Coverage => {
const absoluteFileName = contractData.sources[fileIndex];
const coverageEntriesDescription = collectCoverageEntries(contractData.sourceCodes[fileIndex]);
+
+ // if the source wasn't provided for the fileIndex, we can't cover the file
+ if (_.isUndefined(coverageEntriesDescription)) {
+ return {};
+ }
+
let sourceRanges = _.map(subtrace, structLog => pcToSourceRange[structLog.pc]);
sourceRanges = _.compact(sourceRanges); // Some PCs don't map to a source range and we just ignore them.
// By default lodash does a shallow object comparison. We JSON.stringify them and compare as strings.
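The comment above refers to de-duplicating structurally equal source ranges: lodash compares objects by reference, so the serialized JSON string is used as the equality key instead. A toy sketch of that trick (simplified types, not the handler's actual code):

import * as _ from 'lodash';

// Structurally equal ranges would survive _.uniq (reference equality),
// so serialize each range and compare the resulting strings instead.
interface ToyRange { start: number; end: number; }
function uniqueRanges(ranges: ToyRange[]): ToyRange[] {
    return _.uniqBy(ranges, range => JSON.stringify(range));
}
// uniqueRanges([{ start: 1, end: 2 }, { start: 1, end: 2 }]) returns a single element.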
diff --git a/packages/sol-cov/src/get_source_range_snippet.ts b/packages/sol-cov/src/get_source_range_snippet.ts
new file mode 100644
index 000000000..30d6ec802
--- /dev/null
+++ b/packages/sol-cov/src/get_source_range_snippet.ts
@@ -0,0 +1,180 @@
+import * as ethUtil from 'ethereumjs-util';
+import * as _ from 'lodash';
+import * as Parser from 'solidity-parser-antlr';
+
+import { SingleFileSourceRange, SourceRange, SourceSnippet } from './types';
+import { utils } from './utils';
+
+interface ASTInfo {
+ type: string;
+ node: Parser.ASTNode;
+ name: string | null;
+ range?: SingleFileSourceRange;
+}
+
+// Parsing source code for each transaction/code is slow and therefore we cache it
+const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
+
+export function getSourceRangeSnippet(sourceRange: SourceRange, sourceCode: string): SourceSnippet | null {
+ const sourceHash = ethUtil.sha3(sourceCode).toString('hex');
+ if (_.isUndefined(parsedSourceByHash[sourceHash])) {
+ parsedSourceByHash[sourceHash] = Parser.parse(sourceCode, { loc: true });
+ }
+ const astNode = parsedSourceByHash[sourceHash];
+ const visitor = new ASTInfoVisitor();
+ Parser.visit(astNode, visitor);
+ const astInfo = visitor.getASTInfoForRange(sourceRange);
+ if (astInfo === null) {
+ return null;
+ }
+ const sourceCodeInRange = utils.getRange(sourceCode, sourceRange.location);
+ return {
+ ...astInfo,
+ range: astInfo.range as SingleFileSourceRange,
+ source: sourceCodeInRange,
+ fileName: sourceRange.fileName,
+ };
+}
+
+// A visitor which collects ASTInfo for most nodes in the AST.
+class ASTInfoVisitor {
+ private _astInfos: ASTInfo[] = [];
+ public getASTInfoForRange(sourceRange: SourceRange): ASTInfo | null {
+ // HACK(albrow): Sometimes the source range doesn't exactly match that
+ // of astInfo. To work around that we try with a +/-1 offset on
+ // end.column. If nothing matches even with the offset, we return null.
+ const offset = {
+ start: {
+ line: 0,
+ column: 0,
+ },
+ end: {
+ line: 0,
+ column: 0,
+ },
+ };
+ let astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ offset.end.column += 1;
+ astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ offset.end.column -= 2;
+ astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ return null;
+ }
+ public ContractDefinition(ast: Parser.ContractDefinition): void {
+ this._visitContractDefinition(ast);
+ }
+ public IfStatement(ast: Parser.IfStatement): void {
+ this._visitStatement(ast);
+ }
+ public FunctionDefinition(ast: Parser.FunctionDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ModifierDefinition(ast: Parser.ModifierDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ForStatement(ast: Parser.ForStatement): void {
+ this._visitStatement(ast);
+ }
+ public ReturnStatement(ast: Parser.ReturnStatement): void {
+ this._visitStatement(ast);
+ }
+ public BreakStatement(ast: Parser.BreakStatement): void {
+ this._visitStatement(ast);
+ }
+ public ContinueStatement(ast: Parser.ContinueStatement): void {
+ this._visitStatement(ast);
+ }
+ public EmitStatement(ast: any /* TODO: Parser.EmitStatement */): void {
+ this._visitStatement(ast);
+ }
+ public VariableDeclarationStatement(ast: Parser.VariableDeclarationStatement): void {
+ this._visitStatement(ast);
+ }
+ public Statement(ast: Parser.Statement): void {
+ this._visitStatement(ast);
+ }
+ public WhileStatement(ast: Parser.WhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public SimpleStatement(ast: Parser.SimpleStatement): void {
+ this._visitStatement(ast);
+ }
+ public ThrowStatement(ast: Parser.ThrowStatement): void {
+ this._visitStatement(ast);
+ }
+ public DoWhileStatement(ast: Parser.DoWhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public ExpressionStatement(ast: Parser.ExpressionStatement): void {
+ this._visitStatement(ast.expression);
+ }
+ public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void {
+ this._visitStatement(ast);
+ }
+ public ModifierInvocation(ast: Parser.ModifierInvocation): void {
+ const BUILTIN_MODIFIERS = ['public', 'view', 'payable', 'external', 'internal', 'pure', 'constant'];
+ if (!_.includes(BUILTIN_MODIFIERS, ast.name)) {
+ this._visitStatement(ast);
+ }
+ }
+ private _visitStatement(ast: Parser.ASTNode): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: null,
+ range: ast.loc,
+ });
+ }
+ private _visitFunctionLikeDefinition(ast: Parser.ModifierDefinition | Parser.FunctionDefinition): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: ast.name,
+ range: ast.loc,
+ });
+ }
+ private _visitContractDefinition(ast: Parser.ContractDefinition): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: ast.name,
+ range: ast.loc,
+ });
+ }
+ private _getASTInfoForRange(sourceRange: SourceRange, offset: SingleFileSourceRange): ASTInfo | null {
+ const offsetSourceRange = {
+ ...sourceRange,
+ location: {
+ start: {
+ line: sourceRange.location.start.line + offset.start.line,
+ column: sourceRange.location.start.column + offset.start.column,
+ },
+ end: {
+ line: sourceRange.location.end.line + offset.end.line,
+ column: sourceRange.location.end.column + offset.end.column,
+ },
+ },
+ };
+ for (const astInfo of this._astInfos) {
+ const astInfoRange = astInfo.range as SingleFileSourceRange;
+ if (
+ astInfoRange.start.column === offsetSourceRange.location.start.column &&
+ astInfoRange.start.line === offsetSourceRange.location.start.line &&
+ astInfoRange.end.column === offsetSourceRange.location.end.column &&
+ astInfoRange.end.line === offsetSourceRange.location.end.line
+ ) {
+ return astInfo;
+ }
+ }
+ return null;
+ }
+}
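The HACK noted in getASTInfoForRange retries the lookup with a +1 and then a -1 adjustment on end.column before giving up. The same retry shape, reduced to a generic sketch (findByEndColumn is a hypothetical lookup standing in for _getASTInfoForRange):

// Try the exact end column first, then +1, then -1; return null if none match.
function findWithColumnTolerance<T>(
    endColumn: number,
    findByEndColumn: (column: number) => T | null,
): T | null {
    for (const delta of [0, 1, -1]) {
        const match = findByEndColumn(endColumn + delta);
        if (match !== null) {
            return match;
        }
    }
    return null;
}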
diff --git a/packages/sol-cov/src/revert_trace_subprovider.ts b/packages/sol-cov/src/revert_trace_subprovider.ts
index b1d4da10c..fed305bd3 100644
--- a/packages/sol-cov/src/revert_trace_subprovider.ts
+++ b/packages/sol-cov/src/revert_trace_subprovider.ts
@@ -4,10 +4,11 @@ import { getLogger, levels, Logger } from 'loglevel';
import { AbstractArtifactAdapter } from './artifact_adapters/abstract_artifact_adapter';
import { constants } from './constants';
+import { getSourceRangeSnippet } from './get_source_range_snippet';
import { getRevertTrace } from './revert_trace';
import { parseSourceMap } from './source_maps';
import { TraceCollectionSubprovider } from './trace_collection_subprovider';
-import { ContractData, EvmCallStack, SourceRange } from './types';
+import { ContractData, EvmCallStack, SourceRange, SourceSnippet } from './types';
import { utils } from './utils';
/**
@@ -53,7 +54,7 @@ export class RevertTraceSubprovider extends TraceCollectionSubprovider {
}
}
private async _printStackTraceAsync(evmCallStack: EvmCallStack): Promise<void> {
- const sourceRanges: SourceRange[] = [];
+ const sourceSnippets: SourceSnippet[] = [];
if (_.isUndefined(this._contractsData)) {
this._contractsData = await this._artifactAdapter.collectContractsDataAsync();
}
@@ -74,6 +75,7 @@ export class RevertTraceSubprovider extends TraceCollectionSubprovider {
}
const bytecodeHex = stripHexPrefix(bytecode);
const sourceMap = isContractCreation ? contractData.sourceMap : contractData.sourceMapRuntime;
+
const pcToSourceRange = parseSourceMap(
contractData.sourceCodes,
sourceMap,
@@ -87,28 +89,75 @@ export class RevertTraceSubprovider extends TraceCollectionSubprovider {
// actually happens in assembly). In that case, we want to keep
// searching backwards by decrementing the pc until we find a
// mapped source range.
- while (_.isUndefined(sourceRange)) {
+ while (_.isUndefined(sourceRange) && pc > 0) {
sourceRange = pcToSourceRange[pc];
pc -= 1;
- if (pc <= 0) {
- this._logger.warn(
- `could not find matching sourceRange for structLog: ${evmCallStackEntry.structLog}`,
- );
- continue;
- }
}
- sourceRanges.push(sourceRange);
+ if (_.isUndefined(sourceRange)) {
+ this._logger.warn(
+ `could not find matching sourceRange for structLog: ${JSON.stringify(
+ _.omit(evmCallStackEntry.structLog, 'stack'),
+ )}`,
+ );
+ continue;
+ }
+
+ const fileIndex = contractData.sources.indexOf(sourceRange.fileName);
+ const sourceSnippet = getSourceRangeSnippet(sourceRange, contractData.sourceCodes[fileIndex]);
+ if (sourceSnippet !== null) {
+ sourceSnippets.push(sourceSnippet);
+ }
}
- if (sourceRanges.length > 0) {
+ const filteredSnippets = filterSnippets(sourceSnippets);
+ if (filteredSnippets.length > 0) {
this._logger.error('\n\nStack trace for REVERT:\n');
- _.forEach(_.reverse(sourceRanges), sourceRange => {
- this._logger.error(
- `${sourceRange.fileName}:${sourceRange.location.start.line}:${sourceRange.location.start.column}`,
- );
+ _.forEach(_.reverse(filteredSnippets), snippet => {
+ const traceString = getStackTraceString(snippet);
+ this._logger.error(traceString);
});
this._logger.error('\n');
} else {
- this._logger.error('Could not determine stack trace');
+ this._logger.error('REVERT detected but could not determine stack trace');
+ }
+ }
+}
+
+// removes duplicates and if statements
+function filterSnippets(sourceSnippets: SourceSnippet[]): SourceSnippet[] {
+ if (sourceSnippets.length === 0) {
+ return [];
+ }
+ const results: SourceSnippet[] = [sourceSnippets[0]];
+ let prev = sourceSnippets[0];
+ for (const sourceSnippet of sourceSnippets) {
+ if (sourceSnippet.type === 'IfStatement') {
+ continue;
+ } else if (sourceSnippet.source === prev.source) {
+ prev = sourceSnippet;
+ continue;
}
+ results.push(sourceSnippet);
+ prev = sourceSnippet;
+ }
+ return results;
+}
+
+function getStackTraceString(sourceSnippet: SourceSnippet): string {
+ let result = `${sourceSnippet.fileName}:${sourceSnippet.range.start.line}:${sourceSnippet.range.start.column}`;
+ const snippetString = getSourceSnippetString(sourceSnippet);
+ if (snippetString !== '') {
+ result += `:\n ${snippetString}`;
+ }
+ return result;
+}
+
+function getSourceSnippetString(sourceSnippet: SourceSnippet): string {
+ switch (sourceSnippet.type) {
+ case 'ContractDefinition':
+ return `contract ${sourceSnippet.name}`;
+ case 'FunctionDefinition':
+ return `function ${sourceSnippet.name}`;
+ default:
+ return `${sourceSnippet.source}`;
}
}
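For context on the two helpers above: filterSnippets drops IfStatement snippets and collapses consecutive snippets with identical source, and getStackTraceString prefixes each surviving snippet with file:line:column. A toy, made-up example of the filtering (not real trace data):

// Toy input: a function entry, the if-condition guarding the revert,
// and the reverting expression reported twice by adjacent struct logs.
const toySnippets = [
    { type: 'FunctionDefinition', source: 'function foo()' },
    { type: 'IfStatement', source: 'if (x > 0)' },
    { type: 'ExpressionStatement', source: 'revert()' },
    { type: 'ExpressionStatement', source: 'revert()' },
] as any[];
// filterSnippets(toySnippets) keeps the FunctionDefinition and a single
// ExpressionStatement, so the printed stack trace has two frames.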
diff --git a/packages/sol-cov/src/source_maps.ts b/packages/sol-cov/src/source_maps.ts
index f9503e16c..90b21dda1 100644
--- a/packages/sol-cov/src/source_maps.ts
+++ b/packages/sol-cov/src/source_maps.ts
@@ -36,7 +36,7 @@ export function parseSourceMap(
): { [programCounter: number]: SourceRange } {
const bytecode = Uint8Array.from(Buffer.from(bytecodeHex, 'hex'));
const pcToInstructionIndex: { [programCounter: number]: number } = getPcToInstructionIndexMapping(bytecode);
- const locationByOffsetByFileIndex = _.map(sourceCodes, getLocationByOffset);
+ const locationByOffsetByFileIndex = _.map(sourceCodes, s => (_.isUndefined(s) ? {} : getLocationByOffset(s)));
const entries = srcMap.split(';');
let lastParsedEntry: SourceLocation = {} as any;
const instructionIndexToSourceRange: { [instructionIndex: number]: SourceRange } = {};
@@ -56,7 +56,7 @@ export function parseSourceMap(
length,
fileIndex,
};
- if (parsedEntry.fileIndex !== -1) {
+ if (parsedEntry.fileIndex !== -1 && !_.isUndefined(locationByOffsetByFileIndex[parsedEntry.fileIndex])) {
const sourceRange = {
location: {
start: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset],
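Both guards above protect lookups that follow parsing of the compiler's compressed source map, in which each ';'-separated entry has the form offset:length:fileIndex:jump and an empty field inherits its value from the previous entry. A rough sketch of expanding one such entry, with the jump field and sol-cov's other parsing details omitted:

interface ToySourceLocation {
    offset: number;
    length: number;
    fileIndex: number;
}
// Expand one compressed entry: an empty or missing field repeats the last value.
function expandSourceMapEntry(entry: string, last: ToySourceLocation): ToySourceLocation {
    const [offsetStr, lengthStr, fileIndexStr] = entry.split(':');
    return {
        offset: offsetStr ? parseInt(offsetStr, 10) : last.offset,
        length: lengthStr ? parseInt(lengthStr, 10) : last.length,
        fileIndex: fileIndexStr ? parseInt(fileIndexStr, 10) : last.fileIndex,
    };
}
// expandSourceMapEntry('::', { offset: 10, length: 5, fileIndex: 0 })
// returns { offset: 10, length: 5, fileIndex: 0 }.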
diff --git a/packages/sol-cov/src/types.ts b/packages/sol-cov/src/types.ts
index cef7141cb..54ade0400 100644
--- a/packages/sol-cov/src/types.ts
+++ b/packages/sol-cov/src/types.ts
@@ -1,4 +1,5 @@
import { StructLog } from 'ethereum-types';
+import * as Parser from 'solidity-parser-antlr';
export interface LineColumn {
line: number;
@@ -114,3 +115,12 @@ export interface EvmCallStackEntry {
}
export type EvmCallStack = EvmCallStackEntry[];
+
+export interface SourceSnippet {
+ source: string;
+ fileName: string;
+ type: string;
+ node: Parser.ASTNode;
+ name: string | null;
+ range: SingleFileSourceRange;
+}
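As an aid to reading the new interface above, a made-up SourceSnippet literal (all field values are invented, and the node placeholder stands in for a real solidity-parser-antlr AST node; this assumes it sits next to the interface so the types are in scope):

const exampleSnippet: SourceSnippet = {
    source: 'revert();',
    fileName: 'contracts/Example.sol',
    type: 'ExpressionStatement',
    node: {} as any, // placeholder; a real node comes from the parser
    name: null,
    range: { start: { line: 42, column: 8 }, end: { line: 42, column: 17 } },
};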
diff --git a/packages/sol-cov/src/utils.ts b/packages/sol-cov/src/utils.ts
index 4f16a1cda..b696bd463 100644
--- a/packages/sol-cov/src/utils.ts
+++ b/packages/sol-cov/src/utils.ts
@@ -5,6 +5,10 @@ import * as _ from 'lodash';
import { ContractData, LineColumn, SingleFileSourceRange } from './types';
+// This is the minimum length of valid contract bytecode. The Solidity compiler
+// metadata is 86 bytes. If you add the '0x' prefix, we get 88.
+const MIN_CONTRACT_BYTECODE_LENGTH = 88;
+
export const utils = {
compareLineColumn(lhs: LineColumn, rhs: LineColumn): number {
return lhs.line !== rhs.line ? lhs.line - rhs.line : lhs.column - rhs.column;
@@ -38,7 +42,15 @@ export const utils = {
}
const contractData = _.find(contractsData, contractDataCandidate => {
const bytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.bytecode);
+ // If the bytecode is less than the minimum length, we are probably
+ // dealing with an interface. This isn't what we're looking for.
+ if (bytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
+ return false;
+ }
const runtimeBytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.runtimeBytecode);
+ if (runtimeBytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
+ return false;
+ }
// We use that function to find by bytecode or runtimeBytecode. Those are quasi-random strings so
// collisions are practically impossible and it allows us to reuse that code
return !_.isNull(bytecode.match(bytecodeRegex)) || !_.isNull(bytecode.match(runtimeBytecodeRegex));
@@ -66,4 +78,10 @@ export const utils = {
}
return structLogs;
},
+ getRange(sourceCode: string, range: SingleFileSourceRange): string {
+ const lines = sourceCode.split('\n').slice(range.start.line - 1, range.end.line);
+ lines[lines.length - 1] = lines[lines.length - 1].slice(0, range.end.column);
+ lines[0] = lines[0].slice(range.start.column);
+ return lines.join('\n');
+ },
};
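Finally, a hypothetical call to the new utils.getRange above, assuming 1-indexed lines and 0-indexed columns as the slicing suggests; the './utils' import path assumes the caller lives inside packages/sol-cov/src.

import { utils } from './utils';

const source = 'contract A {\n    function f() public {\n        revert();\n    }\n}\n';
const range = { start: { line: 3, column: 8 }, end: { line: 3, column: 17 } };
// Slices line 3 from column 8 up to column 17:
const snippet = utils.getRange(source, range); // === 'revert();'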