author     Alex Browne <stephenalexbrowne@gmail.com>    2018-11-08 08:48:25 +0800
committer  Alex Browne <stephenalexbrowne@gmail.com>    2018-12-05 06:24:48 +0800
commit     954c3b9272556723c10fd02ad6b753ac98ca47fa (patch)
tree       d35620102750bf6f5de6acbc3aee096b923be6b2 /packages/pipeline/src/scripts
parent     9cb89725c9289a821db0ebb1809bcd1d0e64c02a (diff)
Split index.ts into multiple scripts in scripts/ and detect last known block when pulling events
Diffstat (limited to 'packages/pipeline/src/scripts')
-rw-r--r--    packages/pipeline/src/scripts/merge_v2_events.ts        81
-rw-r--r--    packages/pipeline/src/scripts/pull_missing_events.ts    60
-rw-r--r--    packages/pipeline/src/scripts/update_relayer_info.ts    31
3 files changed, 172 insertions(+), 0 deletions(-)
diff --git a/packages/pipeline/src/scripts/merge_v2_events.ts b/packages/pipeline/src/scripts/merge_v2_events.ts
new file mode 100644
index 000000000..227ece121
--- /dev/null
+++ b/packages/pipeline/src/scripts/merge_v2_events.ts
@@ -0,0 +1,81 @@
+import { web3Factory } from '@0x/dev-utils';
+import 'reflect-metadata';
+import { Connection, createConnection } from 'typeorm';
+
+import { ExchangeEventsSource } from '../data_sources/contract-wrappers/exchange_events';
+import { ExchangeFillEvent } from '../entities/ExchangeFillEvent';
+import { deployConfig } from '../ormconfig';
+import { parseExchangeEvents } from '../parsers/events';
+
+let connection: Connection;
+
+(async () => {
+    connection = await createConnection(deployConfig);
+    await getExchangeEventsAsync();
+    await mergeExchangeEventsAsync();
+    console.log('Exiting process');
+    process.exit(0);
+})();
+
+// TODO(albrow): Errors do not appear to be handled correctly: if you use the
+// wrong rpcUrl, the script just returns early with no error (see the sketch
+// after this file's diff).
+async function getExchangeEventsAsync(): Promise<void> {
+    console.log('Getting event logs...');
+    const provider = web3Factory.getRpcProvider({
+        rpcUrl: 'https://mainnet.infura.io',
+    });
+    const eventsRepository = connection.getRepository(ExchangeFillEvent);
+    const exchangeEvents = new ExchangeEventsSource(provider, 1);
+    const eventLogs = await exchangeEvents.getFillEventsAsync();
+    console.log('Parsing events...');
+    const events = parseExchangeEvents(eventLogs);
+    console.log(`Retrieved and parsed ${events.length} total events.`);
+    console.log('Saving events...');
+    // Save events one at a time; saving a very large array in a single call
+    // can cause problems.
+    for (const event of events) {
+        await eventsRepository.save(event);
+    }
+    console.log('Saved events.');
+}
+
+const insertEventsRawQuery = `INSERT INTO events_raw (
+    event_type,
+    error_id,
+    order_hash,
+    maker,
+    maker_amount,
+    maker_fee,
+    maker_token,
+    taker,
+    taker_amount,
+    taker_fee,
+    taker_token,
+    txn_hash,
+    fee_recipient,
+    block_number,
+    log_index
+)
+(
+    SELECT
+        'LogFill',
+        null,
+        "orderHash",
+        "makerAddress",
+        "makerAssetFilledAmount"::numeric(78),
+        "makerFeePaid"::numeric(78),
+        "makerTokenAddress",
+        "takerAddress",
+        "takerAssetFilledAmount"::numeric(78),
+        "takerFeePaid"::numeric(78),
+        "takerTokenAddress",
+        "transactionHash",
+        "feeRecipientAddress",
+        "blockNumber",
+        "logIndex"
+    FROM exchange_fill_event
+) ON CONFLICT (order_hash, txn_hash, log_index) DO NOTHING`;
+
+async function mergeExchangeEventsAsync(): Promise<void> {
+    console.log('Merging results into events_raw...');
+    await connection.query(insertEventsRawQuery);
+}
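
The TODO above notes that failures are effectively swallowed: the async IIFE never attaches a rejection handler, so a bad rpcUrl can make the script exit looking successful. A minimal sketch of one way to surface errors and exit non-zero; the runScriptAsync helper is hypothetical and not part of this commit:

    // Hypothetical helper, sketched only to illustrate the missing error handling.
    async function runScriptAsync(main: () => Promise<void>): Promise<void> {
        try {
            await main();
            process.exit(0);
        } catch (err) {
            // Without a handler like this, a rejected promise is silently dropped.
            console.error(err);
            process.exit(1);
        }
    }

    // Possible usage with the functions defined in merge_v2_events.ts above:
    // runScriptAsync(async () => {
    //     connection = await createConnection(deployConfig);
    //     await getExchangeEventsAsync();
    //     await mergeExchangeEventsAsync();
    // });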
diff --git a/packages/pipeline/src/scripts/pull_missing_events.ts b/packages/pipeline/src/scripts/pull_missing_events.ts
new file mode 100644
index 000000000..1f71722a3
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_missing_events.ts
@@ -0,0 +1,60 @@
+import { web3Factory } from '@0x/dev-utils';
+import { Web3ProviderEngine } from '@0x/subproviders';
+import R = require('ramda');
+import 'reflect-metadata';
+import { Connection, createConnection, Repository } from 'typeorm';
+
+import { ExchangeEventsSource } from '../data_sources/contract-wrappers/exchange_events';
+import { ExchangeFillEvent } from '../entities/ExchangeFillEvent';
+import { deployConfig } from '../ormconfig';
+import { parseExchangeEvents } from '../parsers/events';
+
+const EXCHANGE_START_BLOCK = 6271590; // Block number when the Exchange contract was deployed to mainnet.
+const START_BLOCK_OFFSET = 1000; // Number of blocks before the last known block to consider when updating fill events.
+const BATCH_SAVE_SIZE = 1000; // Number of events to save at once.
+
+let connection: Connection;
+
+(async () => {
+    connection = await createConnection(deployConfig);
+    const provider = web3Factory.getRpcProvider({
+        rpcUrl: 'https://mainnet.infura.io',
+    });
+    await getExchangeEventsAsync(provider);
+    process.exit(0);
+})();
+
+async function getExchangeEventsAsync(provider: Web3ProviderEngine): Promise<void> {
+    console.log('Checking existing event logs...');
+    const eventsRepository = connection.getRepository(ExchangeFillEvent);
+    const startBlock = await getStartBlockAsync(eventsRepository);
+    console.log(`Getting event logs starting at ${startBlock}...`);
+    const exchangeEvents = new ExchangeEventsSource(provider, 1);
+    const eventLogs = await exchangeEvents.getFillEventsAsync(startBlock);
+    console.log('Parsing events...');
+    const events = parseExchangeEvents(eventLogs);
+    console.log(`Retrieved and parsed ${events.length} total events.`);
+    console.log('Saving events...');
+    // Split the events into batches of size BATCH_SAVE_SIZE and save each batch
+    // in a single request. This reduces round-trip latency to the DB. We need
+    // to batch this way because saving an extremely large number of events in a
+    // single request causes problems (see the splitEvery sketch after this diff).
+    for (const eventsBatch of R.splitEvery(BATCH_SAVE_SIZE, events)) {
+        await eventsRepository.save(eventsBatch);
+    }
+    const totalEvents = await eventsRepository.count();
+    console.log(`Done saving events. There are now ${totalEvents} total events.`);
+}
+
+async function getStartBlockAsync(eventsRepository: Repository<ExchangeFillEvent>): Promise<number> {
+    const fillEventCount = await eventsRepository.count();
+    if (fillEventCount === 0) {
+        console.log('No existing fill events found.');
+        return EXCHANGE_START_BLOCK;
+    }
+    const queryResult = await connection.query(
+        'SELECT "blockNumber" FROM exchange_fill_event ORDER BY "blockNumber" DESC LIMIT 1',
+    );
+    const lastKnownBlock = queryResult[0].blockNumber;
+    return lastKnownBlock - START_BLOCK_OFFSET;
+}
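
pull_missing_events.ts looks up the last known block and re-scans from START_BLOCK_OFFSET blocks before it, presumably so that any fill events missed near the tip of the chain are fetched again; the new rows are then saved in batches. The batching comment above relies on Ramda's splitEvery, which slices a list into consecutive chunks of at most the given length (the last chunk may be shorter). A small standalone sketch using placeholder numbers instead of parsed fill events:

    import * as R from 'ramda';

    const BATCH_SAVE_SIZE = 1000;
    // Stand-in for the parsed fill events; here just 2500 numbers.
    const events = Array.from({ length: 2500 }, (_, i) => i);

    // splitEvery(1000, events) => [[0..999], [1000..1999], [2000..2499]]
    const batches = R.splitEvery(BATCH_SAVE_SIZE, events);
    console.log(batches.map(batch => batch.length)); // [ 1000, 1000, 500 ]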
diff --git a/packages/pipeline/src/scripts/update_relayer_info.ts b/packages/pipeline/src/scripts/update_relayer_info.ts
new file mode 100644
index 000000000..05e045ff4
--- /dev/null
+++ b/packages/pipeline/src/scripts/update_relayer_info.ts
@@ -0,0 +1,31 @@
+import 'reflect-metadata';
+import { Connection, createConnection } from 'typeorm';
+
+import { RelayerRegistrySource } from '../data_sources/relayer-registry';
+import { Relayer } from '../entities/Relayer';
+import { deployConfig } from '../ormconfig';
+import { parseRelayers } from '../parsers/relayer_registry';
+
+// NOTE(albrow): We need to manually update this URL for now. Fix this when we
+// have the relayer-registry behind semantic versioning (see the sketch after
+// this diff).
+const RELAYER_REGISTRY_URL =
+ 'https://raw.githubusercontent.com/0xProject/0x-relayer-registry/4701c85677d161ea729a466aebbc1826c6aa2c0b/relayers.json';
+
+let connection: Connection;
+
+(async () => {
+    connection = await createConnection(deployConfig);
+    await getRelayersAsync();
+    process.exit(0);
+})();
+
+async function getRelayersAsync(): Promise<void> {
+    console.log('Getting latest relayer info...');
+    const relayerRepository = connection.getRepository(Relayer);
+    const relayerSource = new RelayerRegistrySource(RELAYER_REGISTRY_URL);
+    const relayersResp = await relayerSource.getRelayerInfoAsync();
+    const relayers = parseRelayers(relayersResp);
+    console.log('Saving relayer info...');
+    await relayerRepository.save(relayers);
+    console.log('Done saving relayer info.');
+}
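
The NOTE in update_relayer_info.ts pins RELAYER_REGISTRY_URL to a specific commit of 0xProject/0x-relayer-registry and has to be bumped by hand. A small sketch of how that URL is put together; the rawRegistryUrl helper is hypothetical and not part of this commit:

    // Hypothetical helper; the script above hard-codes the resulting URL directly.
    function rawRegistryUrl(commitSha: string): string {
        return `https://raw.githubusercontent.com/0xProject/0x-relayer-registry/${commitSha}/relayers.json`;
    }

    // The commit SHA pinned in this diff:
    const RELAYER_REGISTRY_URL = rawRegistryUrl('4701c85677d161ea729a466aebbc1826c6aa2c0b');
    console.log(RELAYER_REGISTRY_URL);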