}
this.l2Tokens[erc20L1Address] = tokenData
const walletAddress = await this.getWalletAddress()
// handle custom L2 token:
const customTokenAddress = await this.arbTokenBridge.customToken(
erc20L1Address
)
if (customTokenAddress !== ethers.constants.AddressZero) {
const customTokenContract = IArbToken__factory.connect(
customTokenAddress,
this.l2Signer
)
tokenData.CUSTOM = {
contract: customTokenContract,
balance: BigNumber.from(0),
}
try {
const balance = (await customTokenContract.balanceOf(
walletAddress
)) as BigNumber
tokenData.CUSTOM.balance = balance
} catch (err) {
console.warn("Count not get custom token's balance", err)
}
}
const l2ERC20Address = await this.getERC20L2Address(erc20L1Address)
const l2ERC777Address = await this.getERC777L2Address(erc20L1Address)
// check if standard arb erc20:
if (!tokenData.ERC20) {
// ... (standard ERC20 token-data lookup continues in the full source)
}
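This fragment is the L2 token-data refresh from arb-ts's bridge class: it caches per-token state, asks the token bridge whether a custom L2 token is registered, and only then reads its balance, tolerating a reverting balanceOf. A minimal usage sketch, assuming the logic above is wrapped in a method named getAndUpdateL2TokenData on a bridge instance (both names are assumptions for illustration):

// Hypothetical caller for the refresh logic above; `bridge` is assumed to be
// an instance of the surrounding class and `getAndUpdateL2TokenData` an
// assumed method name wrapping the code shown.
async function logCustomTokenBalance(bridge: any, erc20L1Address: string) {
  const tokenData = await bridge.getAndUpdateL2TokenData(erc20L1Address)
  if (tokenData?.CUSTOM) {
    // balance is a BigNumber; it stays zero if balanceOf reverted above
    console.log('custom token balance:', tokenData.CUSTOM.balance.toString())
  } else {
    console.log('no custom L2 token registered for', erc20L1Address)
  }
}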
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
const queueIndex = event.args._startingQueueIndex.add(
BigNumber.from(enqueuedCount)
)
// Okay, so. Since events are processed in parallel, we don't know if the Enqueue
// event associated with this queue element has already been processed. So we'll ask
// the api to fetch that data for itself later on and we use fake values for some
// fields. The real TODO here is to make sure we fix this data structure to avoid ugly
// "dummy" fields.
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(0).toNumber(),
timestamp: BigNumber.from(0).toNumber(),
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: constants.AddressZero,
data: '0x',
queueOrigin: 'l1',
value: '0x0',
queueIndex: queueIndex.toNumber(),
decoded: null,
confirmed: true,
})
enqueuedCount++
transactionIndex++
}
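As the comment above explains, queue ('l1') entries are written with zeroed blockNumber/timestamp and AddressZero target/origin, to be reconciled with the matching Enqueue event later. A sketch of that backfill, assuming the database exposes a getEnqueueByIndex lookup that returns the enqueued transaction's real fields (or null if the event has not been indexed yet):

import { BigNumber } from 'ethers'

// Sketch: replace the zeroed "dummy" fields on an L1-origin transaction entry
// with real values from its Enqueue event. `db.getEnqueueByIndex` is assumed
// to return { blockNumber, timestamp, target, origin, data } or null.
async function backfillQueueTransaction(db: any, entry: any): Promise<any> {
  if (entry.queueOrigin !== 'l1') {
    return entry // sequencer transactions already carry real values
  }
  const enqueue = await db.getEnqueueByIndex(entry.queueIndex)
  if (enqueue === null) {
    return null // Enqueue event not indexed yet; caller should retry later
  }
  return {
    ...entry,
    blockNumber: enqueue.blockNumber,
    timestamp: enqueue.timestamp,
    target: enqueue.target,
    origin: enqueue.origin,
    data: enqueue.data,
  }
}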
parseBlock: async (
block: any,
chainId: number
): Promise<{
transactionEntry: TransactionEntry
stateRootEntry: StateRootEntry
}> => {
const transaction = block.transactions[0]
const transactionIndex =
transaction.index === null || transaction.index === undefined
? BigNumber.from(transaction.blockNumber).toNumber() - 1
: BigNumber.from(transaction.index).toNumber()
let transactionEntry: Partial<TransactionEntry> = {
// Legacy support.
index: transactionIndex,
value: transaction.value,
batchIndex: null,
blockNumber: BigNumber.from(transaction.l1BlockNumber).toNumber(),
timestamp: BigNumber.from(transaction.l1Timestamp).toNumber(),
queueOrigin: transaction.queueOrigin,
confirmed: false,
}
if (transaction.queueOrigin === 'sequencer') {
const decodedTransaction: DecodedSequencerBatchTransaction = {
// sig mirrors the signature object passed to serialize() below;
// the remaining fields echo the raw transaction (reconstructed here).
sig: {
v: BigNumber.from(transaction.v).toNumber(),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
},
value: transaction.value,
gasLimit: BigNumber.from(transaction.gas).toNumber(),
gasPrice: BigNumber.from(transaction.gasPrice).toNumber(),
nonce: BigNumber.from(transaction.nonce).toNumber(),
target: transaction.to,
data: transaction.input,
}
transactionEntry = {
...transactionEntry,
gasLimit: `${SEQUENCER_GAS_LIMIT}`, // ?
target: SEQUENCER_ENTRYPOINT_ADDRESS,
origin: null,
data: serialize(
{
value: transaction.value,
gasLimit: transaction.gas,
gasPrice: transaction.gasPrice,
nonce: transaction.nonce,
to: transaction.to,
data: transaction.input,
chainId,
},
{
v: BigNumber.from(transaction.v).toNumber(),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
}
),
decoded: decodedTransaction,
queueIndex: null,
}
} else {
transactionEntry = {
...transactionEntry,
gasLimit: BigNumber.from(transaction.gas).toString(),
target: ethers.utils.getAddress(transaction.to),
origin: ethers.utils.getAddress(transaction.l1TxOrigin),
data: transaction.input,
decoded: null,
queueIndex:
transaction.queueIndex === null || transaction.queueIndex === undefined
? BigNumber.from(transaction.nonce).toNumber() // legacy support, like `index` above
: BigNumber.from(transaction.queueIndex).toNumber(),
}
}
// Each block carries exactly one state root alongside its single transaction.
const stateRootEntry: StateRootEntry = {
index: transactionIndex,
batchIndex: null,
value: block.stateRoot,
confirmed: false,
}
return {
transactionEntry: transactionEntry as TransactionEntry,
stateRootEntry,
}
},
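A sketch of driving parseBlock, assuming an L2 endpoint whose blocks carry the custom l1BlockNumber, l1Timestamp and queueOrigin fields read above; the handler object name and RPC URL are placeholders:

import { providers } from 'ethers'

// Sketch: feed a single-transaction L2 block into the parser above.
// `handler` stands in for the object that owns parseBlock.
async function indexLatestBlock(handler: any, chainId: number) {
  const provider = new providers.JsonRpcProvider('http://localhost:9545')
  // Fetch the block with full transaction objects, not just hashes.
  const block = await provider.send('eth_getBlockByNumber', ['latest', true])
  const { transactionEntry, stateRootEntry } = await handler.parseBlock(
    block,
    chainId
  )
  console.log('indexed tx', transactionEntry.index, stateRootEntry.value)
}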
const stateRootEntries: StateRootEntry[] = []
for (let i = 0; i < stateRoots.length; i++) {
stateRootEntries.push({
index: event.args._prevTotalElements.add(BigNumber.from(i)).toNumber(),
batchIndex: event.args._batchIndex.toNumber(),
value: stateRoots[i],
confirmed: true,
})
}
// Using .toNumber() here and in other places because I want to move everything to use
// BigNumber + hex, but that'll take a lot of work. This makes it easier in the future.
const stateRootBatchEntry: StateRootBatchEntry = {
index: event.args._batchIndex.toNumber(),
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
size: event.args._batchSize.toNumber(),
root: event.args._batchRoot,
prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash,
}
return {
stateRootBatchEntry,
stateRootEntries,
}
},
storeEvent: async (entry, db) => {
// Persist the batch header and its state roots (TransportDB method names).
await db.putStateRootBatchEntries([entry.stateRootBatchEntry])
await db.putStateRootEntries(entry.stateRootEntries)
},
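Each state root's global index above is the batch's _prevTotalElements plus the root's offset inside the batch, so consecutive batches tile the chain-level index space without gaps. A self-contained check of that arithmetic:

import { BigNumber } from 'ethers'

// Worked example of the index math used above: a batch whose
// _prevTotalElements is 100 and which carries 5 roots produces
// global indices 100..104.
const prevTotalElements = BigNumber.from(100)
const batchSize = 5
const indices = Array.from({ length: batchSize }, (_, i) =>
  prevTotalElements.add(BigNumber.from(i)).toNumber()
)
console.log(indices) // [100, 101, 102, 103, 104]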
this.l2Provider = l2Provider
this.arbSys = ArbSys__factory.connect(ARB_SYS_ADDRESS, l2Signer)
this.arbTokenBridge = ArbTokenBridge__factory.connect(
arbTokenBridgeAddress,
l2Signer
)
this.arbRetryableTx = ArbRetryableTx__factory.connect(
ARB_RETRYABLE_TX_ADDRESS,
l2Signer
)
this.l2EthBalance = BigNumber.from(0)
}
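For context, this constructor fragment wires up the ArbSys, token bridge, and retryable-transaction precompile bindings on the L2 side. A sketch of instantiating it, assuming the class is arb-ts's L2Bridge with an (arbTokenBridgeAddress, l2Signer) constructor; the signature is inferred from the assignments above, not confirmed by the snippet:

import { providers, Wallet } from 'ethers'
import { L2Bridge } from 'arb-ts' // assumed export

// Assumed shape: new L2Bridge(arbTokenBridgeAddress: string, l2Signer: Signer)
const l2Provider = new providers.JsonRpcProvider('https://arb1.arbitrum.io/rpc')
const l2Signer = new Wallet(process.env.L2_PRIVKEY as string, l2Provider)
// Placeholder address; the real token bridge address goes here.
const bridge = new L2Bridge('0x0000000000000000000000000000000000000000', l2Signer)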
const transactionBatchEntry: TransactionBatchEntry = {
index: extraData.batchIndex.toNumber(),
root: extraData.batchRoot,
size: extraData.batchSize.toNumber(),
prevTotalElements: extraData.prevTotalElements.toNumber(),
extraData: extraData.batchExtraData,
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash,
}
return {
transactionBatchEntry,
transactionEntries,
}
},
storeEvent: async (entry, db) => {
// Persist the batch header and its transactions (TransportDB method names).
await db.putTransactionBatchEntries([entry.transactionBatchEntry])
await db.putTransactionEntries(entry.transactionEntries)
},
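Everything in transactionBatchEntry above comes from the extraData captured alongside the TransactionBatchAppended event. The field accesses imply roughly this shape, written out as an illustrative interface (an assumption, not the library's exported type):

import { BigNumber, BigNumberish } from 'ethers'

// Shape implied by the accesses above: batch counters arrive as BigNumber,
// block metadata as BigNumberish, and everything else as hex strings.
interface BatchAppendedExtraData {
  batchIndex: BigNumber
  batchRoot: string
  batchSize: BigNumber
  prevTotalElements: BigNumber
  batchExtraData: string
  blockNumber: BigNumberish
  timestamp: BigNumberish
  gasLimit: BigNumberish
  submitter: string
  l1TransactionHash: string
}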
static bitFlipSeqNum = (seqNum: BigNumber) => {
return seqNum.or(BigNumber.from(1).shl(255))
}
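bitFlipSeqNum sets bit 255 of a sequence number while leaving the low bits intact, so flipped values can never collide with ordinary sequence numbers (which stay far below 2^255); this kind of flip appears in arb-ts when deriving L2 hashes for L1-initiated messages. A quick demonstration:

import { BigNumber } from 'ethers'

// Same operation as the static above, restated for a runnable demo.
const bitFlipSeqNum = (seqNum: BigNumber) =>
  seqNum.or(BigNumber.from(1).shl(255))

const flipped = bitFlipSeqNum(BigNumber.from(5))
console.log(flipped.shr(255).toNumber()) // 1  (bit 255 is set)
console.log(flipped.and(0xff).toNumber()) // 5  (low bits untouched)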
const sequencerTransaction = parseSequencerBatchTransaction(
calldata,
nextTxPointer
)
const decoded = maybeDecodeSequencerBatchTransaction(
sequencerTransaction
)
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(context.blockNumber).toNumber(),
timestamp: BigNumber.from(context.timestamp).toNumber(),
gasLimit: BigNumber.from(extraData.gasLimit).toString(),
target: SEQUENCER_ENTRYPOINT_ADDRESS,
origin: null,
data: toHexString(sequencerTransaction),
queueOrigin: 'sequencer',
value: decoded ? decoded.value : '0x0',
queueIndex: null,
decoded,
confirmed: true,
})
nextTxPointer += 3 + sequencerTransaction.length
transactionIndex++
}
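The pointer bump above advances past a 3-byte length prefix plus the transaction bytes, which pins down the batch layout: transactions sit back-to-back in calldata as [3-byte big-endian length][raw tx]. A compatible sketch of parseSequencerBatchTransaction under that assumption (reconstructed for illustration, not copied from the library):

import { BigNumber } from 'ethers'

// Sketch: read one length-prefixed transaction out of batch calldata.
// Layout assumed: [3-byte length][raw tx bytes], repeated.
const parseSequencerBatchTransaction = (
  calldata: Buffer,
  offset: number
): Buffer => {
  const transactionLength = BigNumber.from(
    calldata.slice(offset, offset + 3)
  ).toNumber()
  return calldata.slice(offset + 3, offset + 3 + transactionLength)
}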