Commit 04ed4ba0 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into willc/atst-cli-gif

parents b4000498 d9202f95
---
'@eth-optimism/atst': minor
---
Remove the broken allowFailures option
---
'@eth-optimism/atst': minor
---
Deprecate parseAttestationBytes and encodeRawKey in favor of createKey and createValue
@@ -147,6 +147,8 @@ jobs:
        - "."
  docker-publish:
+    environment:
+      DOCKER_BUILDKIT: 1
    parameters:
      docker_name:
        description: Docker image name
@@ -154,6 +156,13 @@ jobs:
      docker_tags:
        description: Docker image tags as csv
        type: string
+      docker_file:
+        description: Path to Dockerfile
+        type: string
+      docker_context:
+        description: Docker build context
+        type: string
+        default: "."
      registry:
        description: Docker registry
        type: string
@@ -162,17 +171,14 @@ jobs:
        description: Docker repo
        type: string
        default: "oplabs-tools-artifacts/images"
+      platforms:
+        description: Platforms to build for
+        type: string
+        default: "linux/amd64"
    machine:
      image: ubuntu-2204:2022.07.1
      resource_class: xlarge
    steps:
-      - attach_workspace:
-          at: /tmp/docker_images
-      - run:
-          name: Docker load
-          command: |
-            DOCKER_LABELS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g")
-            echo -ne $DOCKER_LABELS | tr ' ' '\n' | xargs -I {} docker load -i /tmp/docker_images/<<parameters.docker_name>>_{}.tar
      - gcp-oidc-authenticate
      # Below is CircleCI recommended way of specifying nameservers on an Ubuntu box:
      # https://support.circleci.com/hc/en-us/articles/7323511028251-How-to-set-custom-DNS-on-Ubuntu-based-images-using-netplan
@@ -181,13 +187,19 @@ jobs:
      - run: sudo sed -i "s/addresses:/ addresses":" [8.8.8.8, 8.8.4.4] /g" /etc/netplan/50-cloud-init.yaml
      - run: cat /etc/netplan/50-cloud-init.yaml
      - run: sudo netplan apply
+      - checkout
      - run:
-          name: Publish
+          name: Build & Publish
          command: |
            gcloud auth configure-docker <<parameters.registry>>
            IMAGE_BASE="<<parameters.registry>>/<<parameters.repo>>/<<parameters.docker_name>>"
-            DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|${IMAGE_BASE}:|")
-            echo -ne $DOCKER_TAGS | tr ' ' '\n' | xargs -L1 docker push
+            DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
+            docker context create buildx-build
+            docker buildx create --use buildx-build
+            docker buildx build --platform=<<parameters.platforms>> --push \
+              $(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
+              -f <<parameters.docker_file>> \
+              <<parameters.docker_context>>
  docker-release:
    environment:
@@ -213,6 +225,10 @@ jobs:
        description: Docker repo
        type: string
        default: "oplabs-tools-artifacts/images"
+      platforms:
+        description: Platforms to build for
+        type: string
+        default: "linux/amd64"
    machine:
      image: ubuntu-2204:2022.07.1
      resource_class: xlarge
@@ -220,26 +236,21 @@ jobs:
      - gcp-cli/install
      - gcp-oidc-authenticate
      - checkout
-      - run:
-          name: Build
-          command: |
-            IMAGE_BASE="<<parameters.registry>>/<<parameters.repo>>/<<parameters.docker_name>>"
-            DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
-            docker build \
-              $(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
-              -f <<parameters.docker_file>> \
-              --build-arg VERSION=$CIRCLE_TAG \
-              <<parameters.docker_context>>
      - run:
          name: Configure Docker
          command: |
            gcloud auth configure-docker <<parameters.registry>>
      - run:
-          name: Publish
+          name: Build & Publish
          command: |
            IMAGE_BASE="<<parameters.registry>>/<<parameters.repo>>/<<parameters.docker_name>>"
-            DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|${IMAGE_BASE}:|")
-            echo -ne $DOCKER_TAGS | tr ' ' '\n' | xargs -L1 docker push
+            DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
+            docker context create buildx-build
+            docker buildx create --use buildx-build
+            docker buildx build --platform=<<parameters.platforms>> --push \
+              $(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
+              -f <<parameters.docker_file>> \
+              <<parameters.docker_context>>
      - run:
          name: Tag
          command: |
@@ -1062,11 +1073,11 @@ workflows:
      - docker-publish:
          name: op-node-docker-publish
          docker_name: op-node
+          docker_file: op-node/Dockerfile
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          context:
            - oplabs-gcr
-          requires:
-            - op-node-docker-build
+          platforms: "linux/amd64,linux/arm64"
      - docker-build:
          name: op-batcher-docker-build
          docker_file: op-batcher/Dockerfile
@@ -1075,12 +1086,12 @@
          docker_context: .
      - docker-publish:
          name: op-batcher-docker-publish
+          docker_file: op-batcher/Dockerfile
          docker_name: op-batcher
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          context:
            - oplabs-gcr
-          requires:
-            - op-batcher-docker-build
+          platforms: "linux/amd64,linux/arm64"
      - docker-build:
          name: op-proposer-docker-build
          docker_file: op-proposer/Dockerfile
@@ -1089,12 +1100,12 @@
          docker_context: .
      - docker-publish:
          name: op-proposer-docker-publish
+          docker_file: op-proposer/Dockerfile
          docker_name: op-proposer
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          context:
            - oplabs-gcr
-          requires:
-            - op-proposer-docker-build
+          platforms: "linux/amd64,linux/arm64"
      - docker-build:
          name: op-heartbeat-docker-build
          docker_file: op-heartbeat/Dockerfile
@@ -1103,12 +1114,11 @@
          docker_context: .
      - docker-publish:
          name: op-heartbeat-docker-publish
+          docker_file: op-heartbeat/Dockerfile
          docker_name: op-heartbeat
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          context:
            - oplabs-gcr
-          requires:
-            - op-heartbeat-docker-build
      - docker-build:
          name: indexer-docker-build
          docker_file: indexer/Dockerfile
@@ -1117,12 +1127,11 @@
          docker_context: .
      - docker-publish:
          name: indexer-docker-publish
+          docker_file: indexer/Dockerfile
          docker_name: indexer
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          context:
            - oplabs-gcr
-          requires:
-            - indexer-docker-build
      - hive-test:
          name: hive-test-rpc
          version: <<pipeline.git.revision>>
@@ -1167,6 +1176,7 @@
          docker_name: op-node
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          docker_context: .
+          platforms: "linux/amd64,linux/arm64"
          context:
            - oplabs-gcr-release
          requires:
@@ -1182,6 +1192,7 @@
          docker_name: op-batcher
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          docker_context: .
+          platforms: "linux/amd64,linux/arm64"
          context:
            - oplabs-gcr-release
          requires:
@@ -1197,6 +1208,7 @@
          docker_name: op-proposer
          docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
          docker_context: .
+          platforms: "linux/amd64,linux/arm64"
          context:
            - oplabs-gcr-release
          requires:
......
-FROM golang:1.18.0-alpine3.15 as builder
+FROM --platform=$BUILDPLATFORM golang:1.18.0-alpine3.15 as builder
ARG VERSION=v0.0.0
@@ -17,7 +17,11 @@ COPY ./.git /app/.git
WORKDIR /app/op-batcher
-RUN make op-batcher VERSION="$VERSION"
+RUN go mod download
+
+ARG TARGETOS TARGETARCH
+
+RUN make op-batcher VERSION="$VERSION" GOOS=$TARGETOS GOARCH=$TARGETARCH
FROM alpine:3.15
......
@@ -8,7 +8,7 @@ LDFLAGSSTRING +=-X main.Version=$(VERSION)
LDFLAGS := -ldflags "$(LDFLAGSSTRING)"
op-batcher:
-	env GO111MODULE=on go build -v $(LDFLAGS) -o ./bin/op-batcher ./cmd
+	env GO111MODULE=on GOOS=$(TARGETOS) GOARCH=$(TARGETARCH) go build -v $(LDFLAGS) -o ./bin/op-batcher ./cmd
clean:
	rm bin/op-batcher
......
@@ -77,7 +77,7 @@ func TestBatchInLastPossibleBlocks(gt *testing.T) {
	}
	// 8 L1 blocks with 17 L2 blocks is the unsafe state.
-	// Because wew consistently batch submitted we are one epoch behind the unsafe head with the safe head
+	// Because we consistently batch submitted we are one epoch behind the unsafe head with the safe head
	verifyChainStateOnSequencer(8, 17, 8, 15, 7)
	// Create the batch for L2 blocks 16 & 17
......
-FROM golang:1.18.0-alpine3.15 as builder
+FROM --platform=$BUILDPLATFORM golang:1.18.0-alpine3.15 as builder
ARG VERSION=v0.0.0
@@ -15,7 +15,11 @@ COPY ./.git /app/.git
WORKDIR /app/op-node
-RUN make op-node VERSION="$VERSION"
+RUN go mod download
+
+ARG TARGETOS TARGETARCH
+
+RUN make op-node VERSION="$VERSION" GOOS=$TARGETOS GOARCH=$TARGETARCH
FROM alpine:3.15
......
@@ -9,7 +9,7 @@ LDFLAGSSTRING +=-X github.com/ethereum-optimism/optimism/op-node/version.Meta=$(
LDFLAGS := -ldflags "$(LDFLAGSSTRING)"
op-node:
-	env GO111MODULE=on go build -v $(LDFLAGS) -o ./bin/op-node ./cmd/main.go
+	env GO111MODULE=on GOOS=$(TARGETOS) GOARCH=$(TARGETARCH) go build -v $(LDFLAGS) -o ./bin/op-node ./cmd/main.go
clean:
	rm bin/op-node
......
-FROM golang:1.18.0-alpine3.15 as builder
+FROM --platform=$BUILDPLATFORM golang:1.18.0-alpine3.15 as builder
ARG VERSION=v0.0.0
@@ -16,7 +16,11 @@ COPY ./.git /app/.git
WORKDIR /app/op-proposer
-RUN make op-proposer VERSION="$VERSION"
+RUN go mod download
+
+ARG TARGETOS TARGETARCH
+
+RUN make op-proposer VERSION="$VERSION" GOOS=$TARGETOS GOARCH=$TARGETARCH
FROM alpine:3.15
......
@@ -8,7 +8,7 @@ LDFLAGSSTRING +=-X main.Version=$(VERSION)
LDFLAGS := -ldflags "$(LDFLAGSSTRING)"
op-proposer:
-	env GO111MODULE=on go build -v $(LDFLAGS) -o ./bin/op-proposer ./cmd
+	env GO111MODULE=on GOOS=$(TARGETOS) GOARCH=$(TARGETARCH) go build -v $(LDFLAGS) -o ./bin/op-proposer ./cmd
clean:
	rm bin/op-proposer
......
@@ -44,7 +44,9 @@ The cli provides a convenient cli for interacting with the attestation station c
For react hooks we recommend using the [wagmi cli](https://wagmi.sh/cli/getting-started) with the [etherscan plugin](https://wagmi.sh/cli/plugins/etherscan) and [react plugin](https://wagmi.sh/cli/plugins/react) to automatically generate react hooks around the attestation station.
-Use `parseAttestationBytes` and `stringifyAttestationBytes` to parse and stringify attestations before passing them into wagmi hooks.
+Use `createKey` and `createValue` to convert your raw keys and values into bytes that can be used in the attestation station contract calls.
+Use `parseString`, `parseBool`, `parseAddress` and `parseNumber` to convert values returned by the attestation station to their correct data type.
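As a quick illustration of that flow (an editor's sketch, not part of this diff; the sample key and values are invented, and the exact `parseString` signature is assumed from the exports above):

```typescript
import { createKey, createValue, parseString } from '@eth-optimism/atst'

// Keys shorter than 32 characters are padded to bytes32; longer keys are hashed.
const key = createKey('optimist.base-uri')

// Values are hex-encoded bytes, ready to pass to the contract or wagmi hooks.
const value = createValue('my attestation') // UTF-8 bytes as hex
const numeric = createValue(123)            // '0x7b'

// Bytes read back from the attestation station can be decoded to JS types.
const decoded = parseString(value) // assumed to round-trip back to 'my attestation'
```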
For convenience we also export the hooks here. For convenience we also export the hooks here.
......
@@ -171,11 +171,6 @@ const attestation = await readAttestations({
   * @defaults defaults to the create2 address
   */
  contractAddress,
-  /**
-   * Boolean: Whether to allow some of the calls to fail
-   * Defaults to false
-   */
-  allowFailures,
})
```
@@ -207,10 +202,10 @@ const attestation = parseAttestationBytes(
### attestation keys
-Attestation keys are limited to 32 bytes. To support keys longer than 32 bytes, you can use the `encodeRawKey` function
+Attestation keys are limited to 32 bytes. To support keys longer than 32 bytes, you can use the `createKey` function
```typescript
-const key = await encodeRawKey(
+const key = await createKey(
  about,
  key,
  'i.am.a.key.much.longer.than.32.bytes.long'
@@ -218,7 +213,7 @@ const key = await encodeRawKey(
await writeAttestation(preparedTx)
```
-encodeRawKey will keep the key as is if it is shorter than 32 bytes and otherwise run it through keccak256
+createKey will keep the key as is if it is shorter than 32 bytes and otherwise run it through keccak256
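For example (an editor's sketch based on the `createKey` implementation shown later in this diff; the key strings are only illustrative):

```typescript
import { createKey } from '@eth-optimism/atst'

// Under 32 characters: returned unchanged as a right-padded bytes32 string.
createKey('my.short.key')

// 32 characters or more: keccak256 of the UTF-8 bytes, truncated to 31 bytes,
// with a trailing 'ff' marker byte appended.
createKey('i.am.a.key.much.longer.than.32.bytes.long')
```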
### prepareWriteAttestation
......
@@ -15,13 +15,13 @@ cli
  .option('--creator <string>', readOptionsValidators.creator.description!)
  .option('--about <string>', readOptionsValidators.about.description!)
  .option('--key <string>', readOptionsValidators.key.description!)
-  .option('--data-type [string]', readOptionsValidators.dataType.description!, {
+  .option('--data-type <string>', readOptionsValidators.dataType.description!, {
    default: readOptionsValidators.dataType.parse(undefined),
  })
-  .option('--rpc-url [url]', readOptionsValidators.rpcUrl.description!, {
+  .option('--rpc-url <url>', readOptionsValidators.rpcUrl.description!, {
    default: readOptionsValidators.rpcUrl.parse(undefined),
  })
-  .option('--contract [address]', readOptionsValidators.contract.description!, {
+  .option('--contract <address>', readOptionsValidators.contract.description!, {
    default: readOptionsValidators.contract.parse(undefined),
  })
  .example(
@@ -52,17 +52,17 @@
    '--private-key <string>',
    writeOptionsValidators.privateKey.description!
  )
-  .option('--data-type [string]', readOptionsValidators.dataType.description!, {
+  .option('--data-type <string>', readOptionsValidators.dataType.description!, {
    default: writeOptionsValidators.dataType.parse(undefined),
  })
  .option('--about <string>', writeOptionsValidators.about.description!)
  .option('--key <string>', writeOptionsValidators.key.description!)
  .option('--value <string>', writeOptionsValidators.value.description!)
-  .option('--rpc-url [url]', writeOptionsValidators.rpcUrl.description!, {
+  .option('--rpc-url <url>', writeOptionsValidators.rpcUrl.description!, {
    default: writeOptionsValidators.rpcUrl.parse(undefined),
  })
  .option(
-    '--contract [address]',
+    '--contract <address>',
    writeOptionsValidators.contract.description!,
    {
      default: writeOptionsValidators.contract.parse(undefined),
......
// constants
export { ATTESTATION_STATION_ADDRESS } from './constants/attestationStationAddress'
// lib
-export { encodeRawKey } from './lib/encodeRawKey'
+export { encodeRawKey, createKey } from './lib/createKey'
+export { createValue, stringifyAttestationBytes } from './lib/createValue'
export {
  readAttestation,
  readAttestationAddress,
@@ -15,7 +16,6 @@ export { prepareWriteAttestation } from './lib/prepareWriteAttestation'
export { prepareWriteAttestations } from './lib/prepareWriteAttestations'
export { writeAttestation } from './lib/writeAttestation'
export { abi } from './lib/abi'
-export { stringifyAttestationBytes } from './lib/stringifyAttestationBytes'
export {
  parseAttestationBytes,
  parseAddress,
......
import { describe, expect, it } from 'vitest'
-import { encodeRawKey } from './encodeRawKey'
+import { encodeRawKey } from './createKey'
describe(encodeRawKey.name, () => {
  it('should return just the raw key if it is less than 32 bytes', () => {
......
@@ -2,10 +2,24 @@ import { ethers } from 'ethers'
import { WagmiBytes } from '../types/WagmiBytes'
-export const encodeRawKey = (rawKey: string): WagmiBytes => {
+/**
+ * Creates an attestation key from a raw string
+ * Converts to bytes32 if key is less than 32 bytes
+ * Hashes key if key is greater than 32 bytes
+ */
+export const createKey = (rawKey: string): WagmiBytes => {
  if (rawKey.length < 32) {
    return ethers.utils.formatBytes32String(rawKey) as WagmiBytes
  }
  const hash = ethers.utils.keccak256(ethers.utils.toUtf8Bytes(rawKey))
  return (hash.slice(0, 64) + 'ff') as WagmiBytes
}
+
+/**
+ * @deprecated use createKey instead
+ * Will be removed in v1.0.0
+ */
+export const encodeRawKey: typeof createKey = (rawKey) => {
+  console.warn('encodeRawKey is deprecated, use createKey instead')
+  return createKey(rawKey)
+}
@@ -9,7 +9,16 @@ import {
import { WagmiBytes } from '../types/WagmiBytes'
-export const stringifyAttestationBytes = (
+/**
+ * Turns a value into bytes to make an attestation
+ *
+ * @example
+ * createValue('hello world') // '0x68656c6c6f20776f726c64'
+ * createValue(123) // '0x7b'
+ * createValue(true) // '0x1'
+ * createValue(BigNumber.from(10)) // '0xa'
+ */
+export const createValue = (
  bytes: WagmiBytes | string | Address | number | boolean | BigNumber
): WagmiBytes => {
  bytes = bytes === '0x' ? '0x0' : bytes
@@ -33,3 +42,14 @@ export const stringifyAttestationBytes = (
  }
  throw new Error(`unrecognized bytes type ${bytes satisfies never}`)
}
+
+/**
+ * @deprecated use createValue instead
+ * Will be removed in v1.0.0
+ */
+export const stringifyAttestationBytes: typeof createValue = (bytes) => {
+  console.warn(
+    'stringifyAttestationBytes is deprecated, use createValue instead'
+  )
+  return createValue(bytes)
+}
@@ -4,7 +4,7 @@ import { Address } from 'wagmi'
import { ATTESTATION_STATION_ADDRESS } from '../constants/attestationStationAddress'
import { abi } from '../lib/abi'
import { AttestationCreatedEvent } from '../types/AttestationCreatedEvent'
-import { encodeRawKey } from './encodeRawKey'
+import { encodeRawKey } from './createKey'
export const getEvents = async ({
  creator = null,
......
@@ -41,6 +41,9 @@ export const parseAddress = (rawAttestation: WagmiBytes): Address => {
}
/**
+ * @deprecated use parseString, parseBool, parseNumber, or parseAddress instead
+ * Will be removed in v1.0.0
+ * @internal
 * Parses a raw attestation
 */
export const parseAttestationBytes = <TDataType extends DataTypeOption>(
......
@@ -4,7 +4,7 @@ import { formatBytes32String } from 'ethers/lib/utils.js'
import { ATTESTATION_STATION_ADDRESS } from '../constants/attestationStationAddress'
import { WagmiBytes } from '../types/WagmiBytes'
import { abi } from './abi'
-import { stringifyAttestationBytes } from './stringifyAttestationBytes'
+import { createValue } from './createValue'
export const prepareWriteAttestation = async (
  about: Address,
@@ -27,6 +27,6 @@ export const prepareWriteAttestation = async (
    abi,
    functionName: 'attest',
    chainId,
-    args: [about, formattedKey, stringifyAttestationBytes(value) as WagmiBytes],
+    args: [about, formattedKey, createValue(value) as WagmiBytes],
  })
}
@@ -4,7 +4,7 @@ import { formatBytes32String } from 'ethers/lib/utils.js'
import { ATTESTATION_STATION_ADDRESS } from '../constants/attestationStationAddress'
import { WagmiBytes } from '../types/WagmiBytes'
import { abi } from './abi'
-import { stringifyAttestationBytes } from './stringifyAttestationBytes'
+import { createValue } from './createValue'
type Attestation = {
  about: Address
@@ -27,7 +27,7 @@ export const prepareWriteAttestations = async (
      `key is longer than 32 bytes: ${attestation.key}. Try using a shorter key or using 'encodeRawKey' to encode the key into 32 bytes first`
    )
  }
-  const formattedValue = stringifyAttestationBytes(
+  const formattedValue = createValue(
    attestation.value
  ) as WagmiBytes
  return {
......
@@ -19,7 +19,6 @@ import { parseAttestationBytes } from './parseAttestationBytes'
 *     creator: creatorAddress,
 *     about: aboutAddress,
 *     key: 'my_key',
- *     allowFailure: false,
 *   },
 *   {
 *     creator: creatorAddress2,
@@ -27,7 +26,6 @@ import { parseAttestationBytes } from './parseAttestationBytes'
 *     key: 'my_key',
 *     dataType: 'number',
 *     contractAddress: '0x1234',
- *     allowFailure: false,
 *   },
 * )
 */
@@ -40,7 +38,6 @@ export const readAttestations = async (
      about,
      key,
      contractAddress = ATTESTATION_STATION_ADDRESS,
-      allowFailure = false,
    } = attestation
    if (key.length > 32) {
      throw new Error(
@@ -52,7 +49,6 @@ export const readAttestations = async (
      abi,
      functionName: 'attestations',
      args: [creator, about, formatBytes32String(key) as WagmiBytes],
-      allowFailure,
    } as const
  })
......
@@ -11,5 +11,4 @@ export interface AttestationReadParams {
  key: string
  dataType?: DataTypeOption
  contractAddress?: Address
-  allowFailure?: boolean
}
@@ -14,6 +14,10 @@ that maintains 1:1 compatibility with Ethereum.
 - [L2 Output Root Proposals](proposals.md)
 - [Rollup Node](rollup-node.md)
 - [Rollup Node P2p](rollup-node-p2p.md)
+- [L2 Chain Derivation](derivation.md)
+- [Network Upgrades](network-upgrades.md)
+- [System Config](system_config.md)
+- [Batch Submitter](batcher.md)
 - [Guaranteed Gas Market](guaranteed-gas-market.md)
 - [Messengers](messengers.md)
 - [Bridges](bridges.md)
......
# Network Upgrades
Network upgrades, also known as forks or hardforks, implement consensus-breaking changes.
These changes take effect deterministically across all nodes through an activation rule.
This document lists the network upgrades of the OP Stack, starting after the Bedrock upgrade.
Prospective upgrades may be listed as proposals, but are not governed through these specifications.
Activation rule parameters of network upgrades are configured in respective chain configurations,
and not part of this specification.
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents**
- [Activation rules](#activation-rules)
- [L2 Block-number based activation](#l2-block-number-based-activation)
- [L2 Block-timestamp based activation](#l2-block-timestamp-based-activation)
- [Post-Bedrock Network upgrades](#post-bedrock-network-upgrades)
- [Regolith](#regolith)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Activation rules
The below L2-block based activation rules may be applied in two contexts:
- The rollup node, specified through the rollup configuration (known as `rollup.json`),
referencing L2 blocks (or block input-attributes) that pass through the derivation pipeline.
- The execution engine, specified through the chain configuration (known as the `config` part of `genesis.json`),
referencing blocks or input-attributes that are part of, or applied to, the L2 chain.
### L2 Block-number based activation
Activation rule: `upgradeNumber != null && block.number >= upgradeNumber`
Starting at, and including, the L2 `block` with `block.number == upgradeNumber`, the upgrade rules apply.
If the upgrade block-number `upgradeNumber` is not specified in the configuration, the upgrade is ignored.
This applies to the L2 block number, not to the L1-origin block number.
This means that an L2 upgrade may be inactive, and then active, without changing the L1-origin.
This block number based method has commonly been used in L1 up until the Bellatrix/Paris upgrade, a.k.a. The Merge,
which was upgraded through special rules.
### L2 Block-timestamp based activation
Activation rule: `upgradeTime != null && block.timestamp >= upgradeTime`
Starting at, and including, the L2 `block` with `block.timestamp == upgradeTime`, the upgrade rules apply.
If the upgrade block-timestamp `upgradeTime` is not specified in the configuration, the upgrade is ignored.
This applies to the L2 block timestamp, not to the L1-origin block timestamp.
This means that an L2 upgrade may be inactive, and then active, without changing the L1-origin.
This timestamp based method has become the default on L1 after the Bellatrix/Paris upgrade, a.k.a. The Merge,
because it can be planned in accordance with beacon-chain epochs and slots.
Note that the L2 version is not limited to timestamps that match L1 beacon-chain slots or epochs.
A timestamp may be chosen to be synchronous with a specific slot or epoch on L1,
but the matching L1-origin information may not be present at the time of activation on L2.
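For illustration only, the two activation rules can be condensed into small predicates. This is an editor's TypeScript sketch, not part of the specification; the function names and the representation of an unset parameter as `null` are assumptions:

```typescript
// L2 block-number based activation: active at and after the configured block number.
const isActiveByNumber = (blockNumber: number, upgradeNumber: number | null): boolean =>
  upgradeNumber !== null && blockNumber >= upgradeNumber

// L2 block-timestamp based activation: active at and after the configured timestamp.
const isActiveByTimestamp = (blockTimestamp: number, upgradeTime: number | null): boolean =>
  upgradeTime !== null && blockTimestamp >= upgradeTime

// An unset activation parameter means the upgrade is ignored entirely.
console.log(isActiveByNumber(105, null))                       // false: upgrade not configured
console.log(isActiveByTimestamp(1_700_000_100, 1_700_000_000)) // true: at or after the activation time
```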
## Post-Bedrock Network upgrades
### Regolith
The Regolith upgrade, named after a material best described as "deposited dust on top of a layer of bedrock",
implements minor changes to deposit processing, based on reports of the Sherlock Audit-contest and findings in
the Bedrock Optimism Goerli testnet.
Summary of changes:
- The `isSystemTx` boolean is disabled; system transactions now use the same gas accounting rules as regular deposits.
- The actual deposit gas-usage is recorded in the receipt of the deposit transaction,
and subtracted from the L2 block gas-pool.
Unused gas of deposits is not refunded with ETH however, as it is burned on L1.
- The `nonce` value of the deposit sender account, before the transaction state-transition, is recorded in a new
optional field (`depositNonce`), extending the transaction receipt (i.e. not present in pre-Regolith receipts).
- The recorded deposit `nonce` is used to correct the transaction and receipt metadata in RPC responses,
including the `contractAddress` field of deposits that deploy contracts.
- The `gas` and `depositNonce` data is committed to as part of the consensus-representation of the receipt,
  enabling the data to be safely synced between independent L2 nodes (the extended receipt is sketched below).
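A minimal sketch of the extended receipt described in the items above (TypeScript-style types; only `depositNonce` is prescribed here, the other field names follow common Ethereum receipt conventions and are illustrative):

```typescript
interface PostRegolithDepositReceipt {
  transactionHash: string
  // Actual gas used by the deposit, as charged against the L2 block gas-pool.
  gasUsed: bigint
  // Corrected using the recorded deposit nonce for contract-creating deposits.
  contractAddress: string | null
  // New optional field: sender account nonce before the state-transition.
  // Not present in pre-Regolith receipts.
  depositNonce?: bigint
}
```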
The [deposit specification](./deposits.md) specifies the changes of the Regolith upgrade in more detail.
The Regolith upgrade uses an *L2 block-timestamp* activation-rule, and is specified in both the
rollup-node (`regolith_time`) and execution engine (`config.regolithTime`).