Commit 1b9fd7a3 authored by Mark Tyneway, committed by GitHub

ufm-test-services: delete (#10702)

* ufm-test-services: delete

This code is no longer maintained by anybody at the company and is not
running in production. This commit deletes the code. We should strive
to ensure that there is clear ownership of all code in this repository
so that problems can be resolved quickly when they come up in a
particular place.

* ci: delete
parent 61b2b36c
@@ -1901,10 +1901,6 @@ workflows:
name: indexer-docker-build
docker_name: indexer
docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
- docker-build:
name: ufm-metamask-docker-build
docker_name: ufm-metamask
docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
- check-generated-mocks-op-node
- check-generated-mocks-op-service
- cannon-go-lint-and-test:
@@ -2183,21 +2179,6 @@ workflows:
- oplabs-gcr
requires:
- hold
- docker-build:
name: ufm-metamask-docker-release
filters:
tags:
only: /^ufm-metamask\/v.*/
branches:
ignore: /.*/
docker_name: ufm-metamask
docker_tags: <<pipeline.git.revision>>,latest
publish: true
release: true
context:
- oplabs-gcr
requires:
- hold
scheduled-todo-issues:
when:
@@ -2384,14 +2365,6 @@ workflows:
context:
- oplabs-gcr
- slack
- docker-build:
name: ufm-metamask-docker-publish
docker_name: ufm-metamask
docker_tags: <<pipeline.git.revision>>,<<pipeline.git.branch>>
publish: true
context:
- oplabs-gcr
- slack
scheduled-preimage-reproducibility:
when:
...
@@ -32,7 +32,6 @@ on:
- op-ufm
- da-server
- proxyd
- ufm-metamask
- op-contracts
- op-conductor
prerelease:
...
@@ -84,7 +84,6 @@ The Optimism Immunefi program offers up to $2,000,042 for in-scope critical vuln
│ ├── <a href="./packages/sdk">sdk</a>: provides a set of tools for interacting with Optimism
├── <a href="./proxyd">proxyd</a>: Configurable RPC request router and proxy
├── <a href="./specs">specs</a>: Specs of the rollup starting at the Bedrock upgrade
└── <a href="./ufm-test-services">ufm-test-services</a>: Runs a set of tasks to generate metrics
</pre>
## Development and Release Process
@@ -127,7 +126,6 @@ The full set of components that have releases are:
- `op-proposer`
- `op-ufm`
- `proxyd`
- `ufm-metamask`
All other components and packages should be considered development components only and do not have releases.
...
@@ -225,19 +225,6 @@ target "indexer" {
tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/indexer:${tag}"]
}
target "ufm-metamask" {
dockerfile = "Dockerfile"
context = "./ufm-test-services/metamask"
args = {
// proxyd dockerfile has no _ in the args
GITCOMMIT = "${GIT_COMMIT}"
GITDATE = "${GIT_DATE}"
GITVERSION = "${GIT_VERSION}"
}
platforms = split(",", PLATFORMS)
tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/ufm-metamask:${tag}"]
}
target "chain-mon" {
dockerfile = "./ops/docker/Dockerfile.packages"
context = "."
...
@@ -22,7 +22,6 @@ MIN_VERSIONS = {
'op-proposer': '0.10.14',
'proxyd': '3.16.0',
'op-heartbeat': '0.1.0',
'ufm-metamask': '0.1.0',
'op-contracts': '1.0.0',
'op-conductor': '0.0.0',
}
...
@@ -16,7 +16,6 @@ SERVICES = [
'da-server',
'proxyd',
'op-heartbeat',
'ufm-metamask',
'op-contracts',
'test',
'op-stack', # special case for tagging op-node, op-batcher, and op-proposer together
...
# Used by Test Services to perform certain actions if in CI environment
CI=false
# This is the password used to log into the Grafana dashboard as the admin user
GRAFANA_ADMIN_PWD=op
# Used by Test Services to query metrics. http://prometheus will use Docker's built-in DNS
METRICS_READ_URL="http://prometheus:9090/api/v1/query"
# The needed credentials to access METRICS_READ_URL. Will be sent as: Authorization: Bearer username:password
METRICS_READ_USERNAME=""
METRICS_READ_PASSWORD=""
# Used by Test Services to push metrics. http://pushgateway will use Docker's built-in DNS
METRICS_WRITE_URL="http://pushgateway:9091"
# Dictates how the request body is structured when pushing metrics. Should be either "grafana" or "prometheus-pushgateway"
METRICS_WRITE_TOOL="prometheus-pushgateway"
# This is the source the pushed metric will be labeled as originating from. May not need this value
METRICS_WRITE_SOURCE=""
# The needed credentials to access METRICS_WRITE_URL. Will be sent as: Authorization: Bearer username:password
METRICS_WRITE_USERNAME=""
METRICS_WRITE_PASSWORD=""
# If true (or anything other than false), Xvfb will run inside the Metamask Test Service container and be used for Playwright tests.
# If false, you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_PLAYWRIGHT_RUN_HEADLESS=true
# The display used for running Playwright tests
METAMASK_DISPLAY=host.docker.internal:0
# The storage for Playwright to store test results, screenshots, videos, etc.
METAMASK_DISPLAY_VOLUME=/tmp/.X11-unix:/tmp/.X11-unix
# Mnemonic used to initialize Metamask, make sure there's enough ETH to run tests
METAMASK_SECRET_WORDS_OR_PRIVATEKEY="test test test test test test test test test test test junk"
# The initial network Metamask will be initialized with; the Test Service will override it with OP Sepolia
METAMASK_NETWORK="sepolia"
# The password to unlock Metamask
METAMASK_PASSWORD="T3st_P@ssw0rd!"
# The URL of the Metamask test dApp that will be spun up automatically for testing against
METAMASK_DAPP_URL="http://localhost:9011"
# The OP Sepolia RPC provider to be used to read/write data
METAMASK_OP_SEPOLIA_RPC_URL=""
# Used by Test Services to perform certain actions if in CI environment
CI=false
# This is the password used to log into the Grafana dashboard as the admin user
GRAFANA_ADMIN_PWD=op
# Used by Test Services to query metrics. http://prometheus will use Docker's built-in DNS
METRICS_READ_URL="http://prometheus:9090/api/v1/query"
# The needed credentials to access METRICS_READ_URL. Will be sent as: Authorization: Bearer username:password
METRICS_READ_USERNAME=""
METRICS_READ_PASSWORD=""
# Used by Test Services to push metrics. http://pushgateway will use Docker's built-in DNS
METRICS_WRITE_URL="http://pushgateway:9091"
# Dictates how the request body is structured when pushing metrics. Should be either "grafana" or "prometheus-pushgateway"
METRICS_WRITE_TOOL="prometheus-pushgateway"
# This is the source the pushed metric will be labeled as originating from. May not need this value
METRICS_WRITE_SOURCE=""
# The needed credentials to access METRICS_WRITE_URL. Will be sent as: Authorization: Bearer username:password
METRICS_WRITE_USERNAME=""
METRICS_WRITE_PASSWORD=""
# If true (or anything other than false), Xvfb will run inside the Metamask Test Service container and be used for Playwright tests.
# If false, you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_PLAYWRIGHT_RUN_HEADLESS=true
# The display used for running Playwright tests
METAMASK_DISPLAY=host.docker.internal:0
# The storage for Playwright to store test results, screenshots, videos, etc.
METAMASK_DISPLAY_VOLUME=/tmp/.X11-unix:/tmp/.X11-unix
# Mnemonic used to initialize Metamask, make sure there's enough ETH to run tests
METAMASK_SECRET_WORDS_OR_PRIVATEKEY="test test test test test test test test test test test junk"
# The initial network Metamask will be initialized with; the Test Service will override it with OP Sepolia
METAMASK_NETWORK="sepolia"
# The password to unlock Metamask
METAMASK_PASSWORD="T3st_P@ssw0rd!"
# The URL of the Metamask test dApp that will be spun up automatically for testing against
METAMASK_DAPP_URL="http://localhost:9011"
# The OP Sepolia RPC provider to be used to read/write data
METAMASK_OP_SEPOLIA_RPC_URL=""
# User Facing Monitoring
This project allows you to create _Test Services_, which are Docker containers configured to run a set of tasks, generate metrics, and push those metrics to a Prometheus Pushgateway, where they can later be scraped by Prometheus and queried by a Grafana dashboard.
This project has two modes of execution: CI and local.
## CI Execution
![Diagram of UFM execution flow in CI](./assets//ufm-ci-execution.svg)
Starting from left to right in the above diagram:
1. Github Workflow files are created for each time interval Test Services should be run
- All Test Services that should be run for a specific time interval (e.g. 1 hour) should be defined in the same Github Workflow file
2. Github will run a workflow at its specified time interval, triggering all of its defined Test Services to run
3. `docker-compose.yml` builds and runs each Test Service, setting any environment variables that can be sourced from Github secrets
4. Each Test Service will run its defined tasks, generate its metrics, and push them to an already deployed instance of Prometheus Pushgateway (see the sketch after this list)
5. An already deployed instance of Prometheus will scrape the Pushgateway for metrics
6. An already deployed Grafana dashboard will query Prometheus for metric data to display
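For a concrete sense of step 4, here is a minimal sketch, assuming a hypothetical metric and job name, of how a Test Service can push a gauge to the Pushgateway with `prom-client` (the actual implementation lives in the Metamask Test Service's `tests/prometheusUtils.ts`):
```typescript
import { Gauge, Pushgateway, Registry } from 'prom-client'

// Give the metric its own registry so only this metric is pushed
const registry = new Registry()
const exampleGauge = new Gauge({
  name: 'example_test_service_metric', // illustrative name, not a real UFM metric
  help: 'Example gauge pushed by a Test Service',
  registers: [registry],
})

const pushExampleMetric = async (value: number) => {
  exampleGauge.set(value)
  // METRICS_WRITE_URL defaults to http://pushgateway:9091 in .env.example
  const gateway = new Pushgateway(
    process.env.METRICS_WRITE_URL ?? 'http://pushgateway:9091',
    undefined,
    registry
  )
  await gateway.pushAdd({ jobName: 'example-test-service' })
}

pushExampleMetric(1).catch(console.error)
```
Prometheus then scrapes the Pushgateway (step 5), and Grafana queries Prometheus for display (step 6).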
### Mocking CI Execution Locally
Thanks to [Act](https://github.com/nektos/act), Github actions can be "run" locally for testing. Here's how:
1. [Install Act](https://github.com/nektos/act#installation-through-package-managers)
- For MacOS: `brew install act`
2. [Generate](https://docs.github.com/en/enterprise-server@3.6/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token) a Github access token
- I believe it only needs permission to read from repositories
3. Copy secrets file: `cp .secrets.example .secrets` and fill it out
4. Create `~/.actrc` and copy the following into it:
```bash
-P ubuntu-latest=catthehacker/ubuntu:full-18.04
-P ubuntu-latest=catthehacker/ubuntu:full-18.04
-P ubuntu-18.04=catthehacker/ubuntu:full-18.04
```
5. Spin up the Pushgateway, Prometheus, and Grafana containers: `docker-compose up pushgateway prometheus grafana`
- Optionally, you could specify a remote Pushgateway and Prometheus instance to push metrics to in `.secrets`
- `PROMETHEUS_SERVER_URL` and `PROMETHEUS_PUSHGATEWAY_URL`
6. Run `act -W .github/workflows/YOUR_WORKFLOW_FILE.yml -s GITHUB_TOKEN=YOUR_TOKEN --secret-file ./ufm-test-services/.secrets --container-architecture linux/amd64`
- `--container-architecture linux/amd64` is necessary if running on MacOS, but may be different for you
- Downloading the Github Actions Docker image takes a while and is pretty big, so you might need to allocate more resources to Docker, or `docker prune`/remove no longer needed images/containers/volumes
Following these steps will use `act` to mock the Github Actions environment using a Docker container. The Github Actions container will then spin up a nested container to run each Test Service. Each Test Service _should_ be configured to generate and push metrics to the given Pushgateway, so after `act` finishes execution, you should be able to log into Grafana and view the dashboards.
## Local Execution
### Running With Scheduler
![Diagram of UFM execution flow locally](./assets//ufm-local-execution.svg)
Starting from left to right in the above diagram:
1. Copy env file: `cp .env.example .env` and fill it out
- If you want to run local instances of the Pushgateway, Prometheus, and Grafana, you can run:
```bash
docker-compose up pushgateway prometheus grafana
```
to spin those up. Otherwise, you should override the default URLs in the `.env` for:
- `PROMETHEUS_SERVER_URL` and `PROMETHEUS_PUSHGATEWAY_URL`
2. You'll need to set up some sort of scheduler to run your Test Services at specific time intervals
- For Linux/MacOS this can be accomplished using `cron`
- Edit your `cron` job file using `crontab -e`
- Here is some example code to get you started, also found in the `crontab.example` file:
```bash
# Needs to point to docker, otherwise you'll get the error: exec: "docker": executable file not found in $PATH
PATH=/
# Runs every 1 hour
0 * * * * /usr/local/bin/docker-compose -f /path/to/docker-compose.yml --profile 1hour up -d
# Runs every 1 day
0 12 * * * /usr/local/bin/docker-compose -f /path/to/docker-compose.yml --profile 1day up -d
# Runs every 7 days
0 12 */7 * * /usr/local/bin/docker-compose -f /path/to/docker-compose.yml --profile 7day up -d
```
### Running Manually
1. Copy env file: `cp .env.example .env` and fill it out
2. Run `docker-compose` for whichever Test Service you'd like to run, e.g.:
- `docker-compose run testService1`
- `docker-compose --profile 1hour up`
## Test Services
If you're trying to run a specific Test Service, make sure to check out their `README.md`s, as they may have some required prerequisites to set up before they'll run as expected.
# Needs to point to docker, otherwise you'll get the error: exec: "docker": executable file not found in $PATH
PATH=/
# Runs every 1 hour
0 * * * * /usr/local/bin/docker-compose -f /path/to/docker-compose.local.yml --profile 1hour up -d
# Runs every 1 day
0 12 * * * /usr/local/bin/docker-compose -f /path/to/docker-compose.local.yml --profile 1day up -d
# Runs every 7 days
0 12 */7 * * /usr/local/bin/docker-compose -f /path/to/docker-compose.local.yml --profile 7day up -d
version: "3"
services:
pushgateway:
image: prom/pushgateway
container_name: pushgateway
ports:
- "9091:9091"
restart: unless-stopped
read_only: true
security_opt:
- "no-new-privileges:true"
prometheus:
image: prom/prometheus:latest
container_name: prometheus
ports:
- "9090:9090"
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
read_only: true
security_opt:
- "no-new-privileges:true"
grafana:
image: grafana/grafana:latest
container_name: grafana
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PWD}
volumes:
- ./grafana/provisioning:/etc/grafana/provisioning
- ./grafana/dashboards:/var/lib/grafana/dashboards
security_opt:
- "no-new-privileges:true"
metamask:
build:
context: ./metamask
dockerfile: Dockerfile
args:
- METAMASK_PLAYWRIGHT_RUN_HEADLESS=${METAMASK_PLAYWRIGHT_RUN_HEADLESS}
container_name: test-service-metamask
profiles: ["5minute"]
environment:
CI: ${CI}
DISPLAY: ${METAMASK_DISPLAY}
GRAFANA_ADMIN_PWD: ${GRAFANA_ADMIN_PWD}
METRICS_READ_URL: ${METRICS_READ_URL}
METRICS_READ_USERNAME: ${METRICS_READ_USERNAME}
METRICS_READ_PASSWORD: ${METRICS_READ_PASSWORD}
METRICS_WRITE_URL: ${METRICS_WRITE_URL}
METRICS_WRITE_TOOL: ${METRICS_WRITE_TOOL}
METRICS_WRITE_SOURCE: ${METRICS_WRITE_SOURCE}
METRICS_WRITE_USERNAME: ${METRICS_WRITE_USERNAME}
METRICS_WRITE_PASSWORD: ${METRICS_WRITE_PASSWORD}
METAMASK_PLAYWRIGHT_RUN_HEADLESS: ${METAMASK_PLAYWRIGHT_RUN_HEADLESS}
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: ${METAMASK_SECRET_WORDS_OR_PRIVATEKEY}
METAMASK_NETWORK: ${METAMASK_NETWORK}
METAMASK_PASSWORD: ${METAMASK_PASSWORD}
METAMASK_DAPP_URL: ${METAMASK_DAPP_URL}
METAMASK_OP_SEPOLIA_RPC_URL: ${METAMASK_OP_SEPOLIA_RPC_URL}
volumes:
- ${METAMASK_DISPLAY_VOLUME:-/path/in/container/if/no/env/set}
restart: "no"
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 1,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "series",
"axisGridShow": false,
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "stepAfter",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"decimals": 0,
"displayName": "Number of Transactions (positive number = success, negative = failures)",
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "text",
"value": null
},
{
"color": "red",
"value": -1
},
{
"color": "text",
"value": 0
},
{
"color": "green",
"value": 1
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 0
},
"id": 1,
"options": {
"legend": {
"calcs": [
"last"
],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"timezone": [
"browser"
],
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_metric",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A",
"useBackend": false
}
],
"title": "Self Transferring on OP Goerli (positive number = success, negative = failures)",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "fixed"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisGridShow": false,
"axisLabel": "",
"axisPlacement": "auto",
"axisSoftMin": 0,
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "stepAfter",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"decimals": 0,
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "text",
"value": null
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_low_metric"
},
"properties": [
{
"id": "displayName",
"value": "Low (Slow 🐢)"
},
{
"id": "color",
"value": {
"fixedColor": "green",
"mode": "fixed"
}
}
]
},
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_medium_metric"
},
"properties": [
{
"id": "displayName",
"value": "Medium (Market 🦊)"
},
{
"id": "color",
"value": {
"fixedColor": "orange",
"mode": "fixed"
}
}
]
},
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_high_metric"
},
"properties": [
{
"id": "displayName",
"value": "High (Aggressive 🦍)"
}
]
},
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_actual_metric"
},
"properties": [
{
"id": "displayName",
"value": "Actual transaction fee"
},
{
"id": "color",
"value": {
"fixedColor": "blue",
"mode": "fixed"
}
}
]
}
]
},
"gridPos": {
"h": 11,
"w": 24,
"x": 0,
"y": 8
},
"id": 2,
"options": {
"legend": {
"calcs": [
"last"
],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"timezone": [
"browser"
],
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_low_metric",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_medium_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "B",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_high_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "C",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_actual_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "D",
"useBackend": false
}
],
"title": "Self Transferring on OP Goerli Fee Estimates",
"type": "timeseries"
}
],
"refresh": "5s",
"schemaVersion": 38,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-12h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "UFM: Metamask",
"uid": "f66f7076-c724-4f81-8ff9-58d6d99f2716",
"version": 1,
"weekStart": ""
}
\ No newline at end of file
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
disableDeletion: false
options:
path: /var/lib/grafana/dashboards
apiVersion: 1
datasources:
- name: Prometheus
type: prometheus
access: proxy
url: http://prometheus:9090
isDefault: true
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
.env
# Using the Playwright image
FROM mcr.microsoft.com/playwright:v1.37.1-jammy
# Setting the working directory
WORKDIR /app
# Update PATH
ENV PATH /app/node_modules/.bin:$PATH
RUN npm i -g pnpm
# Build arg passed from docker-compose; controls whether Xvfb is installed for headed Playwright runs
ARG METAMASK_PLAYWRIGHT_RUN_HEADLESS
RUN if [ "$METAMASK_PLAYWRIGHT_RUN_HEADLESS" != "false" ]; then \
apt-get update && \
apt-get install -y xvfb && \
rm -rf /var/lib/apt/lists/* ; \
fi
# Copy necessary files and directories
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml /app/
RUN pnpm install --frozen-lockfile --ignore-scripts
COPY tests /app/tests/
COPY playwright.config.ts /app/
COPY start.sh /app/
COPY tsconfig.json /app/
# Start the script
CMD /bin/bash /app/start.sh
MIT License
Copyright (c) 2023 Optimism
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# User Facing Monitoring - Metamask Tests
## Running Locally
### Building Docker Image
```bash
docker build -t ufm-test-service-metamask .
```
### Running the Docker Container on MacOS
The following steps were taken from [here](https://www.oddbird.net/2022/11/30/headed-playwright-in-docker/#macos)
Apple’s operating system doesn’t include a built-in XServer, but we can use [XQuartz](https://www.xquartz.org/) to provide one:
1. Install XQuartz: `brew install --cask xquartz`
2. Open XQuartz, go to `Preferences -> Security`, and check `Allow connections from network clients`
3. Restart your computer (restarting XQuartz might not be enough)
4. Start XQuartz by executing `xhost +localhost` in your terminal
5. Open Docker Desktop and edit settings to give access to `/tmp/.X11-unix` in `Preferences -> Resources -> File sharing`
Once XQuartz is running with the right permissions, you can populate the environment variable and socket Docker args (these envs are defaulted to the below values in `ufm-test-services/.env.example`):
```bash
docker run --rm -it \
-e DISPLAY=host.docker.internal:0 \
-v /tmp/.X11-unix:/tmp/.X11-unix \
ufm-test-service-metamask
```
name: 'UFM Test Service: 1 hour'
on:
schedule:
# Run every hour
- cron: '0 * * * *'
jobs:
ufm_test_service_metamask:
name: 'UFM Test Service: Metamask'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run Docker Compose
run: docker-compose run metamask
env:
CI: ${{ secrets.CI }}
METRICS_READ_URL: ${{ secrets.METRICS_READ_URL }}
METRICS_READ_USERNAME: ${{ secrets.METRICS_READ_USERNAME }}
METRICS_READ_PASSWORD: ${{ secrets.METRICS_READ_PASSWORD }}
METRICS_WRITE_URL: ${{ secrets.METRICS_WRITE_URL }}
METRICS_WRITE_TOOL: ${{ secrets.METRICS_WRITE_TOOL }}
METRICS_WRITE_SOURCE: ${{ secrets.METRICS_WRITE_SOURCE }}
METRICS_WRITE_USERNAME: ${{ secrets.METRICS_WRITE_USERNAME }}
METRICS_WRITE_PASSWORD: ${{ secrets.METRICS_WRITE_PASSWORD }}
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: ${{ secrets.METAMASK_SECRET_WORDS_OR_PRIVATEKEY }}
METAMASK_NETWORK: ${{ secrets.METAMASK_NETWORK || 'sepolia' }}
METAMASK_PASSWORD: ${{ secrets.METAMASK_PASSWORD || 'T3st_P@ssw0rd!' }}
METAMASK_DAPP_URL: ${{ secrets.METAMASK_DAPP_URL || 'http://localhost:9011' }}
METAMASK_OP_SEPOLIA_RPC_URL: ${{ secrets.METAMASK_OP_SEPOLIA_RPC_URL }}
{
"name": "@eth-optimism/ufm-test-service-metamask",
"version": "0.1.0",
"description": "A User facing monitoring Test Service for MetaMask",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git",
"directory": "ufm-test-services/metamask"
},
"homepage": "https://optimism.io",
"type": "module",
"scripts": {
"clean": "rm -rf node_modules packages/*/node_modules && echo 'Finished cleaning'",
"lint": "prettier --check .",
"lint:fix": "prettier --write .",
"start:metamask-dapp": "npx serve -l 9011 node_modules/@metamask/test-dapp/dist",
"test": "npx playwright test"
},
"devDependencies": {
"@metamask/test-dapp": "^8.1.0",
"@playwright/test": "1.40.1",
"@synthetixio/synpress": "3.7.2-beta.9",
"dotenv": "^16.3.1",
"serve": "^14.2.1",
"typescript": "^5.3.3",
"viem": "^1.20.0"
},
"dependencies": {
"prom-client": "^15.0.0",
"zod": "^3.22.4"
},
"pnpm": {
"overrides": {
"@cypress/request": "^3.0.1",
"axios": "^1.6.2",
"got": "^11.8.6"
}
}
}
import { defineConfig, devices } from '@playwright/test';
/**
* Read environment variables from file.
* https://github.com/motdotla/dotenv
*/
import 'dotenv/config'
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: './tests',
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: !!process.env.CI,
/* Opt out of parallel tests on CI. */
workers: process.env.CI ? 1 : undefined,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: 'html',
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`. */
// baseURL: 'http://127.0.0.1:3000',
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: 'on-first-retry',
},
/* Configure projects for major browsers */
projects: [
{
name: 'chromium',
use: { ...devices['Desktop Chrome'] },
},
],
/* Run your local dev server before starting the tests */
webServer: {
command: 'npm run start:metamask-dapp',
url: process.env.METAMASK_DAPP_URL,
reuseExistingServer: false,
},
});
#!/bin/bash
if [ "$METAMASK_PLAYWRIGHT_RUN_HEADLESS" != "false" ]; then
# Start Xvfb in the background on display :99
Xvfb :99 &
# Set the DISPLAY environment variable
export DISPLAY=:99
fi
npm test
# If something goes wrong, Playwright generates this file, but only if there is an error.
# npx playwright show-trace will log the Playwright error
if [ -f "test-results/metamask-Setup-wallet-and-dApp-chromium-retry1/trace.zip" ]; then
npx playwright show-trace "test-results/metamask-Setup-wallet-and-dApp-chromium-retry1/trace.zip"
fi
import 'dotenv/config'
import { z } from 'zod'
import metamask from '@synthetixio/synpress/commands/metamask.js'
import synpressPlaywright from '@synthetixio/synpress/commands/playwright.js'
import { confirmPageElements } from '@synthetixio/synpress/pages/metamask/notification-page.js'
import { expect, test, type Page } from '@playwright/test'
import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'
import { formatGwei, parseGwei } from 'viem'
import { testWithSynpress } from './testWithSynpressUtil'
import {
incrementSelfSendTxGauge,
setFeeEstimationGauge,
} from './prometheusUtils'
const env = z
.object({
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: z.string(),
METAMASK_OP_SEPOLIA_RPC_URL: z.string().url(),
METAMASK_DAPP_URL: z.string().url(),
})
.parse(process.env)
const expectedSender = env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY?.startsWith('0x')
? privateKeyToAccount(
env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as `0x${string}`
).address.toLowerCase()
: mnemonicToAccount(
env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as string
).address.toLowerCase()
const expectedRecipient = expectedSender
const expectedCurrencySymbol = 'OPS'
let sharedPage: Page
let wasSuccessful: boolean
let handledFailure: boolean
test.describe.configure({ mode: 'serial' })
test.beforeAll(() => {
wasSuccessful = false
handledFailure = false
})
test.afterAll(async () => {
// This is handling failure scenarios such as Playwright timeouts
// where we are not able to catch and respond to an error.
if (!wasSuccessful && !handledFailure) {
await incrementSelfSendTxGauge(false)
}
await sharedPage.close()
})
testWithSynpress('Setup wallet and dApp', async ({ page }) => {
console.log('Setting up wallet and dApp...')
sharedPage = page
await sharedPage.goto('http://localhost:9011')
})
testWithSynpress('Add OP Sepolia network', async () => {
console.log('Adding OP Sepolia network...')
const expectedChainId = '0xaa37dc'
await metamask.addNetwork({
name: 'op-sepolia',
rpcUrls: {
default: {
http: [env.METAMASK_OP_SEPOLIA_RPC_URL],
},
},
id: '11155420',
nativeCurrency: {
symbol: expectedCurrencySymbol,
},
blockExplorers: {
default: {
url: 'https://optimism-sepolia.blockscout.com',
},
},
})
try {
await expect(sharedPage.locator('#chainId')).toHaveText(expectedChainId)
} catch (error) {
await incrementSelfSendTxGauge(false)
handledFailure = true
throw error
}
})
test(`Connect wallet with ${expectedSender}`, async () => {
console.log(`Connecting wallet with ${expectedSender}...`)
await sharedPage.click('#connectButton')
await metamask.acceptAccess()
try {
await expect(sharedPage.locator('#accounts')).toHaveText(expectedSender)
} catch (error) {
await incrementSelfSendTxGauge(false)
handledFailure = true
throw error
}
})
test('Send an EIP-1559 transaction and verify success', async () => {
console.log('Sending an EIP-1559 transaction and verify success...')
const expectedTransferAmount = '0x1'
const expectedTxType = '0x2'
await sharedPage.locator('#toInput').fill(expectedRecipient)
await sharedPage.locator('#amountInput').fill(expectedTransferAmount)
await sharedPage.locator('#typeInput').selectOption(expectedTxType)
await sharedPage.click('#submitForm')
const txHashPromise = new Promise((resolve) => {
// Metamask test dApp only console.logs the transaction hash,
// so we must set up a listener before we confirm the tx to capture it
sharedPage.on('console', async (msg) => {
resolve(msg.text()) // Resolve the Promise when txHash is set
})
})
const notificationPage =
await synpressPlaywright.switchToMetamaskNotification()
console.log('Gathering transaction fee estimations...')
const lowFeeEstimate = await getFeeEstimateInGwei(
confirmPageElements.gasOptionLowButton,
'Low',
notificationPage
)
const highFeeEstimate = await getFeeEstimateInGwei(
confirmPageElements.gasOptionHighButton,
'Aggressive',
notificationPage
)
// Medium needs to be last because that's the gas option we want to submit the tx with
const mediumFeeEstimate = await getFeeEstimateInGwei(
confirmPageElements.gasOptionMediumButton,
'Market',
notificationPage
)
console.log('Sent transaction, waiting for confirmation...')
await metamask.confirmTransactionAndWaitForMining()
const txHash = await txHashPromise
const transactionReceiptPromise = new Promise<Record<string, string>>(
(resolve) => {
sharedPage.on('load', async () => {
const responseText = await sharedPage.locator('body > main').innerText()
const transactionReceipt = JSON.parse(
responseText.replace('Response: ', '')
)
resolve(transactionReceipt)
})
}
)
// Metamask test dApp allows us access to the Metamask RPC provider via loading this URL.
// The RPC response will be populated onto the page that's loaded.
// More info here: https://github.com/MetaMask/test-dapp/tree/main#usage
console.log('Retrieving transaction receipt...')
await sharedPage.goto(
`${env.METAMASK_DAPP_URL}/request.html?method=eth_getTransactionReceipt&params=["${txHash}"]`
)
const transactionReceipt = await transactionReceiptPromise
try {
expect(transactionReceipt.status).toBe('0x1')
wasSuccessful = true
await incrementSelfSendTxGauge(true)
} catch (error) {
await incrementSelfSendTxGauge(false)
handledFailure = true
throw error
}
await setFeeEstimationGauge('low', lowFeeEstimate)
await setFeeEstimationGauge('medium', mediumFeeEstimate)
await setFeeEstimationGauge('high', highFeeEstimate)
await setFeeEstimationGauge('actual', getActualTransactionFee(transactionReceipt))
})
const getFeeEstimateInGwei = async (
gasOptionButton: string,
waitForText: 'Low' | 'Market' | 'Aggressive',
notificationPage: Page
) => {
await synpressPlaywright.waitAndClick(
confirmPageElements.editGasFeeButton,
notificationPage
)
await synpressPlaywright.waitAndClick(gasOptionButton, notificationPage)
await synpressPlaywright.waitForText(
`${confirmPageElements.editGasFeeButton} .edit-gas-fee-button__label`,
waitForText,
notificationPage
)
const regexParseEtherValue = /(\d+\.\d+)\s?OPS/
const feeValue = (
await synpressPlaywright.waitAndGetValue(
confirmPageElements.totalLabel,
notificationPage
)
).match(regexParseEtherValue)[1]
return parseInt(parseGwei(feeValue).toString())
}
const getActualTransactionFee = (transactionReceipt: Record<string, string>) => {
const effectiveGasPrice = BigInt(transactionReceipt.effectiveGasPrice)
const l2GasUsed = BigInt(transactionReceipt.gasUsed)
const l1Fee = BigInt(transactionReceipt.l1Fee)
return parseInt(formatGwei(effectiveGasPrice * l2GasUsed + l1Fee, 'wei'))
}
import 'dotenv/config'
import { z } from 'zod'
import { Gauge, Pushgateway, Registry } from 'prom-client'
const env = z
.object({
METRICS_READ_URL: z.string().url(),
METRICS_READ_USERNAME: z.string().optional(),
METRICS_READ_PASSWORD: z.string().optional(),
METRICS_WRITE_URL: z.string().url(),
METRICS_WRITE_TOOL: z.enum(['grafana', 'prometheus-pushgateway']),
METRICS_WRITE_SOURCE: z.string().optional(),
METRICS_WRITE_USERNAME: z.string().optional(),
METRICS_WRITE_PASSWORD: z.string().optional(),
})
.refine(
(data) => {
if (
(data.METRICS_READ_USERNAME && !data.METRICS_READ_PASSWORD) ||
(data.METRICS_READ_PASSWORD && !data.METRICS_READ_USERNAME)
) {
return false
}
if (
(data.METRICS_WRITE_USERNAME && !data.METRICS_WRITE_PASSWORD) ||
(data.METRICS_WRITE_PASSWORD && !data.METRICS_WRITE_USERNAME)
) {
return false
}
return true
},
{
message:
'Both username and password must be provided together for read or write metrics',
}
)
.refine(
(data) => {
if (
data.METRICS_WRITE_TOOL === 'grafana' &&
data.METRICS_WRITE_SOURCE === undefined
)
return false
return true
},
{
message:
'Writing to Grafana requires a source, please specify one using METRICS_WRITE_SOURCE env',
}
)
.parse(process.env)
const selfSendTransactionMetricName = 'metamask_self_send_metric'
const feeEstimateLowMetricName = 'metamask_self_send_fee_estimation_low_metric'
const feeEstimateMediumMetricName =
'metamask_self_send_fee_estimation_medium_metric'
const feeEstimateHighMetricName =
'metamask_self_send_fee_estimation_high_metric'
const feeEstimateActualMetricName =
'metamask_self_send_fee_estimation_actual_metric'
const selfSendRegistry = new Registry()
const feeEstimateLowRegistry = new Registry()
const feeEstimateMediumRegistry = new Registry()
const feeEstimateHighRegistry = new Registry()
const feeEstimateActualRegistry = new Registry()
const selfSendGauge = new Gauge({
name: selfSendTransactionMetricName,
help: 'A gauge signifying the number of transactions sent with Metamask',
registers: [selfSendRegistry],
})
const feeEstimateLowGauge = new Gauge({
name: feeEstimateLowMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for Low transaction speed',
registers: [feeEstimateLowRegistry],
})
const feeEstimateMediumGauge = new Gauge({
name: feeEstimateMediumMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for Medium transaction speed',
registers: [feeEstimateMediumRegistry],
})
const feeEstimateHighGauge = new Gauge({
name: feeEstimateHighMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for High transaction speed',
registers: [feeEstimateHighRegistry],
})
const feeEstimateActualGauge = new Gauge({
name: feeEstimateActualMetricName,
help: 'A gauge signifying the latest actual transaction fee',
registers: [feeEstimateActualRegistry],
})
const queryMetricsReadUrl = async (
query: string = selfSendTransactionMetricName
) => {
const metricsReadRequest = `${env.METRICS_READ_URL}?query=${query}`
const response = await fetch(metricsReadRequest, {
headers:
env.METRICS_READ_USERNAME === undefined
? undefined
: {
Authorization: `Bearer ${env.METRICS_READ_USERNAME}:${env.METRICS_READ_PASSWORD}`,
},
})
if (!response.ok) {
console.error(response.status)
console.error(response.statusText)
throw new Error(`Failed to fetch metric from: ${metricsReadRequest}`)
}
return response
}
export const getSelfSendGaugeValue = async () => {
const response = await queryMetricsReadUrl(selfSendTransactionMetricName)
// The following is an example of the expected response from queryMetricsReadUrl
// for response.json().data.result[0]:
// [
// {
// metric: {
// __name__: 'metamask_self_send',
// exported_job: 'metamask_self_send_tx_count',
// instance: 'pushgateway:9091',
// job: 'pushgateway'
// },
// value: [ 1695847795.646, '-1' ]
// }
// ]
try {
const responseJson = z
.object({
data: z.object({
result: z.array(
z.object({
value: z.tuple([
z.number(),
z.number().or(z.string().transform((value) => parseInt(value))),
]),
})
),
}),
})
.parse(await response.json())
return responseJson.data.result[0].value[1]
} catch (error) {
if (
error.message === "Cannot read properties of undefined (reading 'value')"
) {
console.warn(
`No data found for metric ${selfSendTransactionMetricName} in ${env.METRICS_READ_URL}`
)
return undefined
}
throw error
}
}
const pushMetricsGrafana = (metricName: string, valueToSetTo: number) =>
pushMetricsWriteUrl(
`${metricName},source=${
env.METRICS_WRITE_SOURCE
} metric=${valueToSetTo}`
)
const pushMetricsPrometheusPushgateway = (registry: Registry) => {
const pushGateway = new Pushgateway(env.METRICS_WRITE_URL, undefined, registry)
return pushGateway.pushAdd({ jobName: 'ufm-metamask-metric-push'})
}
const pushMetricsWriteUrl = async (requestBody: string) => {
const response = await fetch(env.METRICS_WRITE_URL, {
method: 'POST',
headers:
env.METRICS_WRITE_USERNAME === undefined
? undefined
: {
Authorization: `Bearer ${env.METRICS_WRITE_USERNAME}:${env.METRICS_WRITE_PASSWORD}`,
},
body: requestBody,
})
if (!response.ok) {
console.error(response.status)
console.error(response.statusText)
throw new Error(`Failed to push metric to: ${env.METRICS_WRITE_URL}`)
}
return response
}
export const setSelfSendTxGauge = async (valueToSetTo: number) => {
console.log(`Setting ${selfSendTransactionMetricName} to ${valueToSetTo}...`)
selfSendGauge.set(valueToSetTo)
env.METRICS_WRITE_TOOL === 'grafana'
? await pushMetricsGrafana(selfSendTransactionMetricName.replace('_metric', ''), valueToSetTo)
: await pushMetricsPrometheusPushgateway(selfSendRegistry)
}
export const incrementSelfSendTxGauge = async (isSuccess: boolean) => {
const currentMetricValue = (await getSelfSendGaugeValue()) ?? 0
let newMetricValue: number
if (isSuccess) {
newMetricValue = currentMetricValue >= 0 ? currentMetricValue + 1 : 1
} else {
newMetricValue = currentMetricValue < 0 ? currentMetricValue - 1 : -1
}
console.log(
`Current value of ${selfSendTransactionMetricName} is ${currentMetricValue}, incrementing to ${newMetricValue}...`
)
await setSelfSendTxGauge(newMetricValue)
}
export const setFeeEstimationGauge = async (
txSpeed: 'low' | 'medium' | 'high' | 'actual',
fee: number
) => {
let metricNameGrafana: string
let prometheusRegistry: Registry
switch (txSpeed) {
case 'low':
feeEstimateLowGauge.set(fee)
metricNameGrafana = feeEstimateLowMetricName
prometheusRegistry = feeEstimateLowRegistry
break
case 'medium':
feeEstimateMediumGauge.set(fee)
metricNameGrafana = feeEstimateMediumMetricName
prometheusRegistry = feeEstimateMediumRegistry
break
case 'high':
feeEstimateHighGauge.set(fee)
metricNameGrafana = feeEstimateHighMetricName
prometheusRegistry = feeEstimateHighRegistry
break
case 'actual':
feeEstimateActualGauge.set(fee)
metricNameGrafana = feeEstimateActualMetricName
prometheusRegistry = feeEstimateActualRegistry
break
default:
throw new Error(`unsupported transaction speed given: ${txSpeed}`)
}
metricNameGrafana = metricNameGrafana.replace('_metric', '')
console.log(`Setting ${metricNameGrafana} to ${fee}...`)
env.METRICS_WRITE_TOOL === 'grafana'
? await pushMetricsGrafana(metricNameGrafana, fee)
: await pushMetricsPrometheusPushgateway(prometheusRegistry)
}
import 'dotenv/config'
import {
type BrowserContext,
chromium,
expect,
test as base,
} from '@playwright/test'
import metamask from '@synthetixio/synpress/commands/metamask.js'
import helpers from '@synthetixio/synpress/helpers.js'
const { initialSetup } = metamask
const { prepareMetamask } = helpers
export const testWithSynpress = base.extend<{
context: BrowserContext
}>({
context: async ({}, use) => {
// required for synpress
global.expect = expect
// download metamask
const metamaskPath = await prepareMetamask(
process.env.METAMASK_VERSION || '10.25.0',
)
// prepare browser args
const browserArgs = [
`--disable-extensions-except=${metamaskPath}`,
`--load-extension=${metamaskPath}`,
'--remote-debugging-port=9222',
]
if (process.env.CI) {
browserArgs.push('--disable-gpu')
}
if (process.env.HEADLESS_MODE) {
browserArgs.push('--headless=new')
}
// launch browser
const context = await chromium.launchPersistentContext('', {
headless: false,
args: browserArgs,
})
// wait for metamask
await context.pages()[0].waitForTimeout(3000)
// setup metamask
await initialSetup(chromium, {
secretWordsOrPrivateKey: process.env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY,
network: process.env.METAMASK_NETWORK,
password: process.env.METAMASK_PASSWORD,
enableAdvancedSettings: true,
})
await use(context)
},
})
export { expect }
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"baseUrl": "./src",
"noEmit": true,
"target": "ESNext",
"lib": ["esnext"],
"module": "esnext",
"moduleResolution": "Node",
"isolatedModules": true,
"allowUnreachableCode": false,
"skipLibCheck": false,
"allowUnusedLabels": false,
"alwaysStrict": true,
"exactOptionalPropertyTypes": true,
"noFallthroughCasesInSwitch": true,
"noImplicitAny": true,
"noImplicitReturns": true,
"noImplicitOverride": true,
"noImplicitThis": true,
"forceConsistentCasingInFileNames": true,
"verbatimModuleSyntax": true,
"noPropertyAccessFromIndexSignature": true,
"noUncheckedIndexedAccess": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"strict": true
},
"include": ["./src"]
}
global:
scrape_interval: 2s
scrape_configs:
- job_name: 'pushgateway'
static_configs:
- targets: ['pushgateway:9091']
\ No newline at end of file