Commit a02392b0 authored by Ben Wilson, committed by GitHub

Add rpc-proxy service for whitelisting JSON RPC methods to the sequencer. (#945)

* Add healthcheck endpoint for rpc-proxy
* Add ethereum-nginx-proxy source
* Update README and docker image build

* Check ETH_CALLS_ALLOWED is set, clean up comments, remove old Dockerfile
parent e52ccd98
@@ -92,6 +92,14 @@ jobs:
          push: true
          tags: ethereumoptimism/l2geth:${{ needs.release.outputs.l2geth }}
      - name: Publish rpc-proxy
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./ops/docker/Dockerfile.rpc-proxy
          push: true
          tags: ethereumoptimism/rpc-proxy:${{ needs.release.outputs.l2geth }}
  # pushes the base builder image to dockerhub
  builder:
    name: Prepare the base builder image for the services
......
@@ -14,6 +14,8 @@ The base `docker-compose.yml` file will start the required components for a full
Supplementing the base configuration is an additional metrics-enabling file, `docker-compose-metrics.yml`. Adding this configuration to the stack enables metric emission for l2geth and starts grafana (for metric visualisation) and influxdb (for metric collection) instances.
Also available for testing is the `rpc-proxy` service in the `docker-compose-rpc-proxy.yml` file. It can be used to restrict which RPC methods are allowed to reach the Sequencer.
The base stack can be started and stopped with a command like this (there is no need to specify the default `docker-compose.yml`):
```
docker-compose \
......
version: "3"
services:
  rpc-proxy:
    depends_on:
      - l1_chain
      - deployer
      - l2geth
    image: rpc-proxy
    build:
      context: ..
      dockerfile: ./ops/docker/Dockerfile.rpc-proxy
    environment:
      SEQUENCER: l2geth:8545
      ETH_CALLS_ALLOWED: eth_blockNumber,eth_sendRawTransaction
    ports:
      - 9546:8080
      - 9145:9145
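Once this override is layered onto the base stack, the proxy can be exercised from the host. A minimal sketch, assuming the standard `docker-compose -f` layering is run from the directory containing these compose files, with the `9546:8080` port mapping and the default `ETH_CALLS_ALLOWED` list shown above (flags are illustrative):

```
docker-compose \
  -f docker-compose.yml \
  -f docker-compose-rpc-proxy.yml \
  up --detach

# Health endpoint served directly by nginx
curl -s http://localhost:9546/healthz

# Whitelisted method, proxied through to the sequencer
curl -s -X POST http://localhost:9546/ \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}'
```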
@@ -173,3 +173,4 @@ services:
      URL: http://deployer:8081/addresses.json
      ENABLE_GAS_REPORT: 1
      NO_NETWORK: 1
FROM openresty/openresty:buster
LABEL maintainer="Optimistic Systems <systems@optimism.io>"
ARG GOTEMPLATE_VERSION=v3.9.0
RUN DEBIAN_FRONTEND=noninteractive apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
       openresty-opm \
    && opm get knyar/nginx-lua-prometheus
RUN curl -o /usr/local/bin/gomplate \
    -sSL https://github.com/hairyhenderson/gomplate/releases/download/$GOTEMPLATE_VERSION/gomplate_linux-amd64-slim \
    && chmod +x /usr/local/bin/gomplate
RUN mkdir -p /var/log/nginx/ \
    && ln -sf /dev/stdout /var/log/nginx/access.log \
    && ln -sf /dev/stderr /var/log/nginx/error.log
COPY ./ops/docker/rpc-proxy/eth-jsonrpc-access.lua /usr/local/openresty/nginx/eth-jsonrpc-access.lua
COPY ./ops/docker/rpc-proxy/nginx.template.conf /docker-entrypoint.d/nginx.template.conf
COPY ./ops/docker/rpc-proxy/docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["/docker-entrypoint.sh"]
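For experimenting with the image outside of docker-compose, it can also be built and run by hand. A sketch, assuming the build is run from the repository root (the compose file above uses `..` as its build context) and that a sequencer is reachable as `l2geth:8545` on the attached Docker network; names and ports are illustrative:

```
docker build -f ./ops/docker/Dockerfile.rpc-proxy -t rpc-proxy .

docker run --rm \
  -e SEQUENCER=l2geth:8545 \
  -e ETH_CALLS_ALLOWED=eth_blockNumber,eth_sendRawTransaction \
  -p 9546:8080 -p 9145:9145 \
  rpc-proxy
```

The entrypoint script below refuses to start unless both environment variables are set.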
#!/bin/bash
set -eo pipefail
if [ -z "$SEQUENCER" ];then
echo "SEQUENCER env must be set, exiting"
exit 1
fi
if [ -z "$ETH_CALLS_ALLOWED" ];then
echo "ETH_CALLS_ALLOWED env must be set, exiting"
exit 1
fi
gomplate -f /docker-entrypoint.d/nginx.template.conf > /usr/local/openresty/nginx/conf/nginx.conf
cat /usr/local/openresty/nginx/conf/nginx.conf
exec openresty "$@"
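The entrypoint's only templating step is the gomplate render, which can be previewed outside the container. A sketch, assuming gomplate is installed locally and pointed at the template file from this commit:

```
SEQUENCER=l2geth:8545 \
ETH_CALLS_ALLOWED=eth_blockNumber,eth_sendRawTransaction \
gomplate -f ./ops/docker/rpc-proxy/nginx.template.conf
```

In the rendered output, `SEQUENCER` lands in the `upstream sequencer` block and `ETH_CALLS_ALLOWED` becomes the `$jsonrpc_whitelist` variable consumed by the Lua access script that follows.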
-- Source: https://github.com/adetante/ethereum-nginx-proxy
local cjson = require('cjson')
local function empty(s)
  return s == nil or s == ''
end

local function split(s)
  local res = {}
  local i = 1
  for v in string.gmatch(s, "([^,]+)") do
    res[i] = v
    i = i + 1
  end
  return res
end

local function contains(arr, val)
  for i, v in ipairs(arr) do
    if v == val then
      return true
    end
  end
  return false
end

-- parse conf
local blacklist, whitelist = nil
if not empty(ngx.var.jsonrpc_blacklist) then
  blacklist = split(ngx.var.jsonrpc_blacklist)
end
if not empty(ngx.var.jsonrpc_whitelist) then
  whitelist = split(ngx.var.jsonrpc_whitelist)
end

-- check conf
if blacklist ~= nil and whitelist ~= nil then
  ngx.log(ngx.ERR, 'invalid conf: jsonrpc_blacklist and jsonrpc_whitelist are both set')
  ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
  return
end

-- get request content
ngx.req.read_body()

-- try to parse the body as JSON
local success, body = pcall(cjson.decode, ngx.var.request_body);
if not success then
  ngx.log(ngx.ERR, 'invalid JSON request')
  ngx.exit(ngx.HTTP_BAD_REQUEST)
  return
end

local method = body['method']
local version = body['jsonrpc']

-- check we have a method and a version
if empty(method) or empty(version) then
  ngx.log(ngx.ERR, 'no method and/or jsonrpc attribute')
  ngx.exit(ngx.HTTP_BAD_REQUEST)
  return
end

metric_sequencer_requests:inc(1, {method, ngx.var.server_name, ngx.var.status})

-- check the version is supported
if version ~= "2.0" then
  ngx.log(ngx.ERR, 'jsonrpc version not supported: ' .. version)
  ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
  return
end

-- if whitelist is configured, check that the method is whitelisted
if whitelist ~= nil then
  if not contains(whitelist, method) then
    ngx.log(ngx.ERR, 'jsonrpc method is not whitelisted: ' .. method)
    ngx.exit(ngx.HTTP_FORBIDDEN)
    return
  end
end

-- if blacklist is configured, check that the method is not blacklisted
if blacklist ~= nil then
  if contains(blacklist, method) then
    ngx.log(ngx.ERR, 'jsonrpc method is blacklisted: ' .. method)
    ngx.exit(ngx.HTTP_FORBIDDEN)
    return
  end
end
return
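The rejection paths above map to HTTP status codes that are easy to check by hand. A sketch, again assuming the proxy is exposed on `localhost:9546` as in the compose file:

```
# Method not on the whitelist -> 403 Forbidden
curl -s -o /dev/null -w '%{http_code}\n' -X POST http://localhost:9546/ \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc":"2.0","method":"eth_getBalance","params":[],"id":1}'

# Body that is not valid JSON -> 400 Bad Request
curl -s -o /dev/null -w '%{http_code}\n' -X POST http://localhost:9546/ \
  -d 'not-json'
```

A missing `method` or `jsonrpc` field also returns 400, and a `jsonrpc` version other than `2.0` returns 500. The nginx template that wires this script into the proxy follows.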
worker_processes 5;
daemon off;
error_log /var/log/nginx/error.log;
worker_rlimit_nofile 8192;
pcre_jit on;
events {
  worker_connections 4096;
}

http {
  include mime.types;
  index index.html;

  # See Move default writable paths to a dedicated directory (#119)
  # https://github.com/openresty/docker-openresty/issues/119
  client_body_temp_path /var/run/openresty/nginx-client-body;
  proxy_temp_path /var/run/openresty/nginx-proxy;
  fastcgi_temp_path /var/run/openresty/nginx-fastcgi;
  uwsgi_temp_path /var/run/openresty/nginx-uwsgi;
  scgi_temp_path /var/run/openresty/nginx-scgi;

  keepalive_timeout 0;
  default_type application/octet-stream;

  log_format main '$remote_addr - $remote_user [$time_local] $status '
    '"$request" $body_bytes_sent "$http_referer" '
    '"$http_user_agent" "$http_x_forwarded_for"';
  access_log /var/log/nginx/access.log main;
  sendfile on;
  tcp_nopush on;

  lua_shared_dict prometheus_metrics 10M;

  init_worker_by_lua_block {
    prometheus = require("prometheus").init("prometheus_metrics")
    metric_requests = prometheus:counter(
      "nginx_http_requests_total", "Number of HTTP requests", {"host", "status"})
    metric_sequencer_requests = prometheus:counter(
      "nginx_eth_sequencer_requests", "Number of requests going to the sequencer", {"method", "host", "status"})
    metric_replica_requests = prometheus:counter(
      "nginx_eth_replica_requests", "Number of requests going to the replicas", {"host", "status"})
    metric_latency = prometheus:histogram(
      "nginx_http_request_duration_seconds", "HTTP request latency", {"host"})
    metric_connections = prometheus:gauge(
      "nginx_http_connections", "Number of HTTP connections", {"state"})
  }

  log_by_lua_block {
    metric_requests:inc(1, {ngx.var.server_name, ngx.var.status})
    metric_latency:observe(tonumber(ngx.var.request_time), {ngx.var.server_name})
  }

  upstream sequencer {
    server {{env.Getenv "SEQUENCER"}};
  }

  server { # RPC proxy server
    listen 8080;

    location = /healthz {
      return 200 'healthz';
    }

    location / {
      set $jsonrpc_whitelist {{env.Getenv "ETH_CALLS_ALLOWED"}};
      access_by_lua_file 'eth-jsonrpc-access.lua';
      proxy_pass http://sequencer;
    }
  }

  server { # Metrics server
    listen 9145;

    location /metrics {
      content_by_lua_block {
        metric_connections:set(ngx.var.connections_reading, {"reading"})
        metric_connections:set(ngx.var.connections_waiting, {"waiting"})
        metric_connections:set(ngx.var.connections_writing, {"writing"})
        prometheus:collect()
      }
    }
  }
}
\ No newline at end of file
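The metrics server in the template above listens on 9145, which the compose file maps straight through, so the Prometheus counters and the connection gauge can be scraped directly. A sketch:

```
# Full scrape
curl -s http://localhost:9145/metrics

# Just the per-method sequencer counter
curl -s http://localhost:9145/metrics | grep nginx_eth_sequencer_requests
```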