Commit 13c72ec5 authored by Peeeekay, committed by GitHub

fix: make this work with kurtosis 0.65.0 (#73)

Can be merged after `service-name` gets merged.
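
The change is mechanical across the package: Kurtosis 0.65.0 replaces the untyped `struct(...)` recipes keyed by `service_id` with typed recipe builtins keyed by `service_name`, dropping the now-implied `method`/`content_type` fields for GET requests. A rough before/after sketch of the pattern this diff applies everywhere (illustrative snippet, not lifted verbatim from any single file):

```python
# Before (pre-0.65.0): plain struct recipe addressed by service_id
recipe = struct(
    service_id = service_id,
    method = "GET",
    endpoint = "/eth/v1/node/health",
    content_type = "application/json",
    port_id = port_id,
)

# After (0.65.0): typed GetHttpRequestRecipe addressed by service_name;
# the HTTP method and content type are implied by the recipe type
recipe = GetHttpRequestRecipe(
    service_name = service_name,
    endpoint = "/eth/v1/node/health",
    port_id = port_id,
)
response = plan.wait(recipe, "code", "IN", [200, 206, 503])

# Exec commands follow the same shape: struct(service_id=...) becomes
# ExecRecipe(service_name=...); the command list here is illustrative
result = plan.exec(ExecRecipe(service_name = service_name, command = ["sh", "-c", "echo ready"]))
```

JSON-RPC calls keep an explicit `body` and `content_type` via `PostHttpRequestRecipe`, as in the `get_enode_enr_for_node` / `get_enode_for_node` hunks below.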
Co-authored-by: Gyanendra Mishra <anomaly.the@gmail.com>
parent 968f0734
@@ -77,12 +77,10 @@ def run(plan, args):
 if args_with_right_defaults.wait_for_finalization:
 plan.print("Waiting for the first finalized epoch")
 first_cl_client = all_cl_client_contexts[0]
-first_cl_client_id = first_cl_client.beacon_service_id
+first_client_beacon_name = first_cl_client.beacon_service_name
-epoch_recipe = struct(
+epoch_recipe = GetHttpRequestRecipe(
-service_id = first_cl_client_id,
+service_name = first_client_beacon_name,
-method= "GET",
 endpoint = "/eth/v1/beacon/states/head/finality_checkpoints",
-content_type = "application/json",
 port_id = HTTP_PORT_ID_FOR_FACT,
 extract = {
 "finalized_epoch": ".data.finalized.epoch"
...
 shared_utils = import_module("github.com/kurtosis-tech/eth2-package/src/shared_utils/shared_utils.star")
-SERVICE_ID = "forkmon"
+SERVICE_NAME = "forkmon"
 IMAGE_NAME = "ralexstokes/ethereum_consensus_monitor:latest"
 HTTP_PORT_ID = "http"
@@ -40,7 +40,7 @@ def launch_forkmon(
 config = get_config(config_files_artifact_name)
-plan.add_service(SERVICE_ID, config)
+plan.add_service(SERVICE_NAME, config)
 def get_config(config_files_artifact_name):
...
 shared_utils = import_module("github.com/kurtosis-tech/eth2-package/src/shared_utils/shared_utils.star")
 static_files = import_module("github.com/kurtosis-tech/eth2-package/src/static_files/static_files.star")
-SERVICE_ID = "grafana"
+SERVICE_NAME = "grafana"
 IMAGE_NAME = "grafana/grafana-enterprise:9.2.3"
@@ -30,7 +30,7 @@ def launch_grafana(plan, datasource_config_template, dashboard_providers_config_
 config = get_config(grafana_config_artifacts_uuid, grafana_dashboards_artifacts_uuid)
-plan.add_service(SERVICE_ID, config)
+plan.add_service(SERVICE_NAME, config)
 def get_grafana_config_dir_artifact_uuid(plan, datasource_config_template, dashboard_providers_config_template, prometheus_private_url):
...
-def new_cl_client_context(client_name, enr, ip_addr, http_port_num, cl_nodes_metrics_info, beacon_service_id):
+def new_cl_client_context(client_name, enr, ip_addr, http_port_num, cl_nodes_metrics_info, beacon_service_name):
 return struct(
 client_name = client_name,
 enr = enr,
 ip_addr = ip_addr,
 http_port_num = http_port_num,
 cl_nodes_metrics_info = cl_nodes_metrics_info,
-beacon_service_id = beacon_service_id
+beacon_service_name = beacon_service_name
 )
-def wait_for_healthy(plan, service_id, port_id):
+def wait_for_healthy(plan, service_name, port_id):
-recipe = struct(
+recipe = GetHttpRequestRecipe(
-service_id = service_id,
+service_name = service_name,
-method= "GET",
 endpoint = "/eth/v1/node/health",
-content_type = "application/json",
 port_id = port_id
 )
 return plan.wait(recipe, "code", "IN", [200, 206, 503])
@@ -40,8 +40,8 @@ VALIDATOR_METRICS_PORT_NUM = 5064
 METRICS_PATH = "/metrics"
-BEACON_SUFFIX_SERVICE_ID = "beacon"
+BEACON_SUFFIX_SERVICE_NAME = "beacon"
-VALIDATOR_SUFFIX_SERVICE_ID = "validator"
+VALIDATOR_SUFFIX_SERVICE_NAME = "validator"
 PRIVATE_IP_ADDRESS_PLACEHOLDER = "KURTOSIS_IP_ADDR_PLACEHOLDER"
@@ -68,7 +68,7 @@ LIGHTHOUSE_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -79,8 +79,8 @@ def launch(
 extra_beacon_params,
 extra_validator_params):
-beacon_node_service_id = "{0}-{1}".format(service_id, BEACON_SUFFIX_SERVICE_ID)
+beacon_node_service_name = "{0}-{1}".format(service_name, BEACON_SUFFIX_SERVICE_NAME)
-validator_node_service_id = "{0}-{1}".format(service_id, VALIDATOR_SUFFIX_SERVICE_ID)
+validator_node_service_name = "{0}-{1}".format(service_name, VALIDATOR_SUFFIX_SERVICE_NAME)
 log_level = parse_input.get_client_log_level_or_default(participant_log_level, global_log_level, LIGHTHOUSE_LOG_LEVELS)
@@ -95,9 +95,9 @@ def launch(
 extra_beacon_params,
 )
-beacon_service = plan.add_service(beacon_node_service_id, beacon_config)
+beacon_service = plan.add_service(beacon_node_service_name, beacon_config)
-cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_id, BEACON_HTTP_PORT_ID)
+cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_name, BEACON_HTTP_PORT_ID)
 beacon_http_port = beacon_service.ports[BEACON_HTTP_PORT_ID]
@@ -114,14 +114,12 @@ def launch(
 extra_validator_params,
 )
-validator_service = plan.add_service(validator_node_service_id, validator_config)
+validator_service = plan.add_service(validator_node_service_name, validator_config)
 # TODO(old) add validator availability using the validator API: https://ethereum.github.io/beacon-APIs/?urls.primaryName=v1#/ValidatorRequiredApi | from eth2-merge-kurtosis-module
-beacon_node_identity_recipe = struct(
+beacon_node_identity_recipe = GetHttpRequestRecipe(
-service_id = beacon_node_service_id,
+service_name = beacon_node_service_name,
-method= "GET",
 endpoint = "/eth/v1/node/identity",
-content_type = "application/json",
 port_id = BEACON_HTTP_PORT_ID,
 extract = {
 "enr": ".data.enr"
@@ -135,8 +133,8 @@ def launch(
 validator_metrics_port = validator_service.ports[VALIDATOR_METRICS_PORT_ID]
 validator_metrics_url = "{0}:{1}".format(validator_service.ip_address, validator_metrics_port.number)
-beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(beacon_node_service_id, METRICS_PATH, beacon_metrics_url)
+beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(beacon_node_service_name, METRICS_PATH, beacon_metrics_url)
-validator_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(validator_node_service_id, METRICS_PATH, validator_metrics_url)
+validator_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(validator_node_service_name, METRICS_PATH, validator_metrics_url)
 nodes_metrics_info = [beacon_node_metrics_info, validator_node_metrics_info]
 return cl_client_context.new_cl_client_context(
@@ -145,7 +143,7 @@ def launch(
 beacon_service.ip_address,
 BEACON_HTTP_PORT_NUM,
 nodes_metrics_info,
-beacon_node_service_id,
+beacon_node_service_name,
 )
...
@@ -24,8 +24,8 @@ HTTP_PORT_NUM = 4000
 METRICS_PORT_NUM = 8008
 VALIDATOR_METRICS_PORT_NUM = 5064
-BEACON_SUFFIX_SERVICE_ID = "beacon"
+BEACON_SUFFIX_SERVICE_NAME = "beacon"
-VALIDATOR_SUFFIX_SERVICE_ID = "validator"
+VALIDATOR_SUFFIX_SERVICE_NAME = "validator"
 METRICS_PATH = "/metrics"
@@ -52,7 +52,7 @@ LODESTAR_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -63,8 +63,8 @@ def launch(
 extra_beacon_params,
 extra_validator_params):
-beacon_node_service_id = "{0}-{1}".format(service_id, BEACON_SUFFIX_SERVICE_ID)
+beacon_node_service_name = "{0}-{1}".format(service_name, BEACON_SUFFIX_SERVICE_NAME)
-validator_node_service_id = "{0}-{1}".format(service_id, VALIDATOR_SUFFIX_SERVICE_ID)
+validator_node_service_name = "{0}-{1}".format(service_name, VALIDATOR_SUFFIX_SERVICE_NAME)
 log_level = parse_input.get_client_log_level_or_default(participant_log_level, global_log_level, LODESTAR_LOG_LEVELS)
@@ -79,18 +79,18 @@ def launch(
 extra_beacon_params,
 )
-beacon_service = plan.add_service(beacon_node_service_id, beacon_config)
+beacon_service = plan.add_service(beacon_node_service_name, beacon_config)
 beacon_http_port = beacon_service.ports[HTTP_PORT_ID]
-cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_id, HTTP_PORT_ID)
+cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_name, HTTP_PORT_ID)
 # Launch validator node
 beacon_http_url = "http://{0}:{1}".format(beacon_service.ip_address, beacon_http_port.number)
 validator_config = get_validator_config(
-validator_node_service_id,
+validator_node_service_name,
 launcher.cl_genesis_data,
 image,
 log_level,
@@ -100,15 +100,13 @@ def launch(
 extra_validator_params,
 )
-validator_service = plan.add_service(validator_node_service_id, validator_config)
+validator_service = plan.add_service(validator_node_service_name, validator_config)
 # TODO(old) add validator availability using the validator API: https://ethereum.github.io/beacon-APIs/?urls.primaryName=v1#/ValidatorRequiredApi | from eth2-merge-kurtosis-module
-beacon_node_identity_recipe = struct(
+beacon_node_identity_recipe = GetHttpRequestRecipe(
-service_id = beacon_node_service_id,
+service_name = beacon_node_service_name,
-method= "GET",
 endpoint = "/eth/v1/node/identity",
-content_type = "application/json",
 port_id = HTTP_PORT_ID,
 extract = {
 "enr": ".data.enr"
@@ -119,7 +117,7 @@ def launch(
 beacon_metrics_port = beacon_service.ports[METRICS_PORT_ID]
 beacon_metrics_url = "{0}:{1}".format(beacon_service.ip_address, beacon_metrics_port.number)
-beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_id, METRICS_PATH, beacon_metrics_url)
+beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_name, METRICS_PATH, beacon_metrics_url)
 nodes_metrics_info = [beacon_node_metrics_info]
 return cl_client_context.new_cl_client_context(
@@ -128,7 +126,7 @@ def launch(
 beacon_service.ip_address,
 HTTP_PORT_NUM,
 nodes_metrics_info,
-beacon_node_service_id
+beacon_node_service_name
 )
@@ -210,7 +208,7 @@ def get_beacon_config(
 def get_validator_config(
-service_id,
+service_name,
 genesis_data,
 image,
 log_level,
@@ -219,7 +217,7 @@ def get_validator_config(
 mev_boost_context,
 extra_params):
-root_dirpath = shared_utils.path_join(CONSENSUS_DATA_DIRPATH_ON_SERVICE_CONTAINER, service_id)
+root_dirpath = shared_utils.path_join(CONSENSUS_DATA_DIRPATH_ON_SERVICE_CONTAINER, service_name)
 genesis_config_filepath = shared_utils.path_join(GENESIS_DATA_MOUNT_DIRPATH_ON_SERVICE_CONTAINER, genesis_data.config_yml_rel_filepath)
 validator_keys_dirpath = shared_utils.path_join(VALIDATOR_KEYS_MOUNT_DIRPATH_ON_SERVICE_CONTAINER, node_keystore_files.raw_keys_relative_dirpath)
...
@@ -62,7 +62,7 @@ ENTRYPOINT_ARGS = ["sh", "-c"]
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -79,15 +79,13 @@ def launch(
 config = get_config(launcher.cl_genesis_data, image, bootnode_context, el_client_context, mev_boost_context, log_level, node_keystore_files, extra_params)
-nimbus_service = plan.add_service(service_id, config)
+nimbus_service = plan.add_service(service_name, config)
-cl_node_health_checker.wait_for_healthy(plan, service_id, HTTP_PORT_ID)
+cl_node_health_checker.wait_for_healthy(plan, service_name, HTTP_PORT_ID)
-cl_node_identity_recipe = struct(
+cl_node_identity_recipe = GetHttpRequestRecipe(
-service_id = service_id,
+service_name = service_name,
-method= "GET",
 endpoint = "/eth/v1/node/identity",
-content_type = "application/json",
 port_id = HTTP_PORT_ID,
 extract = {
 "enr": ".data.enr"
@@ -98,7 +96,7 @@ def launch(
 metrics_port = nimbus_service.ports[METRICS_PORT_ID]
 metrics_url = "{0}:{1}".format(nimbus_service.ip_address, metrics_port.number)
-nimbus_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_id, METRICS_PATH, metrics_url)
+nimbus_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_name, METRICS_PATH, metrics_url)
 nodes_metrics_info = [nimbus_node_metrics_info]
@@ -108,7 +106,7 @@ def launch(
 nimbus_service.ip_address,
 HTTP_PORT_NUM,
 nodes_metrics_info,
-service_id,
+service_name,
 )
...
@@ -31,8 +31,8 @@ HTTP_PORT_NUM = 3500
 BEACON_MONITORING_PORT_NUM = 8080
 VALIDATOR_MONITORING_PORT_NUM = 8081
-BEACON_SUFFIX_SERVICE_ID = "beacon"
+BEACON_SUFFIX_SERVICE_NAME = "beacon"
-VALIDATOR_SUFFIX_SERVICE_ID = "validator"
+VALIDATOR_SUFFIX_SERVICE_NAME = "validator"
 MIN_PEERS = 1
@@ -64,7 +64,7 @@ PRYSM_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 images,
 participant_log_level,
 global_log_level,
@@ -87,8 +87,8 @@ def launch(
 fail("An empty validator image was provided")
-beacon_node_service_id = "{0}-{1}".format(service_id, BEACON_SUFFIX_SERVICE_ID)
+beacon_node_service_name = "{0}-{1}".format(service_name, BEACON_SUFFIX_SERVICE_NAME)
-validator_node_service_id = "{0}-{1}".format(service_id, VALIDATOR_SUFFIX_SERVICE_ID)
+validator_node_service_name = "{0}-{1}".format(service_name, VALIDATOR_SUFFIX_SERVICE_NAME)
 log_level = parse_input.get_client_log_level_or_default(participant_log_level, global_log_level, PRYSM_LOG_LEVELS)
@@ -102,9 +102,9 @@ def launch(
 extra_beacon_params,
 )
-beacon_service = plan.add_service(beacon_node_service_id, beacon_config)
+beacon_service = plan.add_service(beacon_node_service_name, beacon_config)
-cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_id, HTTP_PORT_ID)
+cl_node_health_checker.wait_for_healthy(plan, beacon_node_service_name, HTTP_PORT_ID)
 beacon_http_port = beacon_service.ports[HTTP_PORT_ID]
@@ -115,7 +115,7 @@ def launch(
 validator_config = get_validator_config(
 launcher.genesis_data,
 validator_image,
-validator_node_service_id,
+validator_node_service_name,
 log_level,
 beacon_rpc_endpoint,
 beacon_http_endpoint,
@@ -126,14 +126,12 @@ def launch(
 launcher.prysm_password_artifact_uuid
 )
-validator_service = plan.add_service(validator_node_service_id, validator_config)
+validator_service = plan.add_service(validator_node_service_name, validator_config)
 # TODO(old) add validator availability using the validator API: https://ethereum.github.io/beacon-APIs/?urls.primaryName=v1#/ValidatorRequiredApi | from eth2-merge-kurtosis-module
-beacon_node_identity_recipe = struct(
+beacon_node_identity_recipe = GetHttpRequestRecipe(
-service_id = beacon_node_service_id,
+service_name = beacon_node_service_name,
-method= "GET",
 endpoint = "/eth/v1/node/identity",
-content_type = "application/json",
 port_id = HTTP_PORT_ID,
 extract = {
 "enr": ".data.enr"
@@ -147,8 +145,8 @@ def launch(
 validator_metrics_port = validator_service.ports[VALIDATOR_MONITORING_PORT_ID]
 validator_metrics_url = "{0}:{1}".format(validator_service.ip_address, validator_metrics_port.number)
-beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(beacon_node_service_id, METRICS_PATH, beacon_metrics_url)
+beacon_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(beacon_node_service_name, METRICS_PATH, beacon_metrics_url)
-validator_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(validator_node_service_id, METRICS_PATH, validator_metrics_url)
+validator_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(validator_node_service_name, METRICS_PATH, validator_metrics_url)
 nodes_metrics_info = [beacon_node_metrics_info, validator_node_metrics_info]
@@ -158,7 +156,7 @@ def launch(
 beacon_service.ip_address,
 HTTP_PORT_NUM,
 nodes_metrics_info,
-beacon_node_service_id
+beacon_node_service_name
 )
@@ -230,7 +228,7 @@ def get_beacon_config(
 def get_validator_config(
 genesis_data,
 validator_image,
-service_id,
+service_name,
 log_level,
 beacon_rpc_endpoint,
 beacon_http_endpoint,
@@ -241,7 +239,7 @@ def get_validator_config(
 prysm_password_artifact_uuid
 ):
-consensus_data_dirpath = shared_utils.path_join(CONSENSUS_DATA_DIRPATH_ON_SERVICE_CONTAINER, service_id)
+consensus_data_dirpath = shared_utils.path_join(CONSENSUS_DATA_DIRPATH_ON_SERVICE_CONTAINER, service_name)
 prysm_keystore_dirpath = shared_utils.path_join(VALIDATOR_KEYS_MOUNT_DIRPATH_ON_SERVICE_CONTAINER, node_keystore_files.prysm_relative_dirpath)
 prysm_password_filepath = shared_utils.path_join(PRYSM_PASSWORD_MOUNT_DIRPATH_ON_SERVICE_CONTAINER, prysm_password_relative_filepath)
...
@@ -69,7 +69,7 @@ TEKU_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -86,15 +86,13 @@ def launch(
 config = get_config(launcher.cl_genesis_data, image, bootnode_context, el_client_context, mev_boost_context, log_level, node_keystore_files, extra_params)
-teku_service = plan.add_service(service_id, config)
+teku_service = plan.add_service(service_name, config)
-cl_node_health_checker.wait_for_healthy(plan, service_id, HTTP_PORT_ID)
+cl_node_health_checker.wait_for_healthy(plan, service_name, HTTP_PORT_ID)
-node_identity_recipe = struct(
+node_identity_recipe = GetHttpRequestRecipe(
-service_id = service_id,
+service_name = service_name,
-method= "GET",
 endpoint = "/eth/v1/node/identity",
-content_type = "application/json",
 port_id = HTTP_PORT_ID,
 extract = {
 "enr": ".data.enr"
@@ -106,7 +104,7 @@ def launch(
 teku_metrics_port = teku_service.ports[METRICS_PORT_ID]
 teku_metrics_url = "{0}:{1}".format(teku_service.ip_address, teku_metrics_port.number)
-teku_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_id, METRICS_PATH, teku_metrics_url)
+teku_node_metrics_info = cl_node_metrics.new_cl_node_metrics_info(service_name, METRICS_PATH, teku_metrics_url)
 nodes_metrics_info = [teku_node_metrics_info]
 return cl_client_context.new_cl_client_context(
@@ -115,7 +113,7 @@ def launch(
 teku_service.ip_address,
 HTTP_PORT_NUM,
 nodes_metrics_info,
-service_id
+service_name
 )
...
@@ -47,7 +47,7 @@ BESU_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -59,9 +59,9 @@ def launch(
 config = get_config(launcher.network_id, launcher.el_genesis_data,
 image, existing_el_clients, log_level, extra_params)
-service = plan.add_service(service_id, config)
+service = plan.add_service(service_name, config)
-enode = el_admin_node_info.get_enode_for_node(plan, service_id, RPC_PORT_ID)
+enode = el_admin_node_info.get_enode_for_node(plan, service_name, RPC_PORT_ID)
 return el_client_context.new_el_client_context(
 "besu",
...
-def get_enode_enr_for_node(plan, service_id, port_id):
+def get_enode_enr_for_node(plan, service_name, port_id):
-recipe = struct(
+recipe = PostHttpRequestRecipe(
-service_id = service_id,
+service_name = service_name,
-method= "POST",
 endpoint = "",
 body = '{"method":"admin_nodeInfo","params":[],"id":1,"jsonrpc":"2.0"}',
 content_type = "application/json",
@@ -15,10 +14,9 @@ def get_enode_enr_for_node(plan, service_id, port_id):
 response = plan.wait(recipe, "extract.enode", "!=", "")
 return (response["extract.enode"], response["extract.enr"])
-def get_enode_for_node(plan, service_id, port_id):
+def get_enode_for_node(plan, service_name, port_id):
-recipe = struct(
+recipe = PostHttpRequestRecipe(
-service_id = service_id,
+service_name = service_name,
-method= "POST",
 endpoint = "",
 body = '{"method":"admin_nodeInfo","params":[],"id":1,"jsonrpc":"2.0"}',
 content_type = "application/json",
...
@@ -45,7 +45,7 @@ ERIGON_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -57,9 +57,9 @@ def launch(
 config = get_config(launcher.network_id, launcher.el_genesis_data,
 image, existing_el_clients, log_level, extra_params)
-service = plan.add_service(service_id, config)
+service = plan.add_service(service_name, config)
-enode, enr = el_admin_node_info.get_enode_enr_for_node(plan, service_id, RPC_PORT_ID)
+enode, enr = el_admin_node_info.get_enode_enr_for_node(plan, service_name, RPC_PORT_ID)
 return el_client_context.new_el_client_context(
 "erigon",
...
@@ -57,7 +57,7 @@ VERBOSITY_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -71,9 +71,9 @@ def launch(
 config = get_config(launcher.network_id, launcher.el_genesis_data, launcher.prefunded_geth_keys_artifact_uuid,
 launcher.prefunded_account_info, image, existing_el_clients, log_level, extra_params)
-service = plan.add_service(service_id, config)
+service = plan.add_service(service_name, config)
-enode, enr = el_admin_node_info.get_enode_enr_for_node(plan, service_id, RPC_PORT_ID)
+enode, enr = el_admin_node_info.get_enode_enr_for_node(plan, service_name, RPC_PORT_ID)
 return el_client_context.new_el_client_context(
 "geth",
...
@@ -44,7 +44,7 @@ NETHERMIND_LOG_LEVELS = {
 def launch(
 plan,
 launcher,
-service_id,
+service_name,
 image,
 participant_log_level,
 global_log_level,
@@ -55,9 +55,9 @@ def launch(
 config = get_config(launcher.el_genesis_data, image, existing_el_clients, log_level, extra_params)
-service = plan.add_service(service_id, config)
+service = plan.add_service(service_name, config)
-enode = el_admin_node_info.get_enode_for_node(plan, service_id, RPC_PORT_ID)
+enode = el_admin_node_info.get_enode_for_node(plan, service_name, RPC_PORT_ID)
 return el_client_context.new_el_client_context(
 "nethermind",
...
@@ -15,10 +15,10 @@ NETWORK_ID_TO_NAME = {
 "3": "ropsten",
 }
-def launch(plan, mev_boost_launcher, service_id, network_id):
+def launch(plan, mev_boost_launcher, service_name, network_id):
 config = get_config(mev_boost_launcher, network_id)
-mev_boost_service = plan.add_service(service_id, config)
+mev_boost_service = plan.add_service(service_name, config)
 return mev_boost_context.new_mev_boost_context(mev_boost_service.ip_address, FLASHBOTS_MEV_BOOST_PORT)
...
@@ -23,9 +23,9 @@ participant_module = import_module("github.com/kurtosis-tech/eth2-package/src/pa
 package_io = import_module("github.com/kurtosis-tech/eth2-package/src/package_io/constants.star")
-CL_CLIENT_SERVICE_ID_PREFIX = "cl-client-"
+CL_CLIENT_SERVICE_NAME_PREFIX = "cl-client-"
-EL_CLIENT_SERVICE_ID_PREFIX = "el-client-"
+EL_CLIENT_SERVICE_NAME_PREFIX = "el-client-"
-MEV_BOOST_SERVICE_ID_PREFIX = "mev-boost-"
+MEV_BOOST_SERVICE_NAME_PREFIX = "mev-boost-"
 BOOT_PARTICIPANT_INDEX = 0
@@ -96,12 +96,12 @@ def launch_participant_network(plan, participants, network_params, global_log_le
 fail("Unsupported launcher '{0}', need one of '{1}'".format(el_client_type, ",".join([el.name for el in el_launchers.keys()])))
 el_launcher, launch_method = el_launchers[el_client_type]["launcher"], el_launchers[el_client_type]["launch_method"]
-el_service_id = "{0}{1}".format(EL_CLIENT_SERVICE_ID_PREFIX, index)
+el_service_name = "{0}{1}".format(EL_CLIENT_SERVICE_NAME_PREFIX, index)
 el_client_context = launch_method(
 plan,
 el_launcher,
-el_service_id,
+el_service_name,
 participant.el_client_image,
 participant.el_client_log_level,
 global_log_level,
@@ -157,7 +157,7 @@ def launch_participant_network(plan, participants, network_params, global_log_le
 fail("Unsupported launcher '{0}', need one of '{1}'".format(cl_client_type, ",".join([cl.name for cl in cl_launchers.keys()])))
 cl_launcher, launch_method = cl_launchers[cl_client_type]["launcher"], cl_launchers[cl_client_type]["launch_method"]
-cl_service_id = "{0}{1}".format(CL_CLIENT_SERVICE_ID_PREFIX, index)
+cl_service_name = "{0}{1}".format(CL_CLIENT_SERVICE_NAME_PREFIX, index)
 new_cl_node_validator_keystores = preregistered_validator_keys_for_nodes[index]
@@ -167,8 +167,8 @@ def launch_participant_network(plan, participants, network_params, global_log_le
 if hasattr(participant, "builder_network_params") and participant.builder_network_params != None:
 mev_boost_launcher = mev_boost_launcher_module.new_mev_boost_launcher(MEV_BOOST_SHOULD_CHECK_RELAY, participant.builder_network_params.relay_endpoints)
-mev_boost_service_id = MEV_BOOST_SERVICE_ID_PREFIX.format(1)
+mev_boost_service_name = MEV_BOOST_SERVICE_NAME_PREFIX.format(1)
-mev_boost_context = mev_boost_launcher_module.launch_mevboost(plan, mev_boost_launcher, mev_boost_service_id, network_params.network_id)
+mev_boost_context = mev_boost_launcher_module.launch_mevboost(plan, mev_boost_launcher, mev_boost_service_name, network_params.network_id)
 all_mevboost_contexts.append(mev_boost_context)
@@ -178,7 +178,7 @@ def launch_participant_network(plan, participants, network_params, global_log_le
 cl_client_context = launch_method(
 plan,
 cl_launcher,
-cl_service_id,
+cl_service_name,
 participant.cl_client_image,
 participant.cl_client_log_level,
 global_log_level,
@@ -194,7 +194,7 @@ def launch_participant_network(plan, participants, network_params, global_log_le
 cl_client_context = launch_method(
 plan,
 cl_launcher,
-cl_service_id,
+cl_service_name,
 participant.cl_client_image,
 participant.cl_client_log_level,
 global_log_level,
...
@@ -59,7 +59,7 @@ def generate_cl_genesis_data(
 genesis_generation_config_artifact_name = plan.render_templates(template_and_data_by_rel_dest_filepath, "genesis-generation-config-cl")
 # TODO(old) Make this the actual data generator - comment copied from the original module
-launcher_service_id = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
+launcher_service_name = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
 plan,
 {
 CONFIG_DIRPATH_ON_GENERATOR: genesis_generation_config_artifact_name,
@@ -83,7 +83,7 @@ def generate_cl_genesis_data(
 (" && ").join(all_dirpath_creation_commands),
 ]
-dir_creation_cmd_result = plan.exec(struct(service_id=launcher_service_id, command=dir_creation_cmd))
+dir_creation_cmd_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=dir_creation_cmd))
 plan.assert(dir_creation_cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
@@ -100,7 +100,7 @@ def generate_cl_genesis_data(
 filepath_on_generator,
 OUTPUT_DIRPATH_ON_GENERATOR,
 ]
-cmd_result = plan.exec(struct(service_id=launcher_service_id, command=cmd))
+cmd_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=cmd))
 plan.assert(cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
 # Generate files that need dynamic content
@@ -118,7 +118,7 @@ def generate_cl_genesis_data(
 destFilepath,
 )
 ]
-cmd_result = plan.exec(struct(service_id=launcher_service_id, command=cmd))
+cmd_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=cmd))
 plan.assert(cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
@@ -132,10 +132,10 @@ def generate_cl_genesis_data(
 "--state-output", shared_utils.path_join(OUTPUT_DIRPATH_ON_GENERATOR, GENESIS_STATE_FILENAME)
 ]
-genesis_generation_result = plan.exec(struct(service_id=launcher_service_id, command=cl_genesis_generation_cmd))
+genesis_generation_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=cl_genesis_generation_cmd))
 plan.assert(genesis_generation_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
-cl_genesis_data_artifact_name = plan.store_service_files(launcher_service_id, OUTPUT_DIRPATH_ON_GENERATOR, name = "cl-genesis-data")
+cl_genesis_data_artifact_name = plan.store_service_files(launcher_service_name, OUTPUT_DIRPATH_ON_GENERATOR, name = "cl-genesis-data")
 jwt_secret_rel_filepath = shared_utils.path_join(
 shared_utils.path_base(OUTPUT_DIRPATH_ON_GENERATOR),
@@ -157,7 +157,7 @@ def generate_cl_genesis_data(
 )
 # we cleanup as the data generation is done
-plan.remove_service(launcher_service_id)
+plan.remove_service(launcher_service_name)
 return result
...
@@ -34,7 +34,7 @@ def generate_cl_validator_keystores(
 num_nodes,
 num_validators_per_node):
-service_id = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
+service_name = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
 plan,
 {},
 )
@@ -66,13 +66,13 @@ def generate_cl_validator_keystores(
 command_str = " && ".join(all_sub_command_strs)
-command_result = plan.exec(struct(service_id=service_id, command=["sh", "-c", command_str]))
+command_result = plan.exec(ExecRecipe(service_name=service_name, command=["sh", "-c", command_str]))
 plan.assert(command_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
 # Store outputs into files artifacts
 keystore_files = []
 for idx, output_dirpath in enumerate(all_output_dirpaths):
-artifact_name = plan.store_service_files(service_id, output_dirpath, name = "validator-keystore-" + str(idx))
+artifact_name = plan.store_service_files(service_name, output_dirpath, name = "validator-keystore-" + str(idx))
 # This is necessary because the way Kurtosis currently implements artifact-storing is
 base_dirname_in_artifact = shared_utils.path_base(output_dirpath)
@@ -97,10 +97,10 @@ def generate_cl_validator_keystores(
 PRYSM_PASSWORD_FILEPATH_ON_GENERATOR,
 ),
 ]
-write_prysm_password_file_cmd_result = plan.exec(struct(service_id=service_id, command=write_prysm_password_file_cmd))
+write_prysm_password_file_cmd_result = plan.exec(ExecRecipe(service_name=service_name, command=write_prysm_password_file_cmd))
 plan.assert(write_prysm_password_file_cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
-prysm_password_artifact_name = plan.store_service_files(service_id, PRYSM_PASSWORD_FILEPATH_ON_GENERATOR, name = "prysm-password")
+prysm_password_artifact_name = plan.store_service_files(service_name, PRYSM_PASSWORD_FILEPATH_ON_GENERATOR, name = "prysm-password")
 result = keystores_result.new_generate_keystores_result(
 prysm_password_artifact_name,
@@ -109,5 +109,5 @@ def generate_cl_validator_keystores(
 )
 # we cleanup as the data generation is done
-plan.remove_service(service_id)
+plan.remove_service(service_name)
 return result
@@ -52,7 +52,7 @@ def generate_el_genesis_data(
 # TODO(old) Make this the actual data generator - comment copied from the original module
-launcher_service_id = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
+launcher_service_name = prelaunch_data_generator_launcher.launch_prelaunch_data_generator(
 plan,
 {
 CONFIG_DIRPATH_ON_GENERATOR: genesis_generation_config_artifact_name,
@@ -80,7 +80,7 @@ def generate_el_genesis_data(
 ]
-dir_creation_cmd_result = plan.exec(struct(service_id=launcher_service_id, command=dir_creation_cmd))
+dir_creation_cmd_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=dir_creation_cmd))
 plan.assert(dir_creation_cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
 genesis_config_filepath_on_generator = shared_utils.path_join(CONFIG_DIRPATH_ON_GENERATOR, GENESIS_CONFIG_FILENAME)
@@ -96,7 +96,7 @@ def generate_el_genesis_data(
 " ".join(cmd)
 ]
-cmd_to_execute_result = plan.exec(struct(service_id=launcher_service_id, command=cmd_to_execute))
+cmd_to_execute_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=cmd_to_execute))
 plan.assert(cmd_to_execute_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
@@ -115,10 +115,10 @@ def generate_el_genesis_data(
 )
 ]
-jwt_secret_generation_cmd_result = plan.exec(struct(service_id=launcher_service_id, command=jwt_secret_generation_cmd))
+jwt_secret_generation_cmd_result = plan.exec(ExecRecipe(service_name=launcher_service_name, command=jwt_secret_generation_cmd))
 plan.assert(jwt_secret_generation_cmd_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
-el_genesis_data_artifact_name = plan.store_service_files(launcher_service_id, OUTPUT_DIRPATH_ON_GENERATOR, name = "el-genesis-data")
+el_genesis_data_artifact_name = plan.store_service_files(launcher_service_name, OUTPUT_DIRPATH_ON_GENERATOR, name = "el-genesis-data")
 result = el_genesis.new_el_genesis_data(
 el_genesis_data_artifact_name,
@@ -130,7 +130,7 @@ def generate_el_genesis_data(
 )
 # we cleanup as the data generation is done
-plan.remove_service(launcher_service_id)
+plan.remove_service(launcher_service_name)
 return result
...
IMAGE = "ethpandaops/ethereum-genesis-generator:1.0.6" IMAGE = "ethpandaops/ethereum-genesis-generator:1.0.6"
SERVICE_ID_PREFIX = "prelaunch-data-generator-" SERVICE_NAME_PREFIX = "prelaunch-data-generator-"
# We use Docker exec commands to run the commands we need, so we override the default # We use Docker exec commands to run the commands we need, so we override the default
ENTRYPOINT_ARGS = [ ENTRYPOINT_ARGS = [
...@@ -13,14 +13,14 @@ def launch_prelaunch_data_generator(plan, files_artifact_mountpoints): ...@@ -13,14 +13,14 @@ def launch_prelaunch_data_generator(plan, files_artifact_mountpoints):
config = get_config(files_artifact_mountpoints) config = get_config(files_artifact_mountpoints)
service_id = "{0}{1}".format( service_name = "{0}{1}".format(
SERVICE_ID_PREFIX, SERVICE_NAME_PREFIX,
time.now().unix_nano, time.now().unix_nano,
) )
plan.add_service(service_id, config) plan.add_service(service_name, config)
return service_id return service_name
def get_config( def get_config(
files_artifact_mountpoints, files_artifact_mountpoints,
......
 shared_utils = import_module("github.com/kurtosis-tech/eth2-package/src/shared_utils/shared_utils.star")
-SERVICE_ID = "prometheus"
+SERVICE_NAME = "prometheus"
 # TODO(old) I'm not sure if we should use latest version or ping an specific version instead
 IMAGE_NAME = "prom/prometheus:latest"
@@ -28,7 +28,7 @@ def launch_prometheus(plan, config_template, cl_client_contexts):
 config_files_artifact_name = plan.render_templates(template_and_data_by_rel_dest_filepath, "prometheus-config")
 config = get_config(config_files_artifact_name)
-prometheus_service = plan.add_service(SERVICE_ID, config)
+prometheus_service = plan.add_service(SERVICE_NAME, config)
 private_ip_address = prometheus_service.ip_address
 prometheus_service_http_port = prometheus_service.ports[HTTP_PORT_ID].number
...
IMAGE_NAME = "marioevz/merge-testnet-verifier:latest" IMAGE_NAME = "marioevz/merge-testnet-verifier:latest"
SERVICE_ID = "testnet-verifier" SERVICE_NAME = "testnet-verifier"
# We use Docker exec commands to run the commands we need, so we override the default # We use Docker exec commands to run the commands we need, so we override the default
SYNCHRONOUS_ENTRYPOINT_ARGS = [ SYNCHRONOUS_ENTRYPOINT_ARGS = [
...@@ -11,15 +11,15 @@ SYNCHRONOUS_ENTRYPOINT_ARGS = [ ...@@ -11,15 +11,15 @@ SYNCHRONOUS_ENTRYPOINT_ARGS = [
# this is broken check - https://github.com/ethereum/merge-testnet-verifier/issues/4 # this is broken check - https://github.com/ethereum/merge-testnet-verifier/issues/4
def launch_testnet_verifier(plan, params, el_client_contexts, cl_client_contexts): def launch_testnet_verifier(plan, params, el_client_contexts, cl_client_contexts):
config = get_asynchronous_verification_config(params, el_client_contexts, cl_client_contexts) config = get_asynchronous_verification_config(params, el_client_contexts, cl_client_contexts)
plan.add_service(SERVICE_ID, config) plan.add_service(SERVICE_NAME, config)
def run_synchronous_testnet_verification(plan, params, el_client_contexts, cl_client_contexts): def run_synchronous_testnet_verification(plan, params, el_client_contexts, cl_client_contexts):
config = get_synchronous_verification_config() config = get_synchronous_verification_config()
plan.add_service(SERVICE_ID, config) plan.add_service(SERVICE_NAME, config)
command = get_cmd(params, el_client_contexts, cl_client_contexts, True) command = get_cmd(params, el_client_contexts, cl_client_contexts, True)
exec_result = plan.exec(struct(service_id=SERVICE_ID, command=command)) exec_result = plan.exec(ExecRecipe(service_name=SERVICE_NAME, command=command))
plan.assert(exec_result["code"], "==", 0) plan.assert(exec_result["code"], "==", 0)
......
IMAGE_NAME = "kurtosistech/tx-fuzz:0.2.0" IMAGE_NAME = "kurtosistech/tx-fuzz:0.2.0"
SERVICE_ID = "transaction-spammer" SERVICE_NAME = "transaction-spammer"
def launch_transaction_spammer(plan, prefunded_addresses, el_client_context): def launch_transaction_spammer(plan, prefunded_addresses, el_client_context):
config = get_config(prefunded_addresses, el_client_context) config = get_config(prefunded_addresses, el_client_context)
plan.add_service(SERVICE_ID, config) plan.add_service(SERVICE_NAME, config)
def get_config(prefunded_addresses, el_client_context): def get_config(prefunded_addresses, el_client_context):
......