Commit b61a128b authored by Barnabas Busa, committed by GitHub

fix!: remove vc_count (#844)

parent 3bb88e04
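This is a breaking change for existing network configurations: the per-participant vc_count option is removed, and each participant now gets a single validator client. A minimal before/after sketch of an affected participant entry (field names come from the configuration examples in this diff; the client choices are illustrative):

participants:
  - el_type: geth
    cl_type: lighthouse
    vc_type: lighthouse
    vc_count: 1   # removed by this commit; configs that still set it will likely fail input validation

After the change the same participant is simply:

participants:
  - el_type: geth
    cl_type: lighthouse
    vc_type: lighthouse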
@@ -12,6 +12,5 @@ additional_services:
   - prometheus_grafana
 mev_params:
   mev_boost_image: ghcr.io/commit-boost/pbs:latest
-  mev_relay_image: flashbots/mev-boost-relay:latest
 network_params:
   seconds_per_slot: 3
@@ -13,6 +13,7 @@ participants:
     cl_type: grandine
 additional_services:
   - assertoor
+  - dora
 assertoor_params:
   run_stability_check: false
   run_block_proposal_check: true
@@ -304,10 +304,6 @@ participants:
   # - vero: ghcr.io/serenita-org/vero:master
   vc_image: ""
-  # The number of validator clients to run for this participant
-  # Defaults to 1
-  vc_count: 1
   # The log level string that this participant's validator client should log at
   # If this is emptystring then the global `logLevel` parameter's value will be translated into a string appropriate for the client (e.g. if
   # global `logLevel` = `info` then Teku would receive `INFO`, Prysm would receive `info`, etc.)
...
@@ -133,7 +133,10 @@ def run(plan, args={}):
             args_with_right_defaults.mev_params.mev_builder_extra_data,
             global_node_selectors,
         )
-    elif args_with_right_defaults.mev_type == constants.FLASHBOTS_MEV_TYPE:
+    elif (
+        args_with_right_defaults.mev_type == constants.FLASHBOTS_MEV_TYPE
+        or args_with_right_defaults.mev_type == constants.COMMIT_BOOST_MEV_TYPE
+    ):
         plan.print("Generating flashbots builder config file")
         flashbots_builder_config_file = flashbots_mev_rbuilder.new_builder_config(
             plan,
...
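The run() change above routes constants.COMMIT_BOOST_MEV_TYPE through the same rbuilder config path as constants.FLASHBOTS_MEV_TYPE. A sketch of a config that exercises this path, assuming the selector string behind COMMIT_BOOST_MEV_TYPE is "commit-boost" (the constant's value is not shown in this diff); the mev_boost_image and network_params values are taken from the commit-boost test config above, the participant entry is illustrative:

participants:
  - el_type: geth
    cl_type: lighthouse
mev_type: commit-boost   # assumed value of constants.COMMIT_BOOST_MEV_TYPE
mev_params:
  mev_boost_image: ghcr.io/commit-boost/pbs:latest
network_params:
  seconds_per_slot: 3
additional_services:
  - prometheus_grafana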
@@ -31,7 +31,6 @@ participants:
     vc_type: lighthouse
     vc_image: sigp/lighthouse:latest-unstable
     vc_log_level: ""
-    vc_count: 1
     vc_extra_env_vars: {}
     vc_extra_labels: {}
     vc_extra_params: []
...
@@ -120,7 +120,7 @@ def launch(
         cl_service_name = "cl-{0}-{1}-{2}".format(index_str, cl_type, el_type)
         new_cl_node_validator_keystores = None
-        if participant.validator_count != 0 and participant.vc_count != 0:
+        if participant.validator_count != 0:
             new_cl_node_validator_keystores = preregistered_validator_keys_for_nodes[
                 index
             ]
@@ -145,16 +145,16 @@ def launch(
                     snooper_engine_context
                 )
             )
+        checkpoint_sync_url = args_with_right_defaults.checkpoint_sync_url
         if args_with_right_defaults.checkpoint_sync_enabled:
             if args_with_right_defaults.checkpoint_sync_url == "":
                 if (
                     network_params.network in constants.PUBLIC_NETWORKS
                     or network_params.network == constants.NETWORK_NAME.ephemery
                 ):
-                    args_with_right_defaults.checkpoint_sync_url = (
-                        constants.CHECKPOINT_SYNC_URL[network_params.network]
-                    )
+                    checkpoint_sync_url = constants.CHECKPOINT_SYNC_URL[
+                        network_params.network
+                    ]
                 else:
                     fail(
                         "Checkpoint sync URL is required if you enabled checkpoint_sync for custom networks. Please provide a valid URL."
@@ -178,7 +178,7 @@ def launch(
                 tolerations,
                 node_selectors,
                 args_with_right_defaults.checkpoint_sync_enabled,
-                args_with_right_defaults.checkpoint_sync_url,
+                checkpoint_sync_url,
                 args_with_right_defaults.port_publisher,
                 index,
             )
@@ -199,7 +199,7 @@ def launch(
                 tolerations,
                 node_selectors,
                 args_with_right_defaults.checkpoint_sync_enabled,
-                args_with_right_defaults.checkpoint_sync_url,
+                checkpoint_sync_url,
                 args_with_right_defaults.port_publisher,
                 index,
             )
...
@@ -136,7 +136,10 @@ def get_config(
         constants.METRICS_PORT_ID: METRICS_PORT_NUM,
     }
-    if launcher.builder_type == "flashbots":
+    if (
+        launcher.builder_type == constants.FLASHBOTS_MEV_TYPE
+        or launcher.builder_type == constants.COMMIT_BOOST_MEV_TYPE
+    ):
         used_port_assignments[constants.RBUILDER_PORT_ID] = RBUILDER_PORT_NUM
     used_ports = shared_utils.get_port_specs(used_port_assignments)
@@ -161,7 +164,10 @@ def get_config(
         "--http.addr=0.0.0.0",
         "--http.corsdomain=*",
         "--http.api=admin,net,eth,web3,debug,txpool,trace{0}".format(
-            ",flashbots" if launcher.builder_type == "flashbots" else ""
+            ",flashbots"
+            if launcher.builder_type == constants.FLASHBOTS_MEV_TYPE
+            or launcher.builder_type == constants.COMMIT_BOOST_MEV_TYPE
+            else ""
         ),
         "--ws",
         "--ws.addr=0.0.0.0",
@@ -220,11 +226,14 @@ def get_config(
     )
     env_vars = {}
     image = participant.el_image
-    if launcher.builder_type == "mev-rs":
+    if launcher.builder_type == constants.MEV_RS_MEV_TYPE:
         files[
             mev_rs_builder.MEV_BUILDER_MOUNT_DIRPATH_ON_SERVICE
         ] = mev_rs_builder.MEV_BUILDER_FILES_ARTIFACT_NAME
-    elif launcher.builder_type == "flashbots":
+    elif (
+        launcher.builder_type == constants.FLASHBOTS_MEV_TYPE
+        or launcher.builder_type == constants.COMMIT_BOOST_MEV_TYPE
+    ):
         image = constants.DEFAULT_FLASHBOTS_BUILDER_IMAGE
         cl_client_name = service_name.split("-")[4]
         cmd.append("--engine.experimental")
...
@@ -238,7 +238,6 @@ def input_parser(plan, input_args):
             vc_type=participant["vc_type"],
             vc_image=participant["vc_image"],
             vc_log_level=participant["vc_log_level"],
-            vc_count=participant["vc_count"],
             vc_tolerations=participant["vc_tolerations"],
             cl_extra_params=participant["cl_extra_params"],
             cl_extra_labels=participant["cl_extra_labels"],
@@ -642,30 +641,6 @@ def parse_network_params(plan, input_args):
             remote_signer_type, ""
         )
-        if result["parallel_keystore_generation"] and participant["vc_count"] != 1:
-            fail(
-                "parallel_keystore_generation is only supported for 1 validator client per participant (for now)"
-            )
-        # If the num validator keys per node is not divisible by vc_count of a participant, fail
-        if (
-            participant["vc_count"] > 0
-            and result["network_params"]["num_validator_keys_per_node"]
-            % participant["vc_count"]
-            != 0
-        ):
-            fail(
-                "num_validator_keys_per_node: {0} is not divisible by vc_count: {1} for participant: {2}".format(
-                    result["network_params"]["num_validator_keys_per_node"],
-                    participant["vc_count"],
-                    str(index + 1)
-                    + "-"
-                    + participant["el_type"]
-                    + "-"
-                    + participant["cl_type"],
-                )
-            )
         snooper_enabled = participant["snooper_enabled"]
         if snooper_enabled == None:
             participant["snooper_enabled"] = result["snooper_enabled"]
@@ -972,7 +947,6 @@ def default_participant():
         "vc_type": "",
         "vc_image": "",
         "vc_log_level": "",
-        "vc_count": 1,
         "vc_extra_env_vars": {},
         "vc_extra_labels": {},
         "vc_extra_params": [],
...
@@ -28,7 +28,6 @@ PARTICIPANT_CATEGORIES = {
         "use_separate_vc",
         "vc_type",
         "vc_image",
-        "vc_count",
         "vc_log_level",
         "vc_extra_env_vars",
         "vc_extra_labels",
@@ -96,7 +95,6 @@ PARTICIPANT_MATRIX_PARAMS = {
         "use_separate_vc",
         "vc_type",
         "vc_image",
-        "vc_count",
         "vc_log_level",
         "vc_extra_env_vars",
         "vc_extra_labels",
@@ -112,7 +110,6 @@ PARTICIPANT_MATRIX_PARAMS = {
     "vc": [
         "vc_type",
         "vc_image",
-        "vc_count",
         "vc_log_level",
         "vc_extra_env_vars",
         "vc_extra_labels",
...
@@ -191,6 +191,9 @@ def launch_participant_network(
     ]
     current_vc_index = 0
+    if not args_with_right_defaults.participants:
+        fail("No participants configured")
     for index, participant in enumerate(args_with_right_defaults.participants):
         el_type = participant.el_type
         cl_type = participant.cl_type
@@ -199,38 +202,34 @@ def launch_participant_network(
         index_str = shared_utils.zfill_custom(
             index + 1, len(str(len(args_with_right_defaults.participants)))
         )
-        for sub_index in range(participant.vc_count):
-            vc_index_str = shared_utils.zfill_custom(
-                sub_index + 1, len(str(participant.vc_count))
-            )
-            el_context = all_el_contexts[index]
-            cl_context = all_cl_contexts[index]
+        el_context = all_el_contexts[index] if index < len(all_el_contexts) else None
+        cl_context = all_cl_contexts[index] if index < len(all_cl_contexts) else None
         node_selectors = input_parser.get_client_node_selectors(
             participant.node_selectors,
             global_node_selectors,
         )
         if participant.ethereum_metrics_exporter_enabled:
             pair_name = "{0}-{1}-{2}".format(index_str, cl_type, el_type)
             ethereum_metrics_exporter_service_name = (
                 "ethereum-metrics-exporter-{0}".format(pair_name)
             )
             ethereum_metrics_exporter_context = ethereum_metrics_exporter.launch(
                 plan,
                 pair_name,
                 ethereum_metrics_exporter_service_name,
                 el_context,
                 cl_context,
                 node_selectors,
                 args_with_right_defaults.docker_cache_params,
             )
             plan.print(
                 "Successfully added {0} ethereum metrics exporter participants".format(
                     ethereum_metrics_exporter_context
                 )
             )
             all_ethereum_metrics_exporter_contexts.append(
                 ethereum_metrics_exporter_context
@@ -238,165 +237,150 @@ def launch_participant_network(
         xatu_sentry_context = None
         if participant.xatu_sentry_enabled:
             pair_name = "{0}-{1}-{2}".format(index_str, cl_type, el_type)
             xatu_sentry_service_name = "xatu-sentry-{0}".format(pair_name)
             xatu_sentry_context = xatu_sentry.launch(
                 plan,
                 xatu_sentry_service_name,
                 cl_context,
                 xatu_sentry_params,
                 network_params,
                 pair_name,
                 node_selectors,
             )
             plan.print(
                 "Successfully added {0} xatu sentry participants".format(
                     xatu_sentry_context
                 )
             )
             all_xatu_sentry_contexts.append(xatu_sentry_context)
-            plan.print(
-                "Successfully added {0} CL participants".format(num_participants)
-            )
+        plan.print("Successfully added {0} CL participants".format(num_participants))
         plan.print("Start adding validators for participant #{0}".format(index_str))
         if participant.use_separate_vc == None:
             # This should only be the case for the MEV participant,
             # the regular participants default to False/True
             all_vc_contexts.append(None)
             all_remote_signer_contexts.append(None)
             all_snooper_beacon_contexts.append(None)
             continue
-            if (
-                cl_type in _cls_that_need_separate_vc
-                and not participant.use_separate_vc
-            ):
-                fail("{0} needs a separate validator client!".format(cl_type))
-            if not participant.use_separate_vc:
-                all_vc_contexts.append(None)
-                all_remote_signer_contexts.append(None)
-                all_snooper_beacon_contexts.append(None)
-                continue
+        if cl_type in _cls_that_need_separate_vc and not participant.use_separate_vc:
+            fail("{0} needs a separate validator client!".format(cl_type))
+        if not participant.use_separate_vc:
+            all_vc_contexts.append(None)
+            all_remote_signer_contexts.append(None)
+            all_snooper_beacon_contexts.append(None)
+            continue
         plan.print(
             "Using separate validator client for participant #{0}".format(index_str)
         )
         vc_keystores = None
         if participant.validator_count != 0:
-                if participant.vc_count == 1:
-                    vc_keystores = preregistered_validator_keys_for_nodes[index]
-                else:
-                    vc_keystores = preregistered_validator_keys_for_nodes[
-                        index + sub_index
-                    ]
+            vc_keystores = preregistered_validator_keys_for_nodes[index]
         vc_context = None
         remote_signer_context = None
         snooper_beacon_context = None
         if participant.snooper_enabled:
-                snooper_service_name = "snooper-beacon-{0}-{1}-{2}{3}".format(
-                    index_str,
-                    cl_type,
-                    vc_type,
-                    "-" + vc_index_str if participant.vc_count != 1 else "",
-                )
+            snooper_service_name = "snooper-beacon-{0}-{1}-{2}".format(
+                index_str,
+                cl_type,
+                vc_type,
+            )
             snooper_beacon_context = beacon_snooper.launch(
                 plan,
                 snooper_service_name,
                 cl_context,
                 node_selectors,
                 args_with_right_defaults.docker_cache_params,
             )
             plan.print(
                 "Successfully added {0} snooper participants".format(
                     snooper_beacon_context
                 )
             )
             all_snooper_beacon_contexts.append(snooper_beacon_context)
         full_name = (
-                "{0}-{1}-{2}-{3}{4}".format(
-                    index_str,
-                    el_type,
-                    cl_type,
-                    vc_type,
-                    "-" + vc_index_str if participant.vc_count != 1 else "",
-                )
-                if participant.cl_type != participant.vc_type
-                else "{0}-{1}-{2}{3}".format(
-                    index_str,
-                    el_type,
-                    cl_type,
-                    "-" + vc_index_str if participant.vc_count != 1 else "",
-                )
+            "{0}-{1}-{2}-{3}".format(
+                index_str,
+                el_type,
+                cl_type,
+                vc_type,
+            )
+            if participant.cl_type != participant.vc_type
+            else "{0}-{1}-{2}".format(
+                index_str,
+                el_type,
+                cl_type,
+            )
         )
         if participant.use_remote_signer:
             remote_signer_context = remote_signer.launch(
                 plan=plan,
                 launcher=remote_signer.new_remote_signer_launcher(
                     el_cl_genesis_data=el_cl_data
                 ),
                 service_name="signer-{0}".format(full_name),
                 remote_signer_type=remote_signer_type,
                 image=participant.remote_signer_image,
                 full_name="{0}-remote_signer".format(full_name),
                 vc_type=vc_type,
                 node_keystore_files=vc_keystores,
                 participant=participant,
                 global_tolerations=global_tolerations,
                 node_selectors=node_selectors,
                 port_publisher=args_with_right_defaults.port_publisher,
                 remote_signer_index=current_vc_index,
             )
         all_remote_signer_contexts.append(remote_signer_context)
         if remote_signer_context and remote_signer_context.metrics_info:
-                remote_signer_context.metrics_info[
-                    "config"
-                ] = participant.prometheus_config
+            remote_signer_context.metrics_info["config"] = participant.prometheus_config
         vc_context = vc.launch(
             plan=plan,
             launcher=vc.new_vc_launcher(el_cl_genesis_data=el_cl_data),
             keymanager_file=keymanager_file,
             service_name="vc-{0}".format(full_name),
             vc_type=vc_type,
             image=participant.vc_image,
             global_log_level=args_with_right_defaults.global_log_level,
             cl_context=cl_context,
             el_context=el_context,
             remote_signer_context=remote_signer_context,
             full_name=full_name,
             snooper_enabled=participant.snooper_enabled,
             snooper_beacon_context=snooper_beacon_context,
             node_keystore_files=vc_keystores,
             participant=participant,
             prysm_password_relative_filepath=prysm_password_relative_filepath,
             prysm_password_artifact_uuid=prysm_password_artifact_uuid,
             global_tolerations=global_tolerations,
             node_selectors=node_selectors,
             preset=network_params.preset,
             network=network_params.network,
             electra_fork_epoch=network_params.electra_fork_epoch,
             port_publisher=args_with_right_defaults.port_publisher,
             vc_index=current_vc_index,
         )
         all_vc_contexts.append(vc_context)
         if vc_context and vc_context.metrics_info:
             vc_context.metrics_info["config"] = participant.prometheus_config
         current_vc_index += 1
     all_participants = []
     for index, participant in enumerate(args_with_right_defaults.participants):
         el_type = participant.el_type
@@ -406,14 +390,15 @@ def launch_participant_network(
         snooper_engine_context = None
         snooper_beacon_context = None
-        el_context = all_el_contexts[index]
-        cl_context = all_cl_contexts[index]
-        if participant.vc_count != 0:
-            vc_context = all_vc_contexts[index]
-            remote_signer_context = all_remote_signer_contexts[index]
-        else:
-            vc_context = None
-            remote_signer_context = None
+        el_context = all_el_contexts[index] if index < len(all_el_contexts) else None
+        cl_context = all_cl_contexts[index] if index < len(all_cl_contexts) else None
+        vc_context = all_vc_contexts[index] if index < len(all_vc_contexts) else None
+        remote_signer_context = (
+            all_remote_signer_contexts[index]
+            if index < len(all_remote_signer_contexts)
+            else None
+        )
         if participant.snooper_enabled:
             snooper_engine_context = all_snooper_engine_contexts[index]
@@ -427,7 +412,7 @@ def launch_participant_network(
         ]
         xatu_sentry_context = None
-        if participant.xatu_sentry_enabled:
+        if participant.xatu_sentry_enabled and index < len(all_xatu_sentry_contexts):
             xatu_sentry_context = all_xatu_sentry_contexts[index]
         participant_entry = participant_module.new_participant(
...
@@ -96,41 +96,26 @@ def generate_validator_keystores(plan, mnemonic, participants, docker_cache_params):
             all_output_dirpaths.append(output_dirpath)
             continue
-        for i in range(participant.vc_count):
-            output_dirpath = (
-                NODE_KEYSTORES_OUTPUT_DIRPATH_FORMAT_STR.format(idx, "-" + str(i))
-                if participant.vc_count != 1
-                else NODE_KEYSTORES_OUTPUT_DIRPATH_FORMAT_STR.format(idx, "")
-            )
-            start_index = running_total_validator_count + i * (
-                participant.validator_count // participant.vc_count
-            )
-            stop_index = start_index + (
-                participant.validator_count // participant.vc_count
-            )
-            # Adjust stop_index for the last partition to include all remaining validators
-            if i == participant.vc_count - 1:
-                stop_index = running_total_validator_count + participant.validator_count
+        start_index = running_total_validator_count
+        stop_index = start_index + participant.validator_count
         generate_keystores_cmd = '{0} keystores --insecure --prysm-pass {1} --out-loc {2} --source-mnemonic "{3}" --source-min {4} --source-max {5}'.format(
             KEYSTORES_GENERATION_TOOL_NAME,
             PRYSM_PASSWORD,
             output_dirpath,
             mnemonic,
             start_index,
             stop_index,
         )
         all_output_dirpaths.append(output_dirpath)
         all_sub_command_strs.append(generate_keystores_cmd)
         teku_permissions_cmd = "chmod 0777 -R " + output_dirpath + TEKU_KEYS_DIRNAME
         raw_secret_permissions_cmd = (
             "chmod 0600 -R " + output_dirpath + RAW_SECRETS_DIRNAME
         )
         all_sub_command_strs.append(teku_permissions_cmd)
         all_sub_command_strs.append(raw_secret_permissions_cmd)
         running_total_validator_count += participant.validator_count
@@ -151,51 +136,37 @@ def generate_validator_keystores(plan, mnemonic, participants, docker_cache_params):
             keystore_files.append(None)
             continue
-        for i in range(participant.vc_count):
-            output_dirpath = (
-                NODE_KEYSTORES_OUTPUT_DIRPATH_FORMAT_STR.format(idx, "-" + str(i))
-                if participant.vc_count != 1
-                else NODE_KEYSTORES_OUTPUT_DIRPATH_FORMAT_STR.format(idx, "")
-            )
-            padded_idx = shared_utils.zfill_custom(idx + 1, len(str(len(participants))))
-            keystore_start_index = running_total_validator_count + i * (
-                participant.validator_count // participant.vc_count
-            )
-            keystore_stop_index = keystore_start_index + (
-                participant.validator_count // participant.vc_count
-            )
-            if i == participant.vc_count - 1:
-                keystore_stop_index = (
-                    running_total_validator_count + participant.validator_count
-                )
-            artifact_name = "{0}-{1}-{2}-{3}-{4}-{5}".format(
-                padded_idx,
-                participant.cl_type,
-                participant.el_type,
-                keystore_start_index,
-                keystore_stop_index - 1,
-                i,
-            )
+        output_dirpath = NODE_KEYSTORES_OUTPUT_DIRPATH_FORMAT_STR.format(idx, "")
+        padded_idx = shared_utils.zfill_custom(idx + 1, len(str(len(participants))))
+        keystore_start_index = running_total_validator_count
+        keystore_stop_index = (
+            running_total_validator_count + participant.validator_count
+        )
+        artifact_name = "{0}-{1}-{2}-{3}-{4}".format(
+            padded_idx,
+            participant.cl_type,
+            participant.el_type,
+            keystore_start_index,
+            keystore_stop_index - 1,
+        )
         artifact_name = plan.store_service_files(
             service_name, output_dirpath, name=artifact_name
         )
         base_dirname_in_artifact = shared_utils.path_base(output_dirpath)
         to_add = keystore_files_module.new_keystore_files(
             artifact_name,
             shared_utils.path_join(base_dirname_in_artifact),
             shared_utils.path_join(base_dirname_in_artifact, RAW_KEYS_DIRNAME),
             shared_utils.path_join(base_dirname_in_artifact, RAW_SECRETS_DIRNAME),
             shared_utils.path_join(base_dirname_in_artifact, NIMBUS_KEYS_DIRNAME),
             shared_utils.path_join(base_dirname_in_artifact, PRYSM_DIRNAME),
             shared_utils.path_join(base_dirname_in_artifact, TEKU_KEYS_DIRNAME),
             shared_utils.path_join(base_dirname_in_artifact, TEKU_SECRETS_DIRNAME),
         )
         keystore_files.append(to_add)
         running_total_validator_count += participant.validator_count
@@ -232,11 +203,14 @@ def generate_validator_keystores(plan, mnemonic, participants, docker_cache_params):
 # this is like above but runs things in parallel - for large networks that run on k8s or gigantic dockers
-def generate_valdiator_keystores_in_parallel(plan, mnemonic, participants):
+def generate_valdiator_keystores_in_parallel(
+    plan, mnemonic, participants, docker_cache_params
+):
     service_names = launch_prelaunch_data_generator_parallel(
         plan,
         {},
         ["cl-validator-keystore-" + str(idx) for idx in range(0, len(participants))],
+        docker_cache_params,
     )
     all_output_dirpaths = []
     all_generation_commands = []
...