Commit ea4cd083 authored by Léo Vincent, committed by GitHub

chore: add human-friendly descriptions to `exec`, `run_sh` and `run_python` instructions (#561)

Add human-friendly descriptions when executing commands or running
scripts, so the plan output says what each step does instead of only
naming the service it runs on.

Here is an example:

Without descriptions:

```bash
Executing command on service 'validator-key-generation-cl-validator-keystore'
Command returned with exit code '0' with no output

Verifying whether two values meet a certain condition '=='
Verification succeeded. Value is '0'.

Storing files from service 'validator-key-generation-cl-validator-keystore' at path '/node-0-keystores' to files artifact with name '1-lighthouse-geth-0-63'
Files with artifact name '1-lighthouse-geth-0-63' uploaded with artifact UUID 'd42fe1052c85433580a752bf818a54f1'

Executing command on service 'validator-key-generation-cl-validator-keystore'
Command returned with exit code '0' with no output
```

With descriptions:

```bash
Generating keystores
Command returned with exit code '0' with no output

Verifying whether two values meet a certain condition '=='
Verification succeeded. Value is '0'.

Storing files from service 'validator-key-generation-cl-validator-keystore' at path '/node-0-keystores' to files artifact with name '1-lighthouse-geth-0-63'
Files with artifact name '1-lighthouse-geth-0-63' uploaded with artifact UUID '51cfc3d4fd01469ca5512628098ab87f'

Storing prysm password in a file
Command returned with exit code '0' with no output
```
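
For reference, here is a minimal Starlark sketch (not taken from the package) of how the new `description` keyword argument is passed to `plan.exec`, `plan.run_sh` and `plan.run_python`. The service name, image, commands and file path below are illustrative placeholders only.

```python
def run(plan):
    # Hypothetical service used only to illustrate plan.exec with a description.
    keygen = plan.add_service(
        name="validator-key-generation",
        config=ServiceConfig(image="alpine:3.19"),
    )

    # The description replaces the generic
    # "Executing command on service '<name>'" line in the plan output.
    plan.exec(
        service_name=keygen.name,
        description="Generating keystores",
        recipe=ExecRecipe(command=["sh", "-c", "echo keystores"]),
    )

    # run_sh and run_python accept the same keyword argument.
    plan.run_sh(
        description="Storing prysm password in a file",
        run="echo 'password' > /tmp/prysm-password.txt",
    )

    plan.run_python(
        description="Reading genesis timestamp from config",
        run="print(1695902100, end='')",
    )
```
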
Co-authored-by: Barnabas Busa <busa.barnabas@gmail.com>
parent 3e1c7a65
@@ -195,6 +195,7 @@ def launch_full_beacon(
plan.print("applying db schema")
plan.exec(
service_name=initdbschema.name,
description="Applying db schema",
recipe=ExecRecipe(
[
"./misc",
@@ -210,6 +211,7 @@ def launch_full_beacon(
# Initialize the bigtable schema
plan.exec(
service_name=initdbschema.name,
description="Applying big table schema",
recipe=ExecRecipe(
[
"./misc",
@@ -202,6 +202,7 @@ def merge_dashboards_artifacts(
] = additional_dashboard_data[GRANAFA_ADDITIONAL_DASHBOARDS_ARTIFACT_NAME_KEY]
result = plan.run_sh(
description="Merging grafana dashboards artifacts",
run="find "
+ GRAFANA_ADDITIONAL_DASHBOARDS_FILEPATH_ON_SERVICE
+ " -type f -exec cp {} "
@@ -39,6 +39,7 @@ def spam_in_background(
plan.exec(
service_name=CUSTOM_FLOOD_SERVICE_NAME,
description="Sending transactions",
recipe=ExecRecipe(
[
"/bin/sh",
@@ -35,6 +35,7 @@ def launch_mev_flood(
plan.exec(
service_name="mev-flood",
description="Initializing mev flood",
recipe=ExecRecipe(
command=[
"/bin/sh",
@@ -69,4 +70,8 @@ def spam_in_background(
el_uri, owner, user, seconds_per_bundle, joined_extra_args
),
]
plan.exec(service_name="mev-flood", recipe=ExecRecipe(command=command))
plan.exec(
service_name="mev-flood",
description="Sending spam transactions",
recipe=ExecRecipe(command=command),
)
@@ -12,6 +12,7 @@ def launch(plan, network, cancun_time, prague_time):
name="el_cl_genesis",
)
el_cl_genesis_data_uuid = plan.run_sh(
description="Creating network configs",
run="mkdir -p /network-configs/ && mv /opt/* /network-configs/",
store=[StoreSpec(src="/network-configs/", name="el_cl_genesis_data")],
files={"/opt": el_cl_genesis_uuid},
@@ -6,6 +6,7 @@ el_cl_genesis_data = import_module(
def launch(plan, cancun_time, prague_time):
el_cl_genesis_data_uuid = plan.run_sh(
description="Creating network configs",
run="mkdir -p /network-configs/ && \
curl -o latest.tar.gz https://ephemery.dev/latest.tar.gz && \
tar xvzf latest.tar.gz -C /network-configs && \
@@ -8,6 +8,7 @@ constants = import_module("../package_io/constants.star")
def launch(plan, network, cancun_time, prague_time):
# We are running a public network
dummy_genesis_data = plan.run_sh(
description="Creating network configs folder",
run="mkdir /network-configs",
store=[StoreSpec(src="/network-configs/", name="el_cl_genesis_data")],
)
@@ -15,6 +15,7 @@ def shadowfork_prep(
# overload the network name to remove the shadowfork suffix
if constants.NETWORK_NAME.ephemery in base_network:
chain_id = plan.run_sh(
description="Fetching the chain id",
run="curl -s https://ephemery.dev/latest/config.yaml | yq .DEPOSIT_CHAIN_ID | tr -d '\n'",
image="linuxserver/yq",
)
@@ -23,7 +24,8 @@ def shadowfork_prep(
network_id = constants.NETWORK_ID[
base_network
] # overload the network id to match the network name
latest_block = plan.run_sh( # fetch the latest block
latest_block = plan.run_sh(
description="Fetching the latest block",
run="mkdir -p /shadowfork && \
curl -o /shadowfork/latest_block.json "
+ network_params.network_sync_base_url
@@ -71,6 +71,7 @@ def generate_el_cl_genesis_data(
files[GENESIS_VALUES_PATH] = genesis_generation_config_artifact_name
genesis = plan.run_sh(
description="Creating genesis",
run="cp /opt/values.env /config/values.env && ./entrypoint.sh all && mkdir /network-configs && mv /data/custom_config_data/* /network-configs/",
image=image,
files=files,
@@ -85,18 +86,21 @@ def generate_el_cl_genesis_data(
)
genesis_validators_root = plan.run_sh(
description="Reading genesis validators root",
run="cat /data/genesis_validators_root.txt",
files={"/data": genesis.files_artifacts[1]},
wait=None,
)
cancun_time = plan.run_sh(
description="Reading cancun time from genesis",
run="jq .config.cancunTime /data/network-configs/genesis.json | tr -d '\n'",
image="badouralix/curl-jq",
files={"/data": genesis.files_artifacts[0]},
)
prague_time = plan.run_sh(
description="Reading prague time from genesis",
run="jq .config.pragueTime /data/network-configs/genesis.json | tr -d '\n'",
image="badouralix/curl-jq",
files={"/data": genesis.files_artifacts[0]},
@@ -116,7 +116,9 @@ def generate_validator_keystores(plan, mnemonic, participants):
command_str = " && ".join(all_sub_command_strs)
command_result = plan.exec(
recipe=ExecRecipe(command=["sh", "-c", command_str]), service_name=service_name
service_name=service_name,
description="Generating keystores",
recipe=ExecRecipe(command=["sh", "-c", command_str]),
)
plan.verify(command_result["code"], "==", SUCCESSFUL_EXEC_CMD_EXIT_CODE)
@@ -167,8 +169,9 @@ def generate_validator_keystores(plan, mnemonic, participants):
),
]
write_prysm_password_file_cmd_result = plan.exec(
recipe=ExecRecipe(command=write_prysm_password_file_cmd),
service_name=service_name,
description="Storing prysm password in a file",
recipe=ExecRecipe(command=write_prysm_password_file_cmd),
)
plan.verify(
write_prysm_password_file_cmd_result["code"],
@@ -245,10 +248,11 @@ def generate_valdiator_keystores_in_parallel(plan, mnemonic, participants):
# no generation command as validator count is 0
continue
plan.exec(
service_name=service_name,
description="Generating keystore for participant " + str(idx),
recipe=ExecRecipe(
command=["sh", "-c", generation_command + " >/dev/null 2>&1 &"]
),
service_name=service_name,
)
# verify that files got created
@@ -319,8 +323,9 @@ def generate_valdiator_keystores_in_parallel(plan, mnemonic, participants):
),
]
write_prysm_password_file_cmd_result = plan.exec(
recipe=ExecRecipe(command=write_prysm_password_file_cmd),
service_name=service_names[0],
description="Storing prysm password in a file",
recipe=ExecRecipe(command=write_prysm_password_file_cmd),
)
plan.verify(
write_prysm_password_file_cmd_result["code"],
@@ -53,6 +53,7 @@ def new_port_spec(
def read_file_from_service(plan, service_name, filename):
output = plan.exec(
service_name=service_name,
description="Reading {} from {}".format(filename, service_name),
recipe=ExecRecipe(
command=["/bin/sh", "-c", "cat {} | tr -d '\n'".format(filename)]
),
@@ -77,6 +78,7 @@ def label_maker(client, client_type, image, connected_client, extra_labels):
def get_devnet_enodes(plan, filename):
enode_list = plan.run_python(
description="Getting devnet enodes",
files={constants.GENESIS_DATA_MOUNTPOINT_ON_CLIENTS: filename},
wait=None,
run="""
@@ -93,6 +95,7 @@ print(",".join(bootnodes), end="")
def get_devnet_enrs_list(plan, filename):
enr_list = plan.run_python(
description="Creating devnet enrs list",
files={constants.GENESIS_DATA_MOUNTPOINT_ON_CLIENTS: filename},
wait=None,
run="""
@@ -109,6 +112,7 @@ print(",".join(bootnodes), end="")
def read_genesis_timestamp_from_config(plan, filename):
value = plan.run_python(
description="Reading genesis timestamp from config",
files={constants.GENESIS_DATA_MOUNTPOINT_ON_CLIENTS: filename},
wait=None,
packages=["PyYAML"],
@@ -127,6 +131,7 @@ print(min_genesis_time + genesis_delay, end="")
def read_genesis_network_id_from_config(plan, filename):
value = plan.run_python(
description="Reading genesis network id from config",
files={constants.GENESIS_DATA_MOUNTPOINT_ON_CLIENTS: filename},
wait=None,
packages=["PyYAML"],
@@ -162,6 +167,7 @@ def get_network_name(network):
# note that the timestamp it returns is a string
def get_final_genesis_timestamp(plan, padding):
result = plan.run_python(
description="Getting final genesis timestamp",
run="""
import time
import sys