Skip to content

Commit

Permalink
feat(mocknet): improvements (#10108)
Browse files Browse the repository at this point in the history
- added --yes to skip the user confirmation in reset
- added update-binaries command to have neard-runner re-download the
neard binaries
- reworked update-config to work for arbitrary json configs
  • Loading branch information
wacban authored Nov 7, 2023
1 parent 276c28d commit bef14b1
Show file tree
Hide file tree
Showing 2 changed files with 78 additions and 30 deletions.
55 changes: 38 additions & 17 deletions pytest/tests/mocknet/helpers/neard_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ def __init__(self, request, client_address, server):
self.dispatcher.add_method(server.neard_runner.do_start, name="start")
self.dispatcher.add_method(server.neard_runner.do_stop, name="stop")
self.dispatcher.add_method(server.neard_runner.do_reset, name="reset")
self.dispatcher.add_method(server.neard_runner.do_update_binaries,
name="update_binaries")
super().__init__(request, client_address, server)

def do_GET(self):
Expand Down Expand Up @@ -189,21 +191,28 @@ def reset_current_neard_path(self):
'system_path']

# tries to download the binaries specified in config.json, saving them in $home/binaries/
def download_binaries(self):
# if force is set to true all binaries will be downloaded, otherwise only the missing ones
def download_binaries(self, force):
binaries = self.parse_binaries_config()

try:
os.mkdir(self.home_path('binaries'))
except FileExistsError:
pass

with self.lock:
num_binaries_saved = len(self.data['binaries'])
if force:
# always start from 0 and download all binaries
start_index = 0
else:
# start at the index of the first missing binary
# typically it's all or nothing
with self.lock:
start_index = len(self.data['binaries'])

# for now we assume that the binaries recorded in data.json as having been
# downloaded are still valid and were not touched. Also this assumes that their
# filenames are neard0, neard1, etc. in the right order and with nothing skipped
for i in range(num_binaries_saved, len(binaries)):
for i in range(start_index, len(binaries)):
b = binaries[i]
logging.info(f'downloading binary from {b["url"]}')
with open(b['system_path'], 'wb') as f:
Expand Down Expand Up @@ -376,22 +385,24 @@ def do_network_init(self,
'protocol_version': protocol_version,
}, f)

def do_update_config(self, key_value):
    """Set a single entry in the target node's config.json.

    key_value is a 'dotted.json.path=value' string: the part before the
    first '=' is a dot-separated path into the config, and the part after
    it is parsed as JSON. Intermediate objects are created as needed.
    Raises ValueError if there is no '=' or the value is not valid JSON.
    """
    with self.lock:
        logging.info(f'updating config with {key_value}')
        with open(self.target_near_home_path('config.json'), 'r') as f:
            config = json.load(f)

        # Split only on the first '=' so the value itself may contain '='.
        key, sep, value_str = key_value.partition("=")
        if not sep:
            raise ValueError(f'expected key=value, got: {key_value}')
        # Parse the value before touching the config, so a malformed value
        # fails fast and the config file is left unchanged.
        value = json.loads(value_str)

        # Walk down the dotted path, creating intermediate dicts as needed.
        # (Named `node` rather than shadowing the builtin `object`.)
        path = key.split(".")
        node = config
        for part in path[:-1]:
            if part not in node:
                node[part] = {}
            node = node[part]
        node[path[-1]] = value

        with open(self.target_near_home_path('config.json'), 'w') as f:
            json.dump(config, f, indent=2)
Expand Down Expand Up @@ -425,6 +436,7 @@ def do_stop(self):
def do_reset(self):
with self.lock:
state = self.get_state()
logging.info(f"do_reset {state}")
if state == TestState.RUNNING:
self.kill_neard()
self.set_state(TestState.RESETTING)
Expand All @@ -441,6 +453,11 @@ def do_reset(self):
'Cannot reset node as test state has not been initialized yet'
)

# Handler for the "update_binaries" JSON-RPC method: re-download all of the
# neard binaries listed in the runner's config.
def do_update_binaries(self):
    logging.info('update binaries')
    # force=True re-downloads every binary, not just the missing ones.
    self.download_binaries(force=True)
    logging.info('update binaries finished')

def do_ready(self):
with self.lock:
state = self.get_state()
Expand Down Expand Up @@ -796,6 +813,7 @@ def check_genesis_state(self):

def reset_near_home(self):
try:
logging.info("removing the old directory")
shutil.rmtree(self.target_near_home_path('data'))
except FileNotFoundError:
pass
Expand Down Expand Up @@ -855,10 +873,13 @@ def main():
# only let one instance of this code run at a time
_fd = get_lock(args.home)

logging.info("creating neard runner")
runner = NeardRunner(args)

runner.download_binaries()
logging.info("downloading binaries")
runner.download_binaries(force=False)

logging.info("serve")
runner.serve(args.port)


Expand Down
53 changes: 40 additions & 13 deletions pytest/tests/mocknet/mirror.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,11 +290,12 @@ def status_cmd(args, traffic_generator, nodes):


def reset_cmd(args, traffic_generator, nodes):
print(
'this will reset all nodes\' home dirs to their initial states right after test initialization finished. continue? [yes/no]'
)
if sys.stdin.readline().strip() != 'yes':
sys.exit()
if not args.yes:
print(
'this will reset all nodes\' home dirs to their initial states right after test initialization finished. continue? [yes/no]'
)
if sys.stdin.readline().strip() != 'yes':
sys.exit()
all_nodes = nodes + [traffic_generator]
pmap(neard_runner_reset, all_nodes)
logger.info(
Expand Down Expand Up @@ -357,13 +358,12 @@ def neard_runner_network_init(node, validators, boot_nodes, epoch_length,
})


def neard_update_config(node, key_value):
    # Forward the key=value config update to the node's neard-runner
    # over JSON-RPC and hand the result back to the caller.
    params = {"key_value": key_value}
    return neard_runner_jsonrpc(node, 'update_config', params=params)

Expand All @@ -373,8 +373,7 @@ def update_config_cmd(args, traffic_generator, nodes):
results = pmap(
lambda node: neard_update_config(
node,
args.state_cache_size_mb,
args.state_snapshot_enabled,
args.set,
),
nodes,
)
Expand Down Expand Up @@ -419,6 +418,14 @@ def start_traffic_cmd(args, traffic_generator, nodes):
)


def neard_runner_update_binaries(node):
    """Ask node's neard-runner to re-download the neard binaries."""
    # Return the RPC result for consistency with the other neard_runner_*
    # wrappers (e.g. neard_update_config), which all return it.
    return neard_runner_jsonrpc(node, 'update_binaries')


def update_binaries_cmd(args, traffic_generator, nodes):
    # Tell every host — the traffic generator included — to re-download
    # its neard binaries, in parallel.
    all_nodes = nodes + [traffic_generator]
    pmap(neard_runner_update_binaries, all_nodes)


if __name__ == '__main__':
parser = ArgumentParser(description='Run a load test')
parser.add_argument('--chain-id', type=str, required=True)
Expand All @@ -441,9 +448,17 @@ def start_traffic_cmd(args, traffic_generator, nodes):
update_config_parser = subparsers.add_parser(
'update-config',
help='''Update config.json with given flags for all nodes.''')
update_config_parser.add_argument('--state-cache-size-mb', type=int)
update_config_parser.add_argument('--state-snapshot-enabled',
action=BooleanOptionalAction)
update_config_parser.add_argument(
'--set',
help='''
A key value pair to set in the config. The key will be interpreted as a
json path to the config to be updated. The value will be parsed as json.
e.g.
--set 'aaa.bbb.ccc=5'
--set 'aaa.bbb.ccc="5"'
--set 'aaa.bbb.ddd={"eee":6,"fff":"7"}' # no spaces!
''',
)
update_config_parser.set_defaults(func=update_config_cmd)

restart_parser = subparsers.add_parser(
Expand Down Expand Up @@ -505,8 +520,20 @@ def start_traffic_cmd(args, traffic_generator, nodes):
the test can be reset from the start without having to do that again. This command resets all nodes'
data dirs to what was saved then, so that start-traffic will start the test all over again.
''')
reset_parser.add_argument('--yes', action='store_true')
reset_parser.set_defaults(func=reset_cmd)

# It re-uses the same binary urls because the nearcore-release buildkite
# publishes binaries at urls that contain only the branch name, with no
# commit hash, in the following format:
# https://s3-us-west-1.amazonaws.com/build.nearprotocol.com/nearcore/Linux/<branch-name>/neard
update_binaries_parser = subparsers.add_parser(
'update-binaries',
help=
'Update the neard binaries by re-downloading them. The same urls are used.'
)
update_binaries_parser.set_defaults(func=update_binaries_cmd)

args = parser.parse_args()

traffic_generator, nodes = get_nodes(args)
Expand Down

0 comments on commit bef14b1

Please sign in to comment.