Add two new zombienet tests for bridges (manual run) (paritytech#3072)
extracted useful code from paritytech#2982 

This PR:
- adds test 2 for the Rococo <> Westend bridge: checks that the relayer doesn't
submit any extra headers while there are no messages;
- adds test 3 for the Rococo <> Westend bridge: checks that the relayer doesn't
submit any extra headers when there are messages;
- addresses most of the review comments from paritytech#2439 (e.g. log names, the ability
to specify a test number when calling `run-tests.sh`).

Right now only test 2 is working (until bridge hubs are upgraded to use async
backing), so you can run it locally with
`./bridges/zombienet/run-tests.sh --test 2`.
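
For reference, the two ways of invoking `run-tests.sh` after this change (both flags are parsed in
the script, see its diff below) look roughly like this:

```bash
# run only test 2, using paths for a local checkout
./bridges/zombienet/run-tests.sh --test 2

# run the whole suite, using paths for the zombienet+bridges tests container
./bridges/zombienet/run-tests.sh --docker
```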
svyatonik authored Jan 29, 2024
1 parent 008e0fe commit 2e6067d
Showing 16 changed files with 421 additions and 53 deletions.
6 changes: 6 additions & 0 deletions bridges/zombienet/helpers/chains/rococo-at-westend.js
@@ -0,0 +1,6 @@
module.exports = {
grandpaPalletName: "bridgeRococoGrandpa",
parachainsPalletName: "bridgeRococoParachains",
messagesPalletName: "bridgeRococoMessages",
bridgedBridgeHubParaId: 1013,
}
6 changes: 6 additions & 0 deletions bridges/zombienet/helpers/chains/westend-at-rococo.js
@@ -0,0 +1,6 @@
module.exports = {
grandpaPalletName: "bridgeWestendGrandpa",
parachainsPalletName: "bridgeWestendParachains",
messagesPalletName: "bridgeWestendMessages",
bridgedBridgeHubParaId: 1002,
}
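
For orientation, here is a minimal sketch (not part of the PR) of how such a chain descriptor is
consumed: the helpers index the bridge pallets by the names it provides. Connecting directly via
`@polkadot/api` and the endpoint URL are assumptions for this sketch; the real helpers run inside
zombienet and use the injected `zombie.connect` instead.

```js
// Sketch only: query the bridge pallets selected by a chain descriptor.
const { ApiPromise, WsProvider } = require("@polkadot/api");
const bridgedChain = require("./chains/westend-at-rococo");

async function main() {
    // placeholder endpoint - in the tests the node address comes from zombienet
    const api = await ApiPromise.create({ provider: new WsProvider("ws://127.0.0.1:9944") });

    // the descriptor tells us which pallet instances to inspect on this chain
    const authoritySet = await api.query[bridgedChain.grandpaPalletName].currentAuthoritySet();
    const paraInfo = await api.query[bridgedChain.parachainsPalletName].parasInfo(bridgedChain.bridgedBridgeHubParaId);
    console.log("bridged GRANDPA set id:", authoritySet.setId.toString());
    console.log("bridged bridge hub header known:", paraInfo.isSome);

    await api.disconnect();
}

main().catch(console.error);
```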
@@ -0,0 +1,44 @@
const utils = require("./utils");

async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);

// parse arguments
const exitAfterSeconds = Number(args[0]);
const bridgedChain = require("./chains/" + args[1]);

// start listening to new blocks
let totalGrandpaHeaders = 0;
let totalParachainHeaders = 0;
api.rpc.chain.subscribeNewHeads(async function (header) {
const apiAtParent = await api.at(header.parentHash);
const apiAtCurrent = await api.at(header.hash);
const currentEvents = await apiAtCurrent.query.system.events();

totalGrandpaHeaders += await utils.ensureOnlyMandatoryGrandpaHeadersImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
totalParachainHeaders += await utils.ensureOnlyInitialParachainHeaderImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
});

// wait given time
await new Promise(resolve => setTimeout(resolve, exitAfterSeconds * 1000));
// if we haven't seen any new GRANDPA or parachain headers => fail
if (totalGrandpaHeaders == 0) {
throw new Error("No bridged relay chain headers imported");
}
if (totalParachainHeaders == 0) {
throw new Error("No bridged parachain headers imported");
}
}

module.exports = { run }
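
The helper above is meant to be driven from a `.zndsl` test: `args[0]` is the number of seconds to
watch new blocks and `args[1]` selects the chain descriptor. A hypothetical assertion line (node
name and helper path are illustrative; the real file names are not shown in this view) would look
something like:

```
bridge-hub-westend-collator1: js-script ../helpers/<helper-from-this-PR>.js with "300,rococo-at-westend" within 600 seconds
```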
@@ -0,0 +1,81 @@
const utils = require("./utils");

async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);

// parse arguments
const exitAfterSeconds = Number(args[0]);
const bridgedChain = require("./chains/" + args[1]);

// start listening to new blocks
let atLeastOneMessageReceived = false;
let atLeastOneMessageDelivered = false;
const unsubscribe = await api.rpc.chain.subscribeNewHeads(async function (header) {
const apiAtParent = await api.at(header.parentHash);
const apiAtCurrent = await api.at(header.hash);
const currentEvents = await apiAtCurrent.query.system.events();

const messagesReceived = currentEvents.find((record) => {
return record.event.section == bridgedChain.messagesPalletName
&& record.event.method == "MessagesReceived";
}) != undefined;
const messagesDelivered = currentEvents.find((record) => {
return record.event.section == bridgedChain.messagesPalletName &&
record.event.method == "MessagesDelivered";
}) != undefined;
const hasMessageUpdates = messagesReceived || messagesDelivered;
atLeastOneMessageReceived = atLeastOneMessageReceived || messagesReceived;
atLeastOneMessageDelivered = atLeastOneMessageDelivered || messagesDelivered;

if (!hasMessageUpdates) {
// if there are no message update transactions, we only expect mandatory GRANDPA
// headers and initial parachain headers
await utils.ensureOnlyMandatoryGrandpaHeadersImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
await utils.ensureOnlyInitialParachainHeaderImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
} else {
const messageTransactions = (messagesReceived ? 1 : 0) + (messagesDelivered ? 1 : 0);

// otherwise we only accept at most one GRANDPA header
const newGrandpaHeaders = utils.countGrandpaHeaderImports(bridgedChain, currentEvents);
if (newGrandpaHeaders > 1) {
utils.logEvents(currentEvents);
throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + messageTransactions);
}

// ...and at most one parachain header
const newParachainHeaders = utils.countParachainHeaderImports(bridgedChain, currentEvents);
if (newParachainHeaders > 1) {
utils.logEvents(currentEvents);
throw new Error("Unexpected parachain header import: " + newParachainHeaders + " / " + messageTransactions);
}
}
});

// wait until we have received + delivered messages OR until timeout
await utils.pollUntil(
exitAfterSeconds,
() => { return atLeastOneMessageReceived && atLeastOneMessageDelivered; },
() => { unsubscribe(); },
() => {
if (!atLeastOneMessageReceived) {
throw new Error("No messages received from bridged chain");
}
if (!atLeastOneMessageDelivered) {
throw new Error("No messages delivered to bridged chain");
}
},
);
}

module.exports = { run }
103 changes: 103 additions & 0 deletions bridges/zombienet/helpers/utils.js
@@ -0,0 +1,103 @@
module.exports = {
logEvents: function(events) {
let stringifiedEvents = "";
events.forEach((record) => {
if (stringifiedEvents != "") {
stringifiedEvents += ", ";
}
stringifiedEvents += record.event.section + "::" + record.event.method;
});
console.log("Block events: " + stringifiedEvents);
},
countGrandpaHeaderImports: function(bridgedChain, events) {
return events.reduce(
(count, record) => {
const { event } = record;
if (event.section == bridgedChain.grandpaPalletName && event.method == "UpdatedBestFinalizedHeader") {
count += 1;
}
return count;
},
0,
);
},
countParachainHeaderImports: function(bridgedChain, events) {
return events.reduce(
(count, record) => {
const { event } = record;
if (event.section == bridgedChain.parachainsPalletName && event.method == "UpdatedParachainHead") {
count += 1;
}
return count;
},
0,
);
},
pollUntil: async function(
timeoutInSecs,
predicate,
cleanup,
onFailure,
) {
const begin = new Date().getTime();
const end = begin + timeoutInSecs * 1000;
while (new Date().getTime() < end) {
if (predicate()) {
cleanup();
return;
}
await new Promise(resolve => setTimeout(resolve, 100));
}

cleanup();
onFailure();
},
ensureOnlyMandatoryGrandpaHeadersImported: async function(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
) {
// remember the id of the bridged relay chain GRANDPA authority set at the parent block
const authoritySetAtParent = await apiAtParent.query[bridgedChain.grandpaPalletName].currentAuthoritySet();
const authoritySetIdAtParent = authoritySetAtParent["setId"];

// now read the id of the bridged relay chain GRANDPA authority set at the current block
const authoritySetAtCurrent = await apiAtCurrent.query[bridgedChain.grandpaPalletName].currentAuthoritySet();
const authoritySetIdAtCurrent = authoritySetAtCurrent["setId"];

// we expect to see no more than `authoritySetIdAtCurrent - authoritySetIdAtParent` new GRANDPA headers
const maxNewGrandpaHeaders = authoritySetIdAtCurrent - authoritySetIdAtParent;
const newGrandpaHeaders = module.exports.countGrandpaHeaderImports(bridgedChain, currentEvents);

// check that our assumptions are correct
if (newGrandpaHeaders > maxNewGrandpaHeaders) {
module.exports.logEvents(currentEvents);
throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + maxNewGrandpaHeaders);
}

return newGrandpaHeaders;
},
ensureOnlyInitialParachainHeaderImported: async function(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
) {
// remember whether we already know the bridged parachain header at the parent block
const bestBridgedParachainHeader = await apiAtParent.query[bridgedChain.parachainsPalletName].parasInfo(bridgedChain.bridgedBridgeHubParaId);
const hasBestBridgedParachainHeader = bestBridgedParachainHeader.isSome;

// we expect at most one new bridged parachain header, and only if there was no parachain header before
const maxNewParachainHeaders = hasBestBridgedParachainHeader ? 0 : 1;
const newParachainHeaders = module.exports.countParachainHeaderImports(bridgedChain, currentEvents);

// check that our assumptions are correct
if (newParachainHeaders > maxNewParachainHeaders) {
module.exports.logEvents(currentEvents);
throw new Error("Unexpected parachain header import: " + newParachainHeaders + " / " + maxNewParachainHeaders);
}

return newParachainHeaders;
},
}
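
Since `countGrandpaHeaderImports` and `countParachainHeaderImports` only inspect the records they
are given, they can be sanity-checked with hand-made event objects. A minimal sketch, assuming it is
run from the `helpers` folder so the relative `require` paths resolve:

```js
// Sketch only: exercise the pure counting helpers with synthetic event records.
const utils = require("./utils");
const bridgedChain = require("./chains/rococo-at-westend");

const fakeEvents = [
    { event: { section: "bridgeRococoGrandpa", method: "UpdatedBestFinalizedHeader" } },
    { event: { section: "bridgeRococoParachains", method: "UpdatedParachainHead" } },
    { event: { section: "system", method: "ExtrinsicSuccess" } },
];

console.log(utils.countGrandpaHeaderImports(bridgedChain, fakeEvents)); // 1
console.log(utils.countParachainHeaderImports(bridgedChain, fakeEvents)); // 1
```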
42 changes: 22 additions & 20 deletions bridges/zombienet/run-tests.sh
@@ -1,10 +1,12 @@
#!/bin/bash
#set -eu
set -x
shopt -s nullglob

trap "trap - SIGINT SIGTERM EXIT && kill -- -$$" SIGINT SIGTERM EXIT
trap "trap - SIGINT SIGTERM EXIT && killall -q -9 substrate-relay && kill -- -$$" SIGINT SIGTERM EXIT

# run tests in range [TESTS_BEGIN; TESTS_END)
TESTS_BEGIN=1
TESTS_END=1000
# whether to use paths for zombienet+bridges tests container or for local testing
ZOMBIENET_DOCKER_PATHS=0
while [ $# -ne 0 ]
@@ -14,6 +16,11 @@ do
--docker)
ZOMBIENET_DOCKER_PATHS=1
;;
--test)
shift
TESTS_BEGIN="$1"
TESTS_END="$1"
;;
esac
shift
done
@@ -57,7 +64,8 @@ ALL_TESTS_FOLDER=`mktemp -d /tmp/bridges-zombienet-tests.XXXXX`
function start_coproc() {
local command=$1
local name=$2
local coproc_log=`mktemp -p $TEST_FOLDER`
local logname=`basename $name`
local coproc_log=`mktemp -p $TEST_FOLDER $logname.XXXXX`
coproc COPROC {
# otherwise zombienet uses some hardcoded paths
unset RUN_IN_CONTAINER
@@ -73,44 +81,36 @@ function start_coproc() {
}

# execute every test from tests folder
TEST_INDEX=1
TEST_INDEX=$TESTS_BEGIN
while true
do
declare -A TEST_COPROCS
TEST_COPROCS_COUNT=0
TEST_PREFIX=$(printf "%04d" $TEST_INDEX)

# it'll be used by the `sync-exit.sh` script
export TEST_FOLDER=`mktemp -d -p $ALL_TESTS_FOLDER`
export TEST_FOLDER=`mktemp -d -p $ALL_TESTS_FOLDER test-$TEST_PREFIX.XXXXX`

# check if there are no more tests
zndsl_files=($BRIDGE_TESTS_FOLDER/$TEST_PREFIX-*.zndsl)
if [ ${#zndsl_files[@]} -eq 0 ]; then
break
fi

# start relay
if [ -f $BRIDGE_TESTS_FOLDER/$TEST_PREFIX-start-relay.sh ]; then
start_coproc "${BRIDGE_TESTS_FOLDER}/${TEST_PREFIX}-start-relay.sh" "relay"
RELAY_COPROC=$COPROC_PID
((TEST_COPROCS_COUNT++))
fi
# start tests
for zndsl_file in "${zndsl_files[@]}"; do
start_coproc "$ZOMBIENET_BINARY_PATH --provider native test $zndsl_file" "$zndsl_file"
echo -n "1">>$TEST_FOLDER/exit-sync
((TEST_COPROCS_COUNT++))
done
# wait until all tests are completed
relay_exited=0
for n in `seq 1 $TEST_COPROCS_COUNT`; do
if [ "$IS_BASH_5_1" -eq 1 ]; then
wait -n -p COPROC_PID
exit_code=$?
coproc_name=${TEST_COPROCS[$COPROC_PID, 0]}
coproc_log=${TEST_COPROCS[$COPROC_PID, 1]}
coproc_stdout=$(cat $coproc_log)
relay_exited=$(expr "${coproc_name}" == "relay")
else
wait -n
exit_code=$?
@@ -124,18 +124,20 @@ do
echo "====================================================================="
echo "=== Shutting down. Log of failed process below ==="
echo "====================================================================="
echo $coproc_stdout
echo "$coproc_stdout"

exit 1
fi

# if last test has exited, exit relay too
if [ $n -eq $(($TEST_COPROCS_COUNT - 1)) ] && [ $relay_exited -eq 0 ]; then
kill $RELAY_COPROC
break
fi
done

# proceed to next index
((TEST_INDEX++))
if [ "$TEST_INDEX" -ge "$TESTS_END" ]; then
break
fi

# kill relay here - it is started manually by tests
killall substrate-relay
done

echo "====================================================================="
4 changes: 3 additions & 1 deletion bridges/zombienet/scripts/invoke-script.sh
@@ -1,5 +1,7 @@
#!/bin/bash

INVOKE_LOG=`mktemp -p $TEST_FOLDER invoke.XXXXX`

pushd $POLKADOT_SDK_FOLDER/cumulus/scripts
./bridges_rococo_westend.sh $1
./bridges_rococo_westend.sh $1 >$INVOKE_LOG 2>&1
popd
7 changes: 7 additions & 0 deletions bridges/zombienet/scripts/start-relayer.sh
@@ -0,0 +1,7 @@
#!/bin/bash

RELAY_LOG=`mktemp -p $TEST_FOLDER relay.XXXXX`

pushd $POLKADOT_SDK_FOLDER/cumulus/scripts
./bridges_rococo_westend.sh run-relay >$RELAY_LOG 2>&1&
popd