diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh
index 3d8b2b0f646..95dd0f21abd 100755
--- a/noir-projects/noir-contracts/bootstrap.sh
+++ b/noir-projects/noir-contracts/bootstrap.sh
@@ -29,6 +29,7 @@ export TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpil
 export BB_HASH=$(cache_content_hash ../../barretenberg/cpp/.rebuild_patterns)
 
 export tmp_dir=./target/tmp
+export sem_id="nargo"
 
 # Create our tmp working directory, ensure it's removed on exit.
 function on_exit() {
@@ -72,7 +73,7 @@ function process_function() {
   if ! cache_download vk-$hash.tar.gz &> /dev/null; then
     # It's not in the cache. Generate the vk file and upload it to the cache.
     echo_stderr "Generating vk for function: $name..."
-    echo "$bytecode_b64" | base64 -d | gunzip | $BB write_vk_for_ivc -b - -o $tmp_dir/$hash 2>/dev/null
+    echo "$bytecode_b64" | base64 -d | gunzip | sem --jobs $PARALLELISM --fg --id $sem_id $BB write_vk_for_ivc -b - -o $tmp_dir/$hash 2>/dev/null
     cache_upload vk-$hash.tar.gz $tmp_dir/$hash &> /dev/null
   fi
 
@@ -105,7 +106,7 @@ function compile {
     "^noir-projects/aztec-nr/" \
   )"
   if ! cache_download contract-$contract_hash.tar.gz &> /dev/null; then
-    $NARGO compile --package $contract --silence-warnings --inliner-aggressiveness 0
+    sem --jobs $PARALLELISM --fg --id $sem_id $NARGO compile --package $contract --silence-warnings --inliner-aggressiveness 0
     $TRANSPILER $json_path $json_path
     cache_upload contract-$contract_hash.tar.gz $json_path &> /dev/null
   fi
@@ -117,7 +118,7 @@ function compile {
   # .[1] is the updated functions on stdin (-)
   # * merges their fields.
   jq -c '.functions[]' $json_path | \
-    parallel -j$PARALLELISM --keep-order -N1 --block 8M --pipe --halt now,fail=1 process_function | \
+    parallel -j4 --keep-order -N1 --block 8M --pipe --halt now,fail=1 process_function | \
     jq -s '{functions: .}' | jq -s '.[0] * {functions: .[1].functions}' $json_path - > $tmp_dir/$filename
   mv $tmp_dir/$filename $json_path
 }
@@ -132,7 +133,7 @@ function build {
   set +e
   echo_stderr "Compiling contracts (bb-hash: $BB_HASH)..."
   grep -oP '(?<=contracts/)[^"]+' Nargo.toml | \
-    parallel -j$PARALLELISM --joblog joblog.txt -v --line-buffer --tag --halt now,fail=1 compile {}
+    parallel -j16 --joblog joblog.txt -v --line-buffer --tag --halt now,fail=1 compile {}
   code=$?
   cat joblog.txt
   return $code
diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh
index b2ba4044009..29ba8e98330 100755
--- a/noir-projects/noir-protocol-circuits/bootstrap.sh
+++ b/noir-projects/noir-protocol-circuits/bootstrap.sh
@@ -16,6 +16,7 @@ export NARGO_HASH=$(cache_content_hash ../../noir/.rebuild_patterns)
 
 tmp_dir=./target/tmp
 key_dir=./target/keys
+sem_id="nargo"
 
 # Circuits matching these patterns we have clientivc keys computed, rather than ultrahonk.
 ivc_patterns=(
@@ -49,7 +50,7 @@ mkdir -p $tmp_dir
 mkdir -p $key_dir
 
 # Export vars needed inside compile.
-export tmp_dir key_dir ci3 ivc_regex rollup_honk_regex
+export tmp_dir key_dir ci3 ivc_regex rollup_honk_regex sem_id
 
 function compile {
   set -euo pipefail
@@ -61,20 +62,20 @@ function compile {
   local program_hash_cmd="$NARGO check --package $name --silence-warnings --show-program-hash | cut -d' ' -f2"
   # echo_stderr $program_hash_cmd
   program_hash=$(dump_fail "$program_hash_cmd")
-  echo_stderr "Hash preimage: $NARGO_HASH-$program_hash"
+  # echo_stderr "Hash preimage: $NARGO_HASH-$program_hash"
   hash=$(hash_str "$NARGO_HASH-$program_hash")
 
   if ! cache_download circuit-$hash.tar.gz 1>&2; then
-    SECONDS=0
     rm -f $json_path
     # TODO: --skip-brillig-constraints-check added temporarily for blobs build time.
     local compile_cmd="$NARGO compile --package $name --silence-warnings --skip-brillig-constraints-check"
-    echo_stderr "$compile_cmd"
-    dump_fail "$compile_cmd"
-    echo_stderr "Compilation complete for: $name (${SECONDS}s)"
+    # echo_stderr "$compile_cmd"
+    echo_stderr "Compiling: $name..."
+    sem --jobs $PARALLELISM --fg --id $sem_id dump_fail "$compile_cmd"
+    echo_stderr "Compilation complete for: $name"
     cache_upload circuit-$hash.tar.gz $json_path &> /dev/null
   fi
-  echo "$name"
+  # echo "$name"
   if echo "$name" | grep -qE "${ivc_regex}"; then
     local proto="client_ivc"
     local write_vk_cmd="write_vk_for_ivc"
@@ -88,7 +89,7 @@ function compile {
     local write_vk_cmd="write_vk_ultra_honk -h 1"
     local vk_as_fields_cmd="vk_as_fields_ultra_honk"
   fi
-  echo "$proto$"
+  # echo "$proto$"
 
   # No vks needed for simulated circuits.
   [[ "$name" == *"simulated"* ]] && return
@@ -99,16 +100,16 @@ function compile {
   hash=$(hash_str "$BB_HASH-$bytecode_hash-$proto")
   if ! cache_download vk-$hash.tar.gz 1>&2; then
     local key_path="$key_dir/$name.vk.data.json"
-    echo_stderr "Generating vk for function: $name..."
-    SECONDS=0
+    echo_stderr "Generating vk for: $name..."
     local vk_cmd="jq -r '.bytecode' $json_path | base64 -d | gunzip | $BB $write_vk_cmd -b - -o - --recursive | xxd -p -c 0"
-    echo_stderr $vk_cmd
-    vk=$(dump_fail "$vk_cmd")
+    # echo_stderr $vk_cmd
+    vk=$(sem --jobs $PARALLELISM --fg --id $sem_id dump_fail "$vk_cmd")
     local vkf_cmd="echo '$vk' | xxd -r -p | $BB $vk_as_fields_cmd -k - -o -"
     # echo_stderrr $vkf_cmd
+    echo_stderr "Converting vk to fields for: $name..."
     vk_fields=$(dump_fail "$vkf_cmd")
     jq -n --arg vk "$vk" --argjson vkf "$vk_fields" '{keyAsBytes: $vk, keyAsFields: $vkf}' > $key_path
-    echo_stderr "Key output at: $key_path (${SECONDS}s)"
+    echo_stderr "Key output at: $key_path"
     cache_upload vk-$hash.tar.gz $key_path &> /dev/null
   fi
 }
@@ -126,7 +127,7 @@ function build {
       echo "$(basename $dir)"
     fi
   done | \
-    parallel -j$PARALLELISM --joblog joblog.txt -v --line-buffer --tag --halt now,fail=1 compile {}
+    parallel -j32 --joblog joblog.txt -v --line-buffer --tag --halt now,fail=1 compile {}
   code=$?
   cat joblog.txt
   return $code
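
Note on the pattern above (an illustrative sketch, not part of the patch): GNU parallel's sem is a counting semaphore keyed by --id, so every invocation that shares the same id (here "nargo") competes for the same pool of --jobs slots, even when the invocations come from separate workers spawned by the outer parallel calls. That is what bounds the number of concurrent nargo/bb processes regardless of the -j values chosen for the outer loops. A minimal, self-contained demonstration of the behaviour follows; the "demo" id and the sleep workload are placeholders chosen for this sketch, not names taken from the scripts.

#!/usr/bin/env bash
# Sketch only: "demo" and the sleep workload are placeholders; the patch uses
# --id "nargo" to gate $NARGO and $BB invocations instead.
set -euo pipefail

for i in $(seq 1 8); do
  # --fg makes each caller block until one of the 3 shared slots is free, so at
  # most 3 of the gated commands run at once, no matter how many callers exist.
  sem --jobs 3 --fg --id demo bash -c "echo start $i; sleep 2; echo done $i" &
done
wait  # wait for the 8 callers; each already waited on its own gated command via --fg

Backgrounding each sem --fg call mirrors what the patch does: the outer parallel workers each reach a sem-wrapped command and block there until the shared semaphore grants a slot.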