diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..78c6ddee2 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,8 @@ +root = true + +[*] +end_of_line = lf +insert_final_newline = true +charset = utf-8 +indent_style = space +indent_size = 2 diff --git a/.github/workflows/build-ton-linux-android-tonlib.yml b/.github/workflows/build-ton-linux-android-tonlib.yml new file mode 100644 index 000000000..ae1be22ff --- /dev/null +++ b/.github/workflows/build-ton-linux-android-tonlib.yml @@ -0,0 +1,32 @@ +name: Tonlib Android + +on: [push,workflow_dispatch,workflow_call] + +jobs: + build: + runs-on: ubuntu-22.04 + + steps: + - name: Check out repository + uses: actions/checkout@v3 + with: + submodules: 'recursive' + + - name: Install system libraries + run: | + sudo apt-get update + sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libgflags-dev \ + zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev \ + libtool autoconf libsodium-dev libsecp256k1-dev + + - name: Build TON + run: | + cp assembly/android/build-android-tonlib.sh . 
+ chmod +x build-android-tonlib.sh + ./build-android-tonlib.sh -a + + - name: Upload artifacts + uses: actions/upload-artifact@master + with: + name: tonlib-android + path: artifacts \ No newline at end of file diff --git a/.github/workflows/build-ton-linux-x86-64-shared.yml b/.github/workflows/build-ton-linux-x86-64-shared.yml new file mode 100644 index 000000000..166de6606 --- /dev/null +++ b/.github/workflows/build-ton-linux-x86-64-shared.yml @@ -0,0 +1,40 @@ +name: Ubuntu TON build (shared, x86-64) + +on: [push,workflow_dispatch,workflow_call] + +jobs: + build: + strategy: + fail-fast: false + matrix: + os: [ubuntu-20.04, ubuntu-22.04] + runs-on: ${{ matrix.os }} + + steps: + - name: Check out repository + uses: actions/checkout@v3 + with: + submodules: 'recursive' + + - name: Install system libraries + run: | + sudo apt-get update + sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev + + - name: Install clang-16 + run: | + wget https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + sudo ./llvm.sh 16 all + + - name: Build TON + run: | + cp assembly/native/build-ubuntu-shared.sh . + chmod +x build-ubuntu-shared.sh + ./build-ubuntu-shared.sh -t -a + + - name: Upload artifacts + uses: actions/upload-artifact@master + with: + name: ton-binaries-${{ matrix.os }} + path: artifacts diff --git a/.github/workflows/build-ton-macos-x86-64-shared.yml b/.github/workflows/build-ton-macos-x86-64-shared.yml new file mode 100644 index 000000000..c9331e3be --- /dev/null +++ b/.github/workflows/build-ton-macos-x86-64-shared.yml @@ -0,0 +1,25 @@ +name: MacOS TON build (shared, x86-64) + +on: [push,workflow_dispatch,workflow_call] + +jobs: + build: + runs-on: macos-12 + + steps: + - name: Check out repository + uses: actions/checkout@v3 + with: + submodules: 'recursive' + + - name: Build TON + run: | + cp assembly/native/build-macos-shared.sh . 
+ chmod +x build-macos-shared.sh + ./build-macos-shared.sh -t -a + + - name: Upload artifacts + uses: actions/upload-artifact@master + with: + name: ton-binaries-macos-12 + path: artifacts diff --git a/.github/workflows/build-ton-wasm-emscripten.yml b/.github/workflows/build-ton-wasm-emscripten.yml new file mode 100644 index 000000000..16156b073 --- /dev/null +++ b/.github/workflows/build-ton-wasm-emscripten.yml @@ -0,0 +1,30 @@ +name: Emscripten TON build (wasm) + +on: [push,workflow_dispatch,workflow_call] + +jobs: + build: + runs-on: ubuntu-22.04 + + steps: + - name: Check out repository + uses: actions/checkout@v3 + with: + submodules: 'recursive' + + - name: Install system libraries + run: | + sudo apt-get update + sudo apt-get install -y build-essential git openssl cmake ninja-build zlib1g-dev libssl-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev + + - name: Build TON WASM artifacts + run: | + cd assembly/wasm + chmod +x fift-func-wasm-build-ubuntu.sh + ./fift-func-wasm-build-ubuntu.sh -a + + - name: Upload artifacts + uses: actions/upload-artifact@master + with: + name: ton-wasm-binaries + path: artifacts \ No newline at end of file diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 10c20ac0c..3d248cfe9 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -46,7 +46,7 @@ jobs: - name: Download Windows artifacts uses: dawidd6/action-download-artifact@v2 with: - workflow: win-2019-compile.yml + workflow: ton-x86-64-windows.yml path: artifacts workflow_conclusion: success skip_unpack: true @@ -54,7 +54,7 @@ jobs: - name: Download and unzip Windows artifacts uses: dawidd6/action-download-artifact@v2 with: - workflow: win-2019-compile.yml + workflow: ton-x86-64-windows.yml path: artifacts workflow_conclusion: success skip_unpack: false @@ -62,7 +62,7 @@ jobs: - name: Download WASM artifacts uses: dawidd6/action-download-artifact@v2 with: - workflow: ton-wasm-emscripten.yml 
+ workflow: build-ton-wasm-emscripten.yml path: artifacts workflow_conclusion: success skip_unpack: true diff --git a/.github/workflows/docker-ubuntu-image.yml b/.github/workflows/docker-ubuntu-image.yml index ca7540787..449711d86 100644 --- a/.github/workflows/docker-ubuntu-image.yml +++ b/.github/workflows/docker-ubuntu-image.yml @@ -1,4 +1,4 @@ -name: Docker Ubuntu 20.04 image +name: Docker Ubuntu 22.04 image on: workflow_dispatch: @@ -12,10 +12,12 @@ env: jobs: build-and-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - name: Checkout + - name: Check out repository uses: actions/checkout@v3 + with: + submodules: 'recursive' - name: Set up QEMU uses: docker/setup-qemu-action@v1 @@ -35,5 +37,5 @@ jobs: uses: docker/build-push-action@v2 with: push: true - context: ./docker + context: ./ tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest diff --git a/.github/workflows/macos-11.7-compile.yml b/.github/workflows/macos-11.7-compile.yml deleted file mode 100644 index eb12db1b5..000000000 --- a/.github/workflows/macos-11.7-compile.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: MacOS 11.7 Big Sur x86-64 Compile - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - - runs-on: macos-11 - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Compile Secp256k1 - run: | - export NONINTERACTIVE=1 - brew install ninja secp256k1 libsodium libmicrohttpd pkg-config automake libtool - git clone https://github.com/libbitcoin/secp256k1.git - cd secp256k1 - ./autogen.sh - ./configure --enable-module-recovery - make - make install - - - name: Build all - run: | - brew unlink openssl@1.1 - brew install openssl@3 - brew unlink openssl@3 && brew link --overwrite openssl@3 - rootPath=`pwd` - mkdir build - cd build - cmake -GNinja -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.7 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release .. 
- - ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine \ - lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ - http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \ - test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \ - test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state - - - name: Strip binaries - run: | - strip build/storage/storage-daemon/storage-daemon - strip build/storage/storage-daemon/storage-daemon-cli - strip build/blockchain-explorer/blockchain-explorer - strip build/crypto/fift - strip build/crypto/func - strip build/crypto/create-state - strip build/crypto/tlbc - strip build/validator-engine-console/validator-engine-console - strip build/tonlib/tonlib-cli - strip build/http/http-proxy - strip build/rldp-http-proxy/rldp-http-proxy - strip build/dht-server/dht-server - strip build/lite-client/lite-client - strip build/validator-engine/validator-engine - strip build/utils/generate-random-id - strip build/utils/json2tlo - strip build/adnl/adnl-proxy - - - name: Run tests - run: | - cd build - ctest --output-on-failure -E "test-catchain|test-actors" - - - name: Find & copy binaries - run: | - mkdir artifacts - cp build/storage/storage-daemon/storage-daemon artifacts/ - cp build/storage/storage-daemon/storage-daemon-cli artifacts/ - cp build/blockchain-explorer/blockchain-explorer artifacts/ - cp build/crypto/fift artifacts/ - cp build/crypto/func artifacts/ - cp build/crypto/create-state artifacts/ - cp build/crypto/tlbc artifacts/ - cp build/validator-engine-console/validator-engine-console artifacts/ - cp build/tonlib/tonlib-cli artifacts/ - cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib - cp build/http/http-proxy artifacts/ - cp 
build/rldp-http-proxy/rldp-http-proxy artifacts/ - cp build/dht-server/dht-server artifacts/ - cp build/lite-client/lite-client artifacts/ - cp build/validator-engine/validator-engine artifacts/ - cp build/utils/generate-random-id artifacts/ - cp build/utils/json2tlo artifacts/ - cp build/adnl/adnl-proxy artifacts/ - cp build/emulator/*emulator.* artifacts/ - chmod +x artifacts/* - rsync -r crypto/smartcont artifacts/ - rsync -r crypto/fift/lib artifacts/ - ls -laRt artifacts - - - name: Simple binaries test - run: | - artifacts/validator-engine -V - artifacts/lite-client -V - artifacts/fift -V - artifacts/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-macos-11.7 - path: artifacts diff --git a/.github/workflows/macos-12.6-compile.yml b/.github/workflows/macos-12.6-compile.yml deleted file mode 100644 index f41efc669..000000000 --- a/.github/workflows/macos-12.6-compile.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: MacOS 12.6 Monterey x86-64 Compile - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - - runs-on: macos-12 - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Compile Secp256k1 - run: | - export NONINTERACTIVE=1 - brew install ninja secp256k1 libsodium libmicrohttpd pkg-config automake libtool - git clone https://github.com/libbitcoin/secp256k1.git - cd secp256k1 - ./autogen.sh - ./configure --enable-module-recovery - make - make install - - - name: Build all - run: | - brew unlink openssl@1.1 - brew install openssl@3 - brew unlink openssl@3 && brew link --overwrite openssl@3 - rootPath=`pwd` - mkdir build - cd build - cmake -GNinja -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=12.6 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release .. 
- - ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli \ - validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ - http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \ - test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \ - test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state - - - name: Strip binaries - run: | - strip build/storage/storage-daemon/storage-daemon - strip build/storage/storage-daemon/storage-daemon-cli - strip build/blockchain-explorer/blockchain-explorer - strip build/crypto/fift - strip build/crypto/func - strip build/crypto/create-state - strip build/crypto/tlbc - strip build/validator-engine-console/validator-engine-console - strip build/tonlib/tonlib-cli - strip build/http/http-proxy - strip build/rldp-http-proxy/rldp-http-proxy - strip build/dht-server/dht-server - strip build/lite-client/lite-client - strip build/validator-engine/validator-engine - strip build/utils/generate-random-id - strip build/utils/json2tlo - strip build/adnl/adnl-proxy - - - name: Run tests - run: | - cd build - ctest --output-on-failure -E "test-catchain|test-actors" - - - name: Find & copy binaries - run: | - mkdir artifacts - cp build/storage/storage-daemon/storage-daemon artifacts/ - cp build/storage/storage-daemon/storage-daemon-cli artifacts/ - cp build/blockchain-explorer/blockchain-explorer artifacts/ - cp build/crypto/fift artifacts/ - cp build/crypto/func artifacts/ - cp build/crypto/create-state artifacts/ - cp build/crypto/tlbc artifacts/ - cp build/validator-engine-console/validator-engine-console artifacts/ - cp build/tonlib/tonlib-cli artifacts/ - cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib - cp build/http/http-proxy artifacts/ - cp 
build/rldp-http-proxy/rldp-http-proxy artifacts/ - cp build/dht-server/dht-server artifacts/ - cp build/lite-client/lite-client artifacts/ - cp build/validator-engine/validator-engine artifacts/ - cp build/utils/generate-random-id artifacts/ - cp build/utils/json2tlo artifacts/ - cp build/adnl/adnl-proxy artifacts/ - cp build/emulator/*emulator.* artifacts/ - chmod +x artifacts/* - rsync -r crypto/smartcont artifacts/ - rsync -r crypto/fift/lib artifacts/ - ls -laRt artifacts - - - name: Simple binaries test - run: | - artifacts/validator-engine -V - artifacts/lite-client -V - artifacts/fift -V - artifacts/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-macos-12.6 - path: artifacts diff --git a/.github/workflows/ton-aarch64-linux.yml b/.github/workflows/ton-aarch64-linux.yml deleted file mode 100644 index 3c600bee5..000000000 --- a/.github/workflows/ton-aarch64-linux.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: "TON aarch64 Linux binaries" - -on: [workflow_dispatch,workflow_call] - -jobs: - build: - runs-on: ubuntu-22.04 - - steps: - - run: | - sudo apt update - sudo apt install -y apt-utils - sudo apt install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static - - - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - uses: cachix/install-nix-action@v18 - with: - extra_nix_config: | - access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - - - name: Compile - run: nix build .?submodules=1#packages.aarch64-linux.ton-oldglibc_staticbinaries --print-build-logs --system aarch64-linux -o result-aarch64 - - - name: Copy binaries - run: | - ls -lart - mkdir artifacts - cp $PWD/result-aarch64-linux/bin/* artifacts/ - chmod +x artifacts/* - cp $PWD/result-aarch64-linux/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so - cp $PWD/result-aarch64-linux/lib/libemulator.so artifacts/ - cp -R crypto/smartcont artifacts/ - cp -R crypto/fift/lib artifacts/ - - - name: Simple binaries test - run: | - sudo mv 
/nix/store /nix/store2 - artifacts/validator-engine -V - artifacts/lite-client -V - artifacts/fift -V - artifacts/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-aarch64-linux-binaries - path: artifacts \ No newline at end of file diff --git a/.github/workflows/ton-aarch64-macos.yml b/.github/workflows/ton-aarch64-macos.yml deleted file mode 100644 index 75fcec789..000000000 --- a/.github/workflows/ton-aarch64-macos.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: "TON aarch64 macOS binaries" - -on: [workflow_dispatch,workflow_call] - -jobs: - build: - runs-on: macos-12 - - steps: - - run: brew install qemu - - - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - uses: cachix/install-nix-action@v18 - with: - extra_nix_config: | - access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - - - name: Compile - run: nix build .?submodules=1#packages.aarch64-darwin.ton-staticbin-dylib --print-build-logs -o result-aarch64-darwin - - - name: Copy binaries - run: | - ls -lart - mkdir artifacts - cp $PWD/result-aarch64-darwin/bin/* artifacts/ - chmod +x artifacts/* - cp $PWD/result-aarch64-darwin/lib/libtonlibjson* artifacts/ - cp $PWD/result-aarch64-darwin/lib/libemulator* artifacts/ - cp -R crypto/smartcont artifacts/ - cp -R crypto/fift/lib artifacts/ - - - name: Simple binaries test - run: | - sudo mv /nix/store /nix/store2 - artifacts/validator-engine -V - artifacts/lite-client -V - artifacts/fift -V - artifacts/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-aarch64-macos-binaries - path: artifacts diff --git a/.github/workflows/ton-ccpcheck.yml b/.github/workflows/ton-ccpcheck.yml index 8e9d6ad2f..d2d8cf700 100644 --- a/.github/workflows/ton-ccpcheck.yml +++ b/.github/workflows/ton-ccpcheck.yml @@ -1,10 +1,9 @@ -name: TON Ccpcheck +name: TON Static Code Analysis on: [push,workflow_dispatch,workflow_call] jobs: build: - runs-on: ubuntu-22.04 steps: diff --git 
a/.github/workflows/ton-wasm-emscripten.yml b/.github/workflows/ton-wasm-emscripten.yml deleted file mode 100644 index a31678004..000000000 --- a/.github/workflows/ton-wasm-emscripten.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: TON WASM Compile - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - runs-on: ubuntu-22.04 - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Install libraries - run: | - sudo apt update - sudo apt install -y build-essential git make cmake ninja-build clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs libsecp256k1-dev libsodium-dev automake libtool - - - name: Setup compiler - run: | - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 16 all - - - name: Configure & Build - run: | - cd .github/script - ./fift-func-wasm-build-ubuntu.sh - - - name: Find & copy binaries - run: | - mkdir artifacts - ls build/crypto - cp build/crypto/fift* artifacts - cp build/crypto/func* artifacts - cp build/crypto/tlbc* artifacts - cp build/emulator/emulator-emscripten* artifacts - cp -R crypto/smartcont artifacts - cp -R crypto/fift/lib artifacts - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-wasm-binaries - path: artifacts \ No newline at end of file diff --git a/.github/workflows/ton-x86-64-linux.yml b/.github/workflows/ton-x86-64-linux.yml index a4760dc41..fdd910001 100644 --- a/.github/workflows/ton-x86-64-linux.yml +++ b/.github/workflows/ton-x86-64-linux.yml @@ -1,4 +1,4 @@ -name: "TON x86_64 Linux binaries" +name: Ubuntu TON build (portable, x86-64) on: [push,workflow_dispatch,workflow_call] @@ -20,19 +20,11 @@ jobs: extra_nix_config: | access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - - name: Compile - run: nix build .?submodules=1#packages.x86_64-linux.ton-oldglibc_staticbinaries --print-build-logs --system x86_64-linux -o 
result-x86_64 - - - name: Copy binaries + - name: Build TON run: | - ls -lart - mkdir artifacts - cp $PWD/result-x86_64/bin/* artifacts/ - chmod +x artifacts/* - cp $PWD/result-x86_64/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so - cp $PWD/result-x86_64/lib/libemulator.so artifacts/ - cp -R crypto/smartcont artifacts/ - cp -R crypto/fift/lib artifacts/ + cp assembly/nix/build-linux-x86-64-nix.sh . + chmod +x build-linux-x86-64-nix.sh + ./build-linux-x86-64-nix.sh - name: Simple binaries test run: | diff --git a/.github/workflows/ton-x86-64-macos.yml b/.github/workflows/ton-x86-64-macos.yml index cea2937a3..c0f907181 100644 --- a/.github/workflows/ton-x86-64-macos.yml +++ b/.github/workflows/ton-x86-64-macos.yml @@ -1,4 +1,4 @@ -name: "TON x86_64 macOS binaries" +name: MacOS TON build (portable, x86-64) on: [push,workflow_dispatch,workflow_call] @@ -16,19 +16,11 @@ jobs: extra_nix_config: | access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - - name: Compile - run: nix build .?submodules=1#packages.x86_64-darwin.ton-staticbin-dylib --print-build-logs -o result-x86_64-darwin - - - name: Copy binaries + - name: Build TON run: | - ls -lart - mkdir artifacts - cp $PWD/result-x86_64-darwin/bin/* artifacts/ - chmod +x artifacts/* - cp $PWD/result-x86_64-darwin/lib/libtonlibjson.dylib artifacts/ - cp $PWD/result-x86_64-darwin/lib/libemulator.dylib artifacts/ - cp -R crypto/smartcont artifacts/ - cp -R crypto/fift/lib artifacts/ + cp assembly/nix/build-macos-nix.sh . 
+ chmod +x build-macos-nix.sh + ./build-macos-nix.sh - name: Simple binaries test run: | diff --git a/.github/workflows/ton-x86-64-windows.yml b/.github/workflows/ton-x86-64-windows.yml new file mode 100644 index 000000000..670261839 --- /dev/null +++ b/.github/workflows/ton-x86-64-windows.yml @@ -0,0 +1,34 @@ +name: Windows TON build (portable, x86-64) + +on: [push,workflow_dispatch,workflow_call] + +defaults: + run: + shell: cmd + +jobs: + build: + + runs-on: windows-2022 + + steps: + - name: Get Current OS version + run: | + systeminfo | findstr /B /C:"OS Name" /C:"OS Version" + + - name: Check out current repository + uses: actions/checkout@v3 + with: + submodules: 'recursive' + + - name: Build TON + run: | + copy assembly\native\build-windows-github.bat . + copy assembly\native\build-windows.bat . + build-windows-github.bat Enterprise + + - name: Upload artifacts + uses: actions/upload-artifact@master + with: + name: ton-win-binaries + path: artifacts diff --git a/.github/workflows/tonlib-android-jni.yml b/.github/workflows/tonlib-android-jni.yml deleted file mode 100644 index 6e04f8b77..000000000 --- a/.github/workflows/tonlib-android-jni.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: Tonlib Android JNI - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - - runs-on: ubuntu-22.04 - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Install libraries - run: | - sudo apt update - sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build - - - name: Configure & Build - run: | - wget -q https://dl.google.com/android/repository/android-ndk-r25b-linux.zip - unzip -q android-ndk-r25b-linux.zip - export JAVA_AWT_LIBRARY=NotNeeded - export JAVA_JVM_LIBRARY=NotNeeded - export JAVA_INCLUDE_PATH=${JAVA_HOME}/include - export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include - export 
JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux - - export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b - export NDK_PLATFORM="android-21" - export ANDROID_PLATFORM="android-21" - export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto - - rm -rf example/android/src/drinkless/org/ton/TonApi.java - cd example/android/ - - sudo apt install -y libtool autoconf libsodium-dev libsecp256k1-dev - - cmake -GNinja -DTON_ONLY_TONLIB=ON . - - ninja prepare_cross_compiling - - sudo apt remove -y libsodium-dev libsecp256k1-dev - - rm CMakeCache.txt - ./build-all.sh - find . -name "*.debug" -type f -delete - - - name: Find & copy binaries - run: | - mkdir -p artifacts/tonlib-android-jni - cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-android-jni/ - cp -R example/android/libs/* artifacts/tonlib-android-jni/ - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: tonlib-android-jni - path: artifacts \ No newline at end of file diff --git a/.github/workflows/ubuntu-22.04-compile.yml b/.github/workflows/ubuntu-22.04-compile.yml deleted file mode 100644 index af8943a1c..000000000 --- a/.github/workflows/ubuntu-22.04-compile.yml +++ /dev/null @@ -1,78 +0,0 @@ -name: Ubuntu 22.04 Compile - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - - runs-on: ubuntu-22.04 - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Install libraries - run: | - sudo apt update - sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build libsecp256k1-dev libsodium-dev - - - name: Show CPU flags - run: | - cat /proc/cpuinfo - - - name: Configure & Build - run: | - export CC=$(which clang) - export CXX=$(which clang++) - export CCACHE_DISABLE=1 - - git clone https://github.com/openssl/openssl openssl_3 - cd openssl_3 - git checkout openssl-3.1.4 - ./config - 
make build_libs -j4 - - cd .. - rootPath=`pwd` - mkdir build - cd build - - cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_3/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_3/libcrypto.a -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= .. - - ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client \ - pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \ - adnl-proxy create-state emulator \ - test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \ - test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state - - - name: Strip binaries - run: | - strip -g build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.* - - - name: Run tests - run: | - cd build - ctest --output-on-failure -E "test-catchain|test-actors" - - - name: Find & copy binaries - run: | - mkdir artifacts - cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine 
build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.* artifacts - chmod +x artifacts/* - cp -R crypto/smartcont artifacts/ - cp -R crypto/fift/lib artifacts/ - - - name: Simple binaries test - run: | - artifacts/validator-engine -V - artifacts/lite-client -V - artifacts/fift -V - artifacts/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-ubuntu-binaries - path: artifacts diff --git a/.github/workflows/ubuntu-compile.yml b/.github/workflows/ubuntu-compile.yml deleted file mode 100644 index 3c1e7bad1..000000000 --- a/.github/workflows/ubuntu-compile.yml +++ /dev/null @@ -1,81 +0,0 @@ -name: Ubuntu Compile x86-64 - -on: [push,workflow_dispatch,workflow_call] - -jobs: - build: - strategy: - fail-fast: false - matrix: - os: [ubuntu-20.04, ubuntu-22.04] - runs-on: ${{ matrix.os }} - - steps: - - name: Check out repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Install libraries - run: | - sudo apt update - sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build libsecp256k1-dev libsodium-dev - - - name: Show CPU flags - run: | - cat /proc/cpuinfo - - - name: Configure & Build - run: | - export CC=$(which clang) - export CXX=$(which clang++) - export CCACHE_DISABLE=1 - - mkdir build-${{ matrix.os }} - cd build-${{ matrix.os }} - - git clone https://github.com/openssl/openssl openssl_3 - cd openssl_3 - git checkout openssl-3.1.4 - ./config - make build_libs -j4 - - cd .. - rootPath=`pwd` - - cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_3/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_3/libcrypto.a -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= .. 
- ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client \ - pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy \ - create-state create-hardfork emulator \ - test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \ - test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state - - - name: Strip binaries - run: | - strip -g build-${{ matrix.os }}/storage/storage-daemon/storage-daemon build-${{ matrix.os }}/storage/storage-daemon/storage-daemon-cli build-${{ matrix.os }}/crypto/fift build-${{ matrix.os }}/crypto/tlbc build-${{ matrix.os }}/crypto/func build-${{ matrix.os }}/crypto/create-state build-${{ matrix.os }}/validator-engine-console/validator-engine-console build-${{ matrix.os }}/tonlib/tonlib-cli build-${{ matrix.os }}/tonlib/libtonlibjson.so.0.5 build-${{ matrix.os }}/http/http-proxy build-${{ matrix.os }}/rldp-http-proxy/rldp-http-proxy build-${{ matrix.os }}/dht-server/dht-server build-${{ matrix.os }}/lite-client/lite-client build-${{ matrix.os }}/validator-engine/validator-engine build-${{ matrix.os }}/utils/generate-random-id build-${{ matrix.os }}/utils/json2tlo build-${{ matrix.os }}/adnl/adnl-proxy build-${{ matrix.os }}/emulator/libemulator.* - - - name: Run tests - run: | - cd build-${{ matrix.os }} - ctest --output-on-failure -E "test-catchain|test-actors" - - - name: Find & copy binaries - run: | - mkdir artifacts-${{ matrix.os }} - cp build-${{ matrix.os }}/storage/storage-daemon/storage-daemon build-${{ matrix.os }}/storage/storage-daemon/storage-daemon-cli build-${{ matrix.os }}/crypto/fift build-${{ matrix.os }}/crypto/tlbc build-${{ matrix.os }}/crypto/func build-${{ matrix.os }}/crypto/create-state build-${{ matrix.os }}/validator-engine-console/validator-engine-console build-${{ matrix.os 
}}/tonlib/tonlib-cli build-${{ matrix.os }}/tonlib/libtonlibjson.so.0.5 build-${{ matrix.os }}/http/http-proxy build-${{ matrix.os }}/rldp-http-proxy/rldp-http-proxy build-${{ matrix.os }}/dht-server/dht-server build-${{ matrix.os }}/lite-client/lite-client build-${{ matrix.os }}/validator-engine/validator-engine build-${{ matrix.os }}/utils/generate-random-id build-${{ matrix.os }}/utils/json2tlo build-${{ matrix.os }}/adnl/adnl-proxy build-${{ matrix.os }}/emulator/libemulator.* artifacts-${{ matrix.os }} - chmod +x artifacts-${{ matrix.os }}/* - cp -R crypto/smartcont artifacts-${{ matrix.os }} - cp -R crypto/fift/lib artifacts-${{ matrix.os }} - - - name: Simple binaries test - run: | - artifacts-${{ matrix.os }}/validator-engine -V - artifacts-${{ matrix.os }}/lite-client -V - artifacts-${{ matrix.os }}/fift -V - artifacts-${{ matrix.os }}/func -V - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-binaries-${{ matrix.os }} - path: artifacts-${{ matrix.os }} diff --git a/.github/workflows/win-2019-compile.yml b/.github/workflows/win-2019-compile.yml deleted file mode 100644 index e94655f7d..000000000 --- a/.github/workflows/win-2019-compile.yml +++ /dev/null @@ -1,108 +0,0 @@ -name: Windows Server 2019 x64 Compile - -on: [push,workflow_dispatch,workflow_call] - -defaults: - run: - shell: cmd - -jobs: - build: - - runs-on: windows-2019 - - steps: - - name: Get Current OS version - run: | - systeminfo | findstr /B /C:"OS Name" /C:"OS Version" - - - name: Check out current repository - uses: actions/checkout@v3 - with: - submodules: 'recursive' - - - name: Check out zlib repository - uses: actions/checkout@v3 - with: - repository: desktop-app/zlib - path: zlib - - - name: Setup msbuild.exe - uses: microsoft/setup-msbuild@v1.1 - - - name: Install Pkg-config Lite - run: choco install pkgconfiglite - - - name: Compile zlib Win64 - run: | - cd zlib\contrib\vstudio\vc14 - msbuild zlibstat.vcxproj /p:Configuration=ReleaseWithoutAsm 
/p:platform=x64 -p:PlatformToolset=v142 - - - name: Compile secp256k1 Win64 - run: | - git clone https://github.com/libbitcoin/secp256k1.git - cd secp256k1\builds\msvc\vs2017 - msbuild /p:Configuration=StaticRelease -p:PlatformToolset=v142 -p:Platform=x64 - - - name: Install pre-compiled libsodium Win64 - run: | - curl -Lo libsodium-1.0.18-stable-msvc.zip https://download.libsodium.org/libsodium/releases/libsodium-1.0.18-stable-msvc.zip - unzip libsodium-1.0.18-stable-msvc.zip - - - name: Install pre-compiled OpenSSL 3 Win64 - run: | - curl -Lo openssl-3.1.4.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/openssl-3.1.4.zip - unzip openssl-3.1.4.zip - - - name: Install pre-compiled libmicrohttpd Win64 - run: | - curl -Lo libmicrohttpd-0.9.77-w32-bin.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/libmicrohttpd-0.9.77-w32-bin.zip - unzip libmicrohttpd-0.9.77-w32-bin.zip - - - name: Install pre-compiled Readline Win64 - run: | - curl -Lo readline-5.0-1-lib.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/readline-5.0-1-lib.zip - unzip readline-5.0-1-lib.zip - - - name: Compile - run: | - set root=%cd% - set SODIUM_DIR=%root%\libsodium - echo %root% - echo %SODIUM_DIR% - mkdir build - cd build - cmake -DSODIUM_USE_STATIC_LIBS=1 -DSECP256K1_INCLUDE_DIR=%root%\secp256k1\include -DSECP256K1_LIBRARY=%root%\secp256k1\bin\x64\Release\v142\static\secp256k1.lib -DREADLINE_INCLUDE_DIR=%root%\readline-5.0-1-lib\include\readline -DREADLINE_LIBRARY=%root%\readline-5.0-1-lib\lib\readline.lib -DPORTABLE=1 -DZLIB_FOUND=1 -DMHD_FOUND=1 -DMHD_LIBRARY=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static\libmicrohttpd.lib -DMHD_INCLUDE_DIR=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static -DZLIB_INCLUDE_DIR=%root%\zlib -DZLIB_LIBRARY=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=%root%/openssl-3.1.4/x64/include 
-DOPENSSL_CRYPTO_LIBRARY=%root%/openssl-3.1.4/x64/lib/libcrypto_static.lib -DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj /W0" .. - cmake --build . --config Release --target storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state - - - name: Run tests - run: | - cd build - ctest -C Release --output-on-failure -E "test-catchain|test-actors|test-validator-session-state" - - - name: Show executables - run: | - cd build - del Release\test-* - dir *.exe /a-D /S /B - dir *.dll /a-D /S /B - - - name: Check if validator-engine.exe exists - run: | - copy %cd%\build\validator-engine\Release\validator-engine.exe test - - - name: Find & copy binaries - run: | - mkdir artifacts - mkdir artifacts\smartcont - mkdir artifacts\lib - - for %%I in (build\storage\storage-daemon\Release\storage-daemon.exe build\storage\storage-daemon\Release\storage-daemon-cli.exe build\blockchain-explorer\blockchain-explorer.exe build\crypto\Release\fift.exe build\crypto\Release\tlbc.exe build\crypto\Release\func.exe build\crypto\Release\create-state.exe build\validator-engine-console\Release\validator-engine-console.exe build\tonlib\Release\tonlib-cli.exe build\tonlib\Release\tonlibjson.dll build\http\Release\http-proxy.exe build\rldp-http-proxy\Release\rldp-http-proxy.exe build\dht-server\Release\dht-server.exe build\lite-client\Release\lite-client.exe build\validator-engine\Release\validator-engine.exe build\utils\Release\generate-random-id.exe build\utils\Release\json2tlo.exe build\adnl\Release\adnl-proxy.exe 
build\emulator\Release\emulator.dll) do (strip -g %%I & copy %%I artifacts\) - xcopy /e /k /h /i crypto\smartcont artifacts\smartcont - xcopy /e /k /h /i crypto\fift\lib artifacts\lib - - - name: Upload artifacts - uses: actions/upload-artifact@master - with: - name: ton-win-binaries - path: artifacts diff --git a/.gitignore b/.gitignore index 54d9ffc71..536918ab3 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,14 @@ test/regression-tests.cache/ *.swp **/*build*/ .idea -.vscode \ No newline at end of file +.vscode +zlib/ +libsodium/ +libmicrohttpd-0.9.77-w32-bin/ +readline-5.0-1-lib/ +secp256k1/ +openssl-3.1.4/ +libsodium-1.0.18-stable-msvc.zip +libmicrohttpd-0.9.77-w32-bin.zip +openssl-3.1.4.zip +readline-5.0-1-lib.zip diff --git a/CMake/FindMHD.cmake b/CMake/FindMHD.cmake index c4b94c0ef..7d6dd5fdb 100644 --- a/CMake/FindMHD.cmake +++ b/CMake/FindMHD.cmake @@ -2,23 +2,26 @@ # Once done this will define # # MHD_FOUND - system has MHD -# MHD_INCLUDE_DIRS - the MHD include directory +# MHD_INCLUDE_DIR - the MHD include directory # MHD_LIBRARY - Link these to use MHD -find_path( - MHD_INCLUDE_DIR - NAMES microhttpd.h - DOC "microhttpd include dir" -) +if (NOT MHD_LIBRARY) + find_path( + MHD_INCLUDE_DIR + NAMES microhttpd.h + DOC "microhttpd include dir" + ) -find_library( - MHD_LIBRARY - NAMES microhttpd microhttpd-10 libmicrohttpd libmicrohttpd-dll - DOC "microhttpd library" -) + find_library( + MHD_LIBRARY + NAMES microhttpd microhttpd-10 libmicrohttpd libmicrohttpd-dll + DOC "microhttpd library" + ) +endif() -set(MHD_INCLUDE_DIRS ${MHD_INCLUDE_DIR}) -set(MHD_LIBRARIES ${MHD_LIBRARY}) +if (MHD_LIBRARY) + message(STATUS "Found MHD: ${MHD_LIBRARY}") +endif() include(FindPackageHandleStandardArgs) find_package_handle_standard_args(MHD DEFAULT_MSG MHD_INCLUDE_DIR MHD_LIBRARY) diff --git a/CMake/FindSecp256k1.cmake b/CMake/FindSecp256k1.cmake index 11603f153..68a37c71f 100644 --- a/CMake/FindSecp256k1.cmake +++ b/CMake/FindSecp256k1.cmake @@ -2,28 +2,27 @@ # Once 
done this will define # # SECP256K1_FOUND - system has SECP256K1 -# SECP256K1_INCLUDE_DIRS - the SECP256K1 include directory +# SECP256K1_INCLUDE_DIR - the SECP256K1 include directory # SECP256K1_LIBRARY - Link these to use SECP256K1 -find_path( - SECP256K1_INCLUDE_DIR - NAMES secp256k1_recovery.h - DOC "secp256k1_recovery.h include dir" -) +if (NOT SECP256K1_LIBRARY) + find_path( + SECP256K1_INCLUDE_DIR + NAMES secp256k1_recovery.h + DOC "secp256k1_recovery.h include dir" + ) -find_library( - SECP256K1_LIBRARY - NAMES secp256k1 libsecp256k1 - DOC "secp256k1 library" -) + find_library( + SECP256K1_LIBRARY + NAMES secp256k1 libsecp256k1 + DOC "secp256k1 library" + ) +endif() if (SECP256K1_LIBRARY) message(STATUS "Found Secp256k1: ${SECP256K1_LIBRARY}") endif() -set(SECP256K1_INCLUDE_DIRS ${SECP256K1_INCLUDE_DIR}) -set(SECP256K1_LIBRARIES ${SECP256K1_LIBRARY}) - include(FindPackageHandleStandardArgs) find_package_handle_standard_args(Secp256k1 DEFAULT_MSG SECP256K1_INCLUDE_DIR SECP256K1_LIBRARY) mark_as_advanced(SECP256K1_INCLUDE_DIR SECP256K1_LIBRARY) diff --git a/CMake/FindSodium.cmake b/CMake/FindSodium.cmake index 0053ac54f..85194ee2f 100644 --- a/CMake/FindSodium.cmake +++ b/CMake/FindSodium.cmake @@ -26,6 +26,7 @@ # Furthermore an imported "sodium" target is created. 
# + if (CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_C_COMPILER_ID STREQUAL "Clang") set(_GCC_COMPATIBLE 1) diff --git a/CMakeLists.txt b/CMakeLists.txt index c6d7ed87a..89be32383 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -211,7 +211,13 @@ set(CMAKE_THREAD_PREFER_PTHREAD ON) set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) find_package(PkgConfig REQUIRED) -find_package(ZLIB REQUIRED) + +if (NOT ZLIB_FOUND) + find_package(ZLIB REQUIRED) +else() + message(STATUS "Using zlib ${ZLIB_LIBRARIES}") +endif() + if (TON_ARCH AND NOT MSVC) CHECK_CXX_COMPILER_FLAG( "-march=${TON_ARCH}" COMPILER_OPT_ARCH_SUPPORTED ) @@ -371,6 +377,9 @@ if (LATEX_FOUND) add_latex_document(doc/fiftbase.tex TARGET_NAME fift_basic_description) add_latex_document(doc/catchain.tex TARGET_NAME catchain_consensus_description) endif() +if (NOT LATEX_FOUND) + message(STATUS "Could NOT find LATEX (this is NOT an error)") +endif() #END internal function(target_link_libraries_system target) @@ -580,8 +589,8 @@ if (NOT NIX) WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/crypto/func/auto-tests) if (WIN32) set_property(TEST test-func PROPERTY ENVIRONMENT - "FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/func.exe" - "FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/fift.exe" + "FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/func.exe" + "FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/fift.exe" "FIFTPATH=${CMAKE_CURRENT_SOURCE_DIR}/crypto/fift/lib/") else() set_property(TEST test-func PROPERTY ENVIRONMENT @@ -596,8 +605,8 @@ if (NOT NIX) WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/crypto/func/auto-tests) if (WIN32) set_property(TEST test-func-legacy PROPERTY ENVIRONMENT - "FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/func.exe" - "FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/fift.exe" + "FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/func.exe" + "FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/fift.exe" 
 "FIFTPATH=${CMAKE_CURRENT_SOURCE_DIR}/crypto/fift/lib/") else() set_property(TEST test-func-legacy PROPERTY ENVIRONMENT diff --git a/Changelog.md b/Changelog.md index 3310d6a2e..9fec686de 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,3 +1,15 @@ +## 2024.01 Update + +1. Fixes in how gas in transactions on special accounts is accounted in block limit. Previously, gas was counted as usual, so to conduct elections that cost >30m gas, the block limit in masterchain was set to 37m gas. To lower the limit for safety reasons it is proposed to not count gas on special accounts. Besides `gas_max` is set to `special_gas_limit` for all types of transactions on special accounts. New behavior is activated through setting `gas_prices_v3` in `ConfigParam 20;`. + * Besides update of config temporarily increases gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` to `special_gas_limit`, see [details](https://t.me/tonstatus/88). +2. Improvements in LS behavior + * Improved detection of the state with all shards applied to decrease rate of `Block is not applied` error + * Better error logs: `block not in db` and `block is not applied` separation + * Fix error in proof generation for blocks after merge +3. Improvements in DHT work and storage, CellDb, config.json amendment, peer misbehavior detection, validator session stats collection, emulator. + +Besides the work of the core team, this update is based on the efforts of @XaBbl4 (peer misbehavior detection). + ## 2023.12 Update 1. 
Optimized message queue handling, now queue cleaning speed doesn't depend on total queue size diff --git a/docker/Dockerfile b/Dockerfile similarity index 65% rename from docker/Dockerfile rename to Dockerfile index 595b4d87e..e3cd7b26e 100644 --- a/docker/Dockerfile +++ b/Dockerfile @@ -1,26 +1,29 @@ -FROM ubuntu:20.04 as builder +FROM ubuntu:22.04 as builder RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake clang-6.0 openssl libssl-dev zlib1g-dev gperf wget git ninja-build libsecp256k1-dev libsodium-dev libmicrohttpd-dev pkg-config && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git ninja-build libsecp256k1-dev libsodium-dev libmicrohttpd-dev pkg-config autoconf automake libtool && \ rm -rf /var/lib/apt/lists/* -ENV CC clang-6.0 -ENV CXX clang++-6.0 +ENV CC clang +ENV CXX clang++ ENV CCACHE_DISABLE 1 + WORKDIR / -RUN git clone --recursive https://github.com/ton-blockchain/ton +RUN mkdir ton WORKDIR /ton +COPY ./ ./ + RUN mkdir build && \ cd build && \ - cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= -DCMAKE_CXX_FLAGS="-mavx2" .. && \ + cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= .. 
&& \ ninja storage-daemon storage-daemon-cli tonlibjson fift func validator-engine validator-engine-console generate-random-id dht-server lite-client -FROM ubuntu:20.04 +FROM ubuntu:22.04 RUN apt-get update && \ - apt-get install -y openssl wget libatomic1 && \ - rm -rf /var/lib/apt/lists/* + apt-get install -y wget libatomic1 openssl libsecp256k1-dev libsodium-dev libmicrohttpd-dev && \ + rm -rf /var/lib/apt/lists/* RUN mkdir -p /var/ton-work/db && \ - mkdir -p /var/ton-work/db/static + mkdir -p /var/ton-work/db/static COPY --from=builder /ton/build/storage/storage-daemon/storage-daemon /usr/local/bin/ COPY --from=builder /ton/build/storage/storage-daemon/storage-daemon-cli /usr/local/bin/ @@ -30,7 +33,7 @@ COPY --from=builder /ton/build/validator-engine-console/validator-engine-console COPY --from=builder /ton/build/utils/generate-random-id /usr/local/bin/ WORKDIR /var/ton-work/db -COPY init.sh control.template ./ +COPY ./docker/init.sh ./docker/control.template ./ RUN chmod +x init.sh -ENTRYPOINT ["/var/ton-work/db/init.sh"] +ENTRYPOINT ["/var/ton-work/db/init.sh"] \ No newline at end of file diff --git a/README.md b/README.md index 7e78bb04a..653f2f834 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ __The Open Network (TON)__ is a fast, secure, scalable blockchain focused on han - To work on TON check [wallets](https://ton.app/wallets), [explorers](https://ton.app/explorers), [DEXes](https://ton.app/dex) and [utilities](https://ton.app/utilities) - To interact with TON check [APIs](https://ton.org/docs/develop/dapps/apis/) -## Updates flow: +## Updates flow * **master branch** - mainnet is running on this stable branch. 
@@ -61,12 +61,90 @@ Usually, the response to your pull request will indicate which section it falls * Thou shall not merge your own PRs, at least one person should review the PR and merge it (4-eyes rule) * Thou shall make sure that workflows are cleanly completed for your PR before considering merge -## Workflows responsibility -If a CI workflow fails not because of your changes but workflow issues, try to fix it yourself or contact one of the persons listed below via Telegram messenger: - -* **C/C++ CI (ccpp-linux.yml)**: TBD -* **C/C++ CI Win64 Compile (ccpp-win64.yml)**: TBD - +## Build TON blockchain + +### Ubuntu 20.04, 22.04 (x86-64, aarch64) +Install additional system libraries +```bash + sudo apt-get update + sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev + + wget https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + sudo ./llvm.sh 16 all +``` +Compile TON binaries +```bash + cp assembly/native/build-ubuntu-shared.sh . + chmod +x build-ubuntu-shared.sh + ./build-ubuntu-shared.sh +``` + +### MacOS 11, 12 (x86-64, aarch64) +```bash + cp assembly/native/build-macos-shared.sh . + chmod +x build-macos-shared.sh + ./build-macos-shared.sh +``` + +### Windows 10, 11, Server (x86-64) +You need to install `MS Visual Studio 2022` first. +Go to https://www.visualstudio.com/downloads/ and download `MS Visual Studio 2022 Community`. + +Launch installer and select `Desktop development with C++`. +After installation, also make sure that `cmake` is globally available by adding +`C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin` to the system `PATH` (adjust the path per your needs). + +Open an elevated (Run as Administrator) `x86-64 Native Tools Command Prompt for VS 2022`, go to the root folder and execute: +```bash + copy assembly\native\build-windows.bat . 
+ build-windows.bat +``` + +### Building TON to WebAssembly +Install additional system libraries on Ubuntu +```bash + sudo apt-get update + sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev + + wget https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + sudo ./llvm.sh 16 all +``` +Compile TON binaries with emscripten +```bash + cd assembly/wasm + chmod +x fift-func-wasm-build-ubuntu.sh + ./fift-func-wasm-build-ubuntu.sh +``` + +### Building TON tonlib library for Android (arm64-v8a, armeabi-v7a, x86, x86-64) +Install additional system libraries on Ubuntu +```bash + sudo apt-get update + sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libgflags-dev \ + zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev \ + libtool autoconf libsodium-dev libsecp256k1-dev +``` +Compile TON tonlib library +```bash + cp assembly/android/build-android-tonlib.sh . + chmod +x build-android-tonlib.sh + ./build-android-tonlib.sh +``` + +### Build TON portable binaries with Nix package manager +You need to install Nix first. +```bash + sh <(curl -L https://nixos.org/nix/install) --daemon +``` +Then compile TON with Nix by executing below command from the root folder: +```bash + cp -r assembly/nix/* . + export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz + nix-build linux-x86-64-static.nix +``` +More examples for other platforms can be found under `assembly/nix`. ## Running tests diff --git a/assembly/android/build-android-tonlib.sh b/assembly/android/build-android-tonlib.sh new file mode 100644 index 000000000..e470f6022 --- /dev/null +++ b/assembly/android/build-android-tonlib.sh @@ -0,0 +1,55 @@ +with_artifacts=false + +while getopts 'a' flag; do + case "${flag}" in + a) with_artifacts=true ;; + *) break + ;; + esac +done + +if [ ! 
-d android-ndk-r25b ]; then + rm android-ndk-r25b-linux.zip + wget -q https://dl.google.com/android/repository/android-ndk-r25b-linux.zip + unzip -q android-ndk-r25b-linux.zip + test $? -eq 0 || { echo "Can't unzip android-ndk-r25b-linux.zip"; exit 1; } + echo Android NDK extracted +else + echo Using extracted Android NDK +fi + +export JAVA_AWT_LIBRARY=NotNeeded +export JAVA_JVM_LIBRARY=NotNeeded +export JAVA_INCLUDE_PATH=${JAVA_HOME}/include +export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include +export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux + +export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b +export NDK_PLATFORM="android-21" +export ANDROID_PLATFORM="android-21" +export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto + +rm -rf example/android/src/drinkless/org/ton/TonApi.java +cd example/android/ + +rm CMakeCache.txt .ninja_* +cmake -GNinja -DTON_ONLY_TONLIB=ON . + +test $? -eq 0 || { echo "Can't configure TON"; exit 1; } + +ninja prepare_cross_compiling + +test $? -eq 0 || { echo "Can't compile prepare_cross_compiling"; exit 1; } + +rm CMakeCache.txt .ninja_* + +. ./build-all.sh + +find . -name "*.debug" -type f -delete + +if [ "$with_artifacts" = true ]; then + cd ../.. + mkdir -p artifacts/tonlib-android-jni + cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-android-jni/ + cp -R example/android/libs/* artifacts/tonlib-android-jni/ +fi diff --git a/assembly/cicd/jenkins/test-builds.groovy b/assembly/cicd/jenkins/test-builds.groovy new file mode 100644 index 000000000..380efedd4 --- /dev/null +++ b/assembly/cicd/jenkins/test-builds.groovy @@ -0,0 +1,236 @@ +pipeline { + agent none + stages { + stage('Run Builds') { + parallel { + stage('Ubuntu 20.04 x86-64 (shared)') { + agent { + label 'Ubuntu_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/native/build-ubuntu-shared.sh . 
+ chmod +x build-ubuntu-shared.sh + ./build-ubuntu-shared.sh -t -a + ''' + sh ''' + cd artifacts + zip -9r ton-x86_64-linux-shared ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-x86_64-linux-shared.zip' + } + } + } + stage('Ubuntu 20.04 x86-64 (portable)') { + agent { + label 'Ubuntu_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/nix/build-linux-x86-64-nix.sh . + chmod +x build-linux-x86-64-nix.sh + ./build-linux-x86-64-nix.sh + ''' + sh ''' + cd artifacts + zip -9r ton-x86-64-linux-portable ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-x86-64-linux-portable.zip' + } + } + } + stage('Ubuntu 20.04 aarch64 (shared)') { + agent { + label 'Ubuntu_arm64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/native/build-ubuntu-shared.sh . + chmod +x build-ubuntu-shared.sh + ./build-ubuntu-shared.sh -t -a + ''' + sh ''' + cd artifacts + zip -9r ton-arm64-linux-shared ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-shared.zip' + } + } + } + stage('Ubuntu 20.04 aarch64 (portable)') { + agent { + label 'Ubuntu_arm64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/nix/build-linux-arm64-nix.sh . + chmod +x build-linux-arm64-nix.sh + ./build-linux-arm64-nix.sh + ''' + sh ''' + cd artifacts + zip -9r ton-arm64-linux-portable ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-portable.zip' + } + } + } + stage('macOS 12.7 x86-64 (shared)') { + agent { + label 'macOS_12.7_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/native/build-macos-shared.sh . 
+ chmod +x build-macos-shared.sh + ./build-macos-shared.sh -t -a + ''' + sh ''' + cd artifacts + zip -9r ton-x86-64-macos-shared ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-shared.zip' + } + } + } + stage('macOS 12.7 x86-64 (portable)') { + agent { + label 'macOS_12.7_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/nix/build-macos-nix.sh . + chmod +x build-macos-nix.sh + ./build-macos-nix.sh + ''' + sh ''' + cd artifacts + zip -9r ton-x86-64-macos-portable ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-portable.zip' + } + } + } + stage('macOS 12.6 aarch64 (shared)') { + agent { + label 'macOS_12.6-arm64-m1' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/native/build-macos-shared.sh . + chmod +x build-macos-shared.sh + ./build-macos-shared.sh -t -a + ''' + sh ''' + cd artifacts + zip -9r ton-arm64-macos-m1-shared ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m1-shared.zip' + } + } + } + stage('macOS 12.6 aarch64 (portable)') { + agent { + label 'macOS_12.6-arm64-m1' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/nix/build-macos-nix.sh . + chmod +x build-macos-nix.sh + ./build-macos-nix.sh + ''' + sh ''' + cd artifacts + zip -9r ton-arm64-macos-portable ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-portable.zip' + } + } + } + stage('macOS 13.2 aarch64 (shared)') { + agent { + label 'macOS_13.2-arm64-m2' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/native/build-macos-shared.sh . 
+ chmod +x build-macos-shared.sh + ./build-macos-shared.sh -t -a + ''' + sh ''' + cd artifacts + zip -9r ton-arm64-macos-m2-shared ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m2-shared.zip' + } + } + } + stage('Windows Server 2022 x86-64') { + agent { + label 'Windows_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + bat ''' + copy assembly\\native\\build-windows.bat . + build-windows.bat + ''' + bat ''' + cd artifacts + zip -9r ton-x86-64-windows ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-x86-64-windows.zip' + } + } + } + stage('Android Tonlib') { + agent { + label 'Ubuntu_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cp assembly/android/build-android-tonlib.sh . + chmod +x build-android-tonlib.sh + ./build-android-tonlib.sh -a + ''' + sh ''' + cd artifacts/tonlib-android-jni + zip -9r ton-android-tonlib ./* + ''' + archiveArtifacts artifacts: 'artifacts/tonlib-android-jni/ton-android-tonlib.zip' + } + } + } + stage('WASM fift func emulator') { + agent { + label 'Ubuntu_x86-64' + } + steps { + timeout(time: 90, unit: 'MINUTES') { + sh ''' + cd assembly/wasm + chmod +x fift-func-wasm-build-ubuntu.sh + ./fift-func-wasm-build-ubuntu.sh -a + ''' + sh ''' + cd artifacts + zip -9r ton-wasm-binaries ./* + ''' + archiveArtifacts artifacts: 'artifacts/ton-wasm-binaries.zip' + } + } + } + } + } + } +} \ No newline at end of file diff --git a/assembly/native/build-macos-portable.sh b/assembly/native/build-macos-portable.sh new file mode 100644 index 000000000..a4187d4c3 --- /dev/null +++ b/assembly/native/build-macos-portable.sh @@ -0,0 +1,207 @@ +#/bin/bash + +with_tests=false +with_artifacts=false +OSX_TARGET=10.15 + + +while getopts 'tao:' flag; do + case "${flag}" in + t) with_tests=true ;; + a) with_artifacts=true ;; + o) OSX_TARGET=${OPTARG} ;; + *) break + ;; + esac +done + +if [ ! 
-d "build" ]; then + mkdir build + cd build +else + cd build + rm -rf .ninja* CMakeCache.txt +fi + +export NONINTERACTIVE=1 +brew install ninja pkg-config automake libtool autoconf +brew install llvm@16 + + +if [ -f /opt/homebrew/opt/llvm@16/bin/clang ]; then + export CC=/opt/homebrew/opt/llvm@16/bin/clang + export CXX=/opt/homebrew/opt/llvm@16/bin/clang++ +else + export CC=/usr/local/opt/llvm@16/bin/clang + export CXX=/usr/local/opt/llvm@16/bin/clang++ +fi +export CCACHE_DISABLE=1 + +if [ ! -d "secp256k1" ]; then +git clone https://github.com/bitcoin-core/secp256k1.git +cd secp256k1 +secp256k1Path=`pwd` +git checkout v0.3.2 +./autogen.sh +./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark --with-pic +make -j12 +test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; } +cd .. +else + secp256k1Path=$(pwd)/secp256k1 + echo "Using compiled secp256k1" +fi + +if [ ! -d "libsodium" ]; then + export LIBSODIUM_FULL_BUILD=1 + git clone https://github.com/jedisct1/libsodium.git + cd libsodium + sodiumPath=`pwd` + git checkout 1.0.18 + ./autogen.sh + ./configure --with-pic --enable-static + make -j12 + test $? -eq 0 || { echo "Can't compile libsodium"; exit 1; } + cd .. +else + sodiumPath=$(pwd)/libsodium + echo "Using compiled libsodium" +fi + +if [ ! -d "openssl_3" ]; then + git clone https://github.com/openssl/openssl openssl_3 + cd openssl_3 + opensslPath=`pwd` + git checkout openssl-3.1.4 + ./config -static + make build_libs -j12 + test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; } + cd .. +else + opensslPath=$(pwd)/openssl_3 + echo "Using compiled openssl_3" +fi + +if [ ! -d "zlib" ]; then + git clone https://github.com/madler/zlib.git + cd zlib + zlibPath=`pwd` + ./configure --static + make -j12 + test $? -eq 0 || { echo "Can't compile zlib"; exit 1; } + cd .. +else + zlibPath=$(pwd)/zlib + echo "Using compiled zlib" +fi + +if [ ! 
-d "libmicrohttpd" ]; then + git clone https://git.gnunet.org/libmicrohttpd.git + cd libmicrohttpd + libmicrohttpdPath=`pwd` + ./autogen.sh + ./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic + make -j12 + test $? -eq 0 || { echo "Can't compile libmicrohttpd"; exit 1; } + cd .. +else + libmicrohttpdPath=$(pwd)/libmicrohttpd + echo "Using compiled libmicrohttpd" +fi + +cmake -GNinja .. \ +-DPORTABLE=1 \ +-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=$OSX_TARGET \ +-DCMAKE_CXX_FLAGS="-stdlib=libc++" \ +-DCMAKE_BUILD_TYPE=Release \ +-DOPENSSL_FOUND=1 \ +-DOPENSSL_INCLUDE_DIR=$opensslPath/include \ +-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.a \ +-DZLIB_FOUND=1 \ +-DZLIB_INCLUDE_DIR=$zlibPath \ +-DZLIB_LIBRARIES=$zlibPath/libz.a \ +-DSECP256K1_FOUND=1 \ +-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \ +-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a \ +-DSODIUM_FOUND=1 \ +-DSODIUM_INCLUDE_DIR=$sodiumPath/src/libsodium/include \ +-DSODIUM_LIBRARY_RELEASE=$sodiumPath/src/libsodium/.libs/libsodium.a \ +-DMHD_FOUND=1 \ +-DMHD_INCLUDE_DIR=$libmicrohttpdPath/src/include \ +-DMHD_LIBRARY=$libmicrohttpdPath/src/microhttpd/.libs/libmicrohttpd.a + + +test $? -eq 0 || { echo "Can't configure ton"; exit 1; } + +if [ "$with_tests" = true ]; then + ninja storage-daemon storage-daemon-cli blockchain-explorer \ + tonlib tonlibjson tonlib-cli validator-engine func fift \ + lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ + http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \ + test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont \ + test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp \ + test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state + test $? 
-eq 0 || { echo "Can't compile ton"; exit 1; } +else + ninja storage-daemon storage-daemon-cli blockchain-explorer \ + tonlib tonlibjson tonlib-cli validator-engine func fift \ + lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ + http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator + test $? -eq 0 || { echo "Can't compile ton"; exit 1; } +fi + +strip storage/storage-daemon/storage-daemon +strip storage/storage-daemon/storage-daemon-cli +strip blockchain-explorer/blockchain-explorer +strip crypto/fift +strip crypto/func +strip crypto/create-state +strip crypto/tlbc +strip validator-engine-console/validator-engine-console +strip tonlib/tonlib-cli +strip http/http-proxy +strip rldp-http-proxy/rldp-http-proxy +strip dht-server/dht-server +strip lite-client/lite-client +strip validator-engine/validator-engine +strip utils/generate-random-id +strip utils/json2tlo +strip adnl/adnl-proxy + +cd .. + +if [ "$with_artifacts" = true ]; then + echo Creating artifacts... 
+ rm -rf artifacts + mkdir artifacts + cp crypto/fift/lib artifacts/ + cp -R crypto/smartcont/ artifacts/ + cp build/storage/storage-daemon/storage-daemon artifacts/ + cp build/storage/storage-daemon/storage-daemon-cli artifacts/ + cp build/blockchain-explorer/blockchain-explorer artifacts/ + cp build/crypto/fift artifacts/ + cp build/crypto/func artifacts/ + cp build/crypto/create-state artifacts/ + cp build/crypto/tlbc artifacts/ + cp build/validator-engine-console/validator-engine-console artifacts/ + cp build/tonlib/tonlib-cli artifacts/ + cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib + cp build/http/http-proxy artifacts/ + cp build/rldp-http-proxy/rldp-http-proxy artifacts/ + cp build/dht-server/dht-server artifacts/ + cp build/lite-client/lite-client artifacts/ + cp build/validator-engine/validator-engine artifacts/ + cp build/utils/generate-random-id artifacts/ + cp build/utils/json2tlo artifacts/ + cp build/adnl/adnl-proxy artifacts/ + cp build/emulator/libemulator.dylib artifacts/ + chmod +x artifacts/* + rsync -r crypto/smartcont artifacts/ + rsync -r crypto/fift/lib artifacts/ +fi + +if [ "$with_tests" = true ]; then + cd build +# ctest --output-on-failure -E "test-catchain|test-actors" + ctest --output-on-failure +fi diff --git a/assembly/native/build-macos-shared.sh b/assembly/native/build-macos-shared.sh new file mode 100644 index 000000000..7b4f90ee4 --- /dev/null +++ b/assembly/native/build-macos-shared.sh @@ -0,0 +1,136 @@ +#/bin/bash + +with_tests=false +with_artifacts=false +OSX_TARGET=10.15 + + +while getopts 'tao:' flag; do + case "${flag}" in + t) with_tests=true ;; + a) with_artifacts=true ;; + o) OSX_TARGET=${OPTARG} ;; + *) break + ;; + esac +done + +if [ ! 
-d "build" ]; then + mkdir build + cd build +else + cd build + rm -rf .ninja* CMakeCache.txt +fi + +export NONINTERACTIVE=1 +brew install ninja libsodium libmicrohttpd pkg-config automake libtool autoconf gnutls +brew install llvm@16 + +if [ -f /opt/homebrew/opt/llvm@16/bin/clang ]; then + export CC=/opt/homebrew/opt/llvm@16/bin/clang + export CXX=/opt/homebrew/opt/llvm@16/bin/clang++ +else + export CC=/usr/local/opt/llvm@16/bin/clang + export CXX=/usr/local/opt/llvm@16/bin/clang++ +fi +export CCACHE_DISABLE=1 + +if [ ! -d "secp256k1" ]; then + git clone https://github.com/bitcoin-core/secp256k1.git + cd secp256k1 + secp256k1Path=`pwd` + git checkout v0.3.2 + ./autogen.sh + ./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark + make -j12 + test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; } + cd .. +else + secp256k1Path=$(pwd)/secp256k1 + echo "Using compiled secp256k1" +fi + +brew unlink openssl@1.1 +brew install openssl@3 +brew unlink openssl@3 && brew link --overwrite openssl@3 + +cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \ +-DCMAKE_CXX_FLAGS="-stdlib=libc++" \ +-DSECP256K1_FOUND=1 \ +-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \ +-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a + +test $? -eq 0 || { echo "Can't configure ton"; exit 1; } + +if [ "$with_tests" = true ]; then + ninja storage-daemon storage-daemon-cli blockchain-explorer \ + tonlib tonlibjson tonlib-cli validator-engine func fift \ + lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ + http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \ + test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont \ + test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp \ + test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state + test $? 
-eq 0 || { echo "Can't compile ton"; exit 1; } +else + ninja storage-daemon storage-daemon-cli blockchain-explorer \ + tonlib tonlibjson tonlib-cli validator-engine func fift \ + lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \ + http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator + test $? -eq 0 || { echo "Can't compile ton"; exit 1; } +fi + + +strip storage/storage-daemon/storage-daemon +strip storage/storage-daemon/storage-daemon-cli +strip blockchain-explorer/blockchain-explorer +strip crypto/fift +strip crypto/func +strip crypto/create-state +strip crypto/tlbc +strip validator-engine-console/validator-engine-console +strip tonlib/tonlib-cli +strip http/http-proxy +strip rldp-http-proxy/rldp-http-proxy +strip dht-server/dht-server +strip lite-client/lite-client +strip validator-engine/validator-engine +strip utils/generate-random-id +strip utils/json2tlo +strip adnl/adnl-proxy + +cd .. + +if [ "$with_artifacts" = true ]; then + echo Creating artifacts... 
+ rm -rf artifacts + mkdir artifacts + cp build/storage/storage-daemon/storage-daemon artifacts/ + cp build/storage/storage-daemon/storage-daemon-cli artifacts/ + cp build/blockchain-explorer/blockchain-explorer artifacts/ + cp build/crypto/fift artifacts/ + cp build/crypto/func artifacts/ + cp build/crypto/create-state artifacts/ + cp build/crypto/tlbc artifacts/ + cp build/validator-engine-console/validator-engine-console artifacts/ + cp build/tonlib/tonlib-cli artifacts/ + cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib + cp build/http/http-proxy artifacts/ + cp build/rldp-http-proxy/rldp-http-proxy artifacts/ + cp build/dht-server/dht-server artifacts/ + cp build/lite-client/lite-client artifacts/ + cp build/validator-engine/validator-engine artifacts/ + cp build/utils/generate-random-id artifacts/ + cp build/utils/json2tlo artifacts/ + cp build/adnl/adnl-proxy artifacts/ + cp build/emulator/libemulator.dylib artifacts/ + chmod +x artifacts/* + rsync -r crypto/smartcont artifacts/ + rsync -r crypto/fift/lib artifacts/ +fi + +if [ "$with_tests" = true ]; then + cd build +# ctest --output-on-failure -E "test-catchain|test-actors" + ctest --output-on-failure --timeout 1800 +fi diff --git a/assembly/native/build-ubuntu-portable.sh b/assembly/native/build-ubuntu-portable.sh new file mode 100644 index 000000000..81dbe7104 --- /dev/null +++ b/assembly/native/build-ubuntu-portable.sh @@ -0,0 +1,198 @@ +#/bin/bash + +#sudo apt-get update +#sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf + +with_tests=false +with_artifacts=false + + +while getopts 'ta' flag; do + case "${flag}" in + t) with_tests=true ;; + a) with_artifacts=true ;; + *) break + ;; + esac +done + +if [ ! -d "build" ]; then + mkdir build + cd build +else + cd build + rm -rf .ninja* CMakeCache.txt +fi + +export CC=$(which clang-16) +export CXX=$(which clang++-16) +export CCACHE_DISABLE=1 + + +if [ ! 
-d "secp256k1" ]; then +git clone https://github.com/bitcoin-core/secp256k1.git +cd secp256k1 +secp256k1Path=`pwd` +git checkout v0.3.2 +./autogen.sh +./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark --with-pic +make -j12 +test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; } +cd .. +# ./.libs/libsecp256k1.a +# ./include +else + secp256k1Path=$(pwd)/secp256k1 + echo "Using compiled secp256k1" +fi + +if [ ! -d "libsodium" ]; then + export LIBSODIUM_FULL_BUILD=1 + git clone https://github.com/jedisct1/libsodium.git + cd libsodium + sodiumPath=`pwd` + git checkout 1.0.18 + ./autogen.sh + ./configure --with-pic --enable-static + make -j12 + test $? -eq 0 || { echo "Can't compile libsodium"; exit 1; } + cd .. +else + sodiumPath=$(pwd)/libsodium + echo "Using compiled libsodium" +fi + +if [ ! -d "openssl_3" ]; then + git clone https://github.com/openssl/openssl openssl_3 + cd openssl_3 + opensslPath=`pwd` + git checkout openssl-3.1.4 + ./config -static + make build_libs -j12 + test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; } + cd .. +else + opensslPath=$(pwd)/openssl_3 + echo "Using compiled openssl_3" +fi + +if [ ! -d "zlib" ]; then + git clone https://github.com/madler/zlib.git + cd zlib + zlibPath=`pwd` + ./configure --static + make -j12 + test $? -eq 0 || { echo "Can't compile zlib"; exit 1; } + cd .. +else + zlibPath=$(pwd)/zlib + echo "Using compiled zlib" +fi + +if [ ! -d "libmicrohttpd" ]; then + git clone https://git.gnunet.org/libmicrohttpd.git + cd libmicrohttpd + libmicrohttpdPath=`pwd` + ./autogen.sh + ./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic + make -j12 + test $? -eq 0 || { echo "Can't compile libmicrohttpd"; exit 1; } + cd .. +else + libmicrohttpdPath=$(pwd)/libmicrohttpd + echo "Using compiled libmicrohttpd" +fi + +cmake -GNinja .. 
\ +-DPORTABLE=1 \ +-DCMAKE_BUILD_TYPE=Release \ +-DOPENSSL_FOUND=1 \ +-DOPENSSL_INCLUDE_DIR=$opensslPath/include \ +-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.a \ +-DZLIB_FOUND=1 \ +-DZLIB_INCLUDE_DIR=$zlibPath \ +-DZLIB_LIBRARIES=$zlibPath/libz.a \ +-DSECP256K1_FOUND=1 \ +-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \ +-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a \ +-DSODIUM_FOUND=1 \ +-DSODIUM_INCLUDE_DIR=$sodiumPath/src/libsodium/include \ +-DSODIUM_LIBRARY_RELEASE=$sodiumPath/src/libsodium/.libs/libsodium.a \ +-DMHD_FOUND=1 \ +-DMHD_INCLUDE_DIR=$libmicrohttpdPath/src/include \ +-DMHD_LIBRARY=$libmicrohttpdPath/src/microhttpd/.libs/libmicrohttpd.a + + +test $? -eq 0 || { echo "Can't configure ton"; exit 1; } + +if [ "$with_tests" = true ]; then +ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \ + validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \ + generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \ + adnl-proxy create-state emulator test-ed25519 test-ed25519-crypto test-bigint \ + test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils \ + test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain \ + test-fec test-tddb test-db test-validator-session-state + test $? -eq 0 || { echo "Can't compile ton"; exit 1; } +else +ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \ + validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \ + generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \ + adnl-proxy create-state emulator + test $? 
-eq 0 || { echo "Can't compile ton"; exit 1; } +fi + +strip -g storage/storage-daemon/storage-daemon \ + storage/storage-daemon/storage-daemon-cli \ + blockchain-explorer/blockchain-explorer \ + crypto/fift \ + crypto/tlbc \ + crypto/func \ + crypto/create-state \ + validator-engine-console/validator-engine-console \ + tonlib/tonlib-cli \ + tonlib/libtonlibjson.so.0.5 \ + http/http-proxy \ + rldp-http-proxy/rldp-http-proxy \ + dht-server/dht-server \ + lite-client/lite-client \ + validator-engine/validator-engine \ + utils/generate-random-id \ + utils/json2tlo \ + adnl/adnl-proxy \ + emulator/libemulator.* + +test $? -eq 0 || { echo "Can't strip final binaries"; exit 1; } + +# simple binaries' test +./storage/storage-daemon/storage-daemon -V || exit 1 +./validator-engine/validator-engine -V || exit 1 +./lite-client/lite-client -V || exit 1 +./crypto/fift -V || exit 1 + +cd .. + +if [ "$with_artifacts" = true ]; then + rm -rf artifacts + mkdir artifacts + cp crypto/fift/lib artifacts/ + cp -R crypto/smartcont/ artifacts/ + mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so + cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli \ + build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/blockchain-explorer/blockchain-explorer \ + build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli \ + build/tonlib/libtonlibjson.so build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy \ + build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine \ + build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.so \ + artifacts + test $? 
-eq 0 || { echo "Can't copy final binaries"; exit 1; } + chmod +x artifacts/* + cp -R crypto/smartcont artifacts + cp -R crypto/fift/lib artifacts +fi + +if [ "$with_tests" = true ]; then + cd build +# ctest --output-on-failure -E "test-catchain|test-actors|test-smartcont|test-adnl|test-validator-session-state|test-dht|test-rldp" + ctest --output-on-failure -E "test-adnl" +fi diff --git a/assembly/native/build-ubuntu-shared.sh b/assembly/native/build-ubuntu-shared.sh new file mode 100644 index 000000000..12c819cd0 --- /dev/null +++ b/assembly/native/build-ubuntu-shared.sh @@ -0,0 +1,122 @@ +#/bin/bash + +#sudo apt-get update +#sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev + +with_tests=false +with_artifacts=false + + +while getopts 'ta' flag; do + case "${flag}" in + t) with_tests=true ;; + a) with_artifacts=true ;; + *) break + ;; + esac +done + +if [ ! -d "build" ]; then + mkdir build + cd build +else + cd build + rm -rf .ninja* CMakeCache.txt +fi + +export CC=$(which clang-16) +export CXX=$(which clang++-16) +export CCACHE_DISABLE=1 + +if [ ! -d "openssl_3" ]; then + git clone https://github.com/openssl/openssl openssl_3 + cd openssl_3 + opensslPath=`pwd` + git checkout openssl-3.1.4 + ./config + make build_libs -j12 + test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; } + cd .. +else + opensslPath=$(pwd)/openssl_3 + echo "Using compiled openssl_3" +fi + +cmake -GNinja .. \ +-DCMAKE_BUILD_TYPE=Release \ +-DOPENSSL_ROOT_DIR=$opensslPath \ +-DOPENSSL_INCLUDE_DIR=$opensslPath/include \ +-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.so + + +test $? 
-eq 0 || { echo "Can't configure ton"; exit 1; } + +if [ "$with_tests" = true ]; then +ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \ + validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \ + generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \ + adnl-proxy create-state emulator test-ed25519 test-ed25519-crypto test-bigint \ + test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils \ + test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain \ + test-fec test-tddb test-db test-validator-session-state + test $? -eq 0 || { echo "Can't compile ton"; exit 1; } +else +ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \ + validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \ + generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \ + adnl-proxy create-state emulator + test $? -eq 0 || { echo "Can't compile ton"; exit 1; } +fi + +strip -g storage/storage-daemon/storage-daemon \ + storage/storage-daemon/storage-daemon-cli \ + blockchain-explorer/blockchain-explorer \ + crypto/fift \ + crypto/tlbc \ + crypto/func \ + crypto/create-state \ + validator-engine-console/validator-engine-console \ + tonlib/tonlib-cli \ + tonlib/libtonlibjson.so.0.5 \ + http/http-proxy \ + rldp-http-proxy/rldp-http-proxy \ + dht-server/dht-server \ + lite-client/lite-client \ + validator-engine/validator-engine \ + utils/generate-random-id \ + utils/json2tlo \ + adnl/adnl-proxy \ + emulator/libemulator.* + +test $? -eq 0 || { echo "Can't strip final binaries"; exit 1; } + +# simple binaries' test +./storage/storage-daemon/storage-daemon -V || exit 1 +./validator-engine/validator-engine -V || exit 1 +./lite-client/lite-client -V || exit 1 +./crypto/fift -V || exit 1 + +cd .. 
+ +if [ "$with_artifacts" = true ]; then + rm -rf artifacts + mkdir artifacts + mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so + cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli \ + build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/blockchain-explorer/blockchain-explorer \ + build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli \ + build/tonlib/libtonlibjson.so build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy \ + build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine \ + build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.so \ + artifacts + test $? -eq 0 || { echo "Can't copy final binaries"; exit 1; } + chmod +x artifacts/* + cp -R crypto/smartcont artifacts + cp -R crypto/fift/lib artifacts +fi + +if [ "$with_tests" = true ]; then + cd build +# ctest --output-on-failure -E "test-catchain|test-actors|test-smartcont|test-adnl|test-validator-session-state|test-dht|test-rldp" + ctest --output-on-failure --timeout 1800 +fi \ No newline at end of file diff --git a/assembly/native/build-windows-github.bat b/assembly/native/build-windows-github.bat new file mode 100644 index 000000000..7cad8c7e5 --- /dev/null +++ b/assembly/native/build-windows-github.bat @@ -0,0 +1,2 @@ +call "C:\Program Files\Microsoft Visual Studio\2022\%1\VC\Auxiliary\Build\vcvars64.bat" +call build-windows.bat -t \ No newline at end of file diff --git a/assembly/native/build-windows.bat b/assembly/native/build-windows.bat new file mode 100644 index 000000000..2e3f20824 --- /dev/null +++ b/assembly/native/build-windows.bat @@ -0,0 +1,193 @@ +REM execute this script inside elevated (Run as Administrator) console "x64 Native Tools Command Prompt for VS 2022" + +echo off + +echo Installing chocolatey windows package manager... 
+@"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe" -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin" +choco -? +IF %errorlevel% NEQ 0 ( + echo Can't install chocolatey + exit /b %errorlevel% +) + +choco feature enable -n allowEmptyChecksums + +echo Installing pkgconfiglite... +choco install -y pkgconfiglite +IF errorlevel 1 ( + echo Can't install pkgconfiglite + exit /b %errorlevel% +) + +echo Installing ninja... +choco install -y ninja +IF errorlevel 1 ( + echo Can't install ninja + exit /b %errorlevel% +) + +if not exist "zlib" ( +git clone https://github.com/madler/zlib.git +cd zlib\contrib\vstudio\vc14 +msbuild zlibstat.vcxproj /p:Configuration=ReleaseWithoutAsm /p:platform=x64 -p:PlatformToolset=v143 + +IF errorlevel 1 ( + echo Can't install zlib + exit /b %errorlevel% +) +cd ..\..\..\.. +) else ( +echo Using zlib... +) + +if not exist "secp256k1" ( +git clone https://github.com/libbitcoin/secp256k1.git +cd secp256k1\builds\msvc\vs2017 +msbuild /p:Configuration=StaticRelease -p:PlatformToolset=v143 -p:Platform=x64 +IF errorlevel 1 ( + echo Can't install secp256k1 + exit /b %errorlevel% +) +cd ..\..\..\.. +) else ( +echo Using secp256k1... +) + + +if not exist "libsodium" ( +curl -Lo libsodium-1.0.18-stable-msvc.zip https://download.libsodium.org/libsodium/releases/libsodium-1.0.18-stable-msvc.zip +IF errorlevel 1 ( + echo Can't download libsodium + exit /b %errorlevel% +) +unzip libsodium-1.0.18-stable-msvc.zip +) else ( +echo Using libsodium... +) + +if not exist "openssl-3.1.4" ( +curl -Lo openssl-3.1.4.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/openssl-3.1.4.zip +IF errorlevel 1 ( + echo Can't download OpenSSL + exit /b %errorlevel% +) +unzip -q openssl-3.1.4.zip +) else ( +echo Using openssl... 
+) + +if not exist "libmicrohttpd-0.9.77-w32-bin" ( +curl -Lo libmicrohttpd-0.9.77-w32-bin.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/libmicrohttpd-0.9.77-w32-bin.zip +IF errorlevel 1 ( + echo Can't download libmicrohttpd + exit /b %errorlevel% +) +unzip -q libmicrohttpd-0.9.77-w32-bin.zip +) else ( +echo Using libmicrohttpd... +) + +if not exist "readline-5.0-1-lib" ( +curl -Lo readline-5.0-1-lib.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/readline-5.0-1-lib.zip +IF errorlevel 1 ( + echo Can't download readline + exit /b %errorlevel% +) +unzip -q -d readline-5.0-1-lib readline-5.0-1-lib.zip +) else ( +echo Using readline... +) + + +set root=%cd% +echo %root% +set SODIUM_DIR=%root%\libsodium + +mkdir build +cd build +cmake -GNinja -DCMAKE_BUILD_TYPE=Release ^ +-DPORTABLE=1 ^ +-DSODIUM_USE_STATIC_LIBS=1 ^ +-DSECP256K1_FOUND=1 ^ +-DSECP256K1_INCLUDE_DIR=%root%\secp256k1\include ^ +-DSECP256K1_LIBRARY=%root%\secp256k1\bin\x64\Release\v143\static\secp256k1.lib ^ +-DMHD_FOUND=1 ^ +-DMHD_LIBRARY=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static\libmicrohttpd.lib ^ +-DMHD_INCLUDE_DIR=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static ^ +-DZLIB_FOUND=1 ^ +-DZLIB_INCLUDE_DIR=%root%\zlib ^ +-DZLIB_LIBRARIES=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib ^ +-DOPENSSL_FOUND=1 ^ +-DOPENSSL_INCLUDE_DIR=%root%/openssl-3.1.4/x64/include ^ +-DOPENSSL_CRYPTO_LIBRARY=%root%/openssl-3.1.4/x64/lib/libcrypto_static.lib ^ +-DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj" .. 
+IF errorlevel 1 ( + echo Can't configure TON + exit /b %errorlevel% +) + +IF "%1"=="-t" ( +ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson ^ +tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id ^ +json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator ^ +test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net ^ +test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain ^ +test-fec test-tddb test-db test-validator-session-state +IF errorlevel 1 ( + echo Can't compile TON + exit /b %errorlevel% +) +) else ( +ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson ^ +tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id ^ +json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator +IF errorlevel 1 ( + echo Can't compile TON + exit /b %errorlevel% +) +) + +copy validator-engine\validator-engine.exe test +IF errorlevel 1 ( + echo validator-engine.exe does not exist + exit /b %errorlevel% +) + +IF "%1"=="-t" ( + echo Running tests... +REM ctest -C Release --output-on-failure -E "test-catchain|test-actors|test-validator-session-state" + ctest -C Release --output-on-failure --timeout 1800 + IF errorlevel 1 ( + echo Some tests failed + exit /b %errorlevel% + ) +) + + +echo Creating artifacts... +cd .. 
+mkdir artifacts
+mkdir artifacts\smartcont
+mkdir artifacts\lib
+
+for %%I in (build\storage\storage-daemon\storage-daemon.exe ^
+build\storage\storage-daemon\storage-daemon-cli.exe ^
+build\blockchain-explorer\blockchain-explorer.exe ^
+build\crypto\fift.exe ^
+build\crypto\tlbc.exe ^
+build\crypto\func.exe ^
+build\crypto\create-state.exe ^
+build\validator-engine-console\validator-engine-console.exe ^
+build\tonlib\tonlib-cli.exe ^
+build\tonlib\tonlibjson.dll ^
+build\http\http-proxy.exe ^
+build\rldp-http-proxy\rldp-http-proxy.exe ^
+build\dht-server\dht-server.exe ^
+build\lite-client\lite-client.exe ^
+build\validator-engine\validator-engine.exe ^
+build\utils\generate-random-id.exe ^
+build\utils\json2tlo.exe ^
+build\adnl\adnl-proxy.exe ^
+build\emulator\emulator.dll) do (strip -g %%I & copy %%I artifacts\)
+xcopy /e /k /h /i crypto\smartcont artifacts\smartcont
+xcopy /e /k /h /i crypto\fift\lib artifacts\lib
diff --git a/assembly/nix/build-linux-arm64-nix.sh b/assembly/nix/build-linux-arm64-nix.sh
new file mode 100644
index 000000000..bb8591413
--- /dev/null
+++ b/assembly/nix/build-linux-arm64-nix.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+nix-build --version
+test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
+
+cp assembly/nix/linux-arm64* .
+cp assembly/nix/microhttpd.nix .
+cp assembly/nix/openssl.nix .
+export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
+
+nix-build linux-arm64-static.nix
+mkdir artifacts
+cp ./result/bin/* artifacts/
+chmod +x artifacts/*
+rm -rf result
+nix-build linux-arm64-tonlib.nix
+cp ./result/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
+cp ./result/lib/libemulator.so artifacts/
+cp -r crypto/fift/lib artifacts/
+cp -r crypto/smartcont artifacts/
\ No newline at end of file
diff --git a/assembly/nix/build-linux-x86-64-nix.sh b/assembly/nix/build-linux-x86-64-nix.sh
new file mode 100644
index 000000000..eca6fe58b
--- /dev/null
+++ b/assembly/nix/build-linux-x86-64-nix.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+nix-build --version
+test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
+
+cp assembly/nix/linux-x86-64* .
+cp assembly/nix/microhttpd.nix .
+cp assembly/nix/openssl.nix .
+export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
+
+nix-build linux-x86-64-static.nix
+mkdir artifacts
+cp ./result/bin/* artifacts/
+chmod +x artifacts/*
+rm -rf result
+nix-build linux-x86-64-tonlib.nix
+cp ./result/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
+cp ./result/lib/libemulator.so artifacts/
+cp -r crypto/fift/lib artifacts/
+cp -r crypto/smartcont artifacts/
\ No newline at end of file
diff --git a/assembly/nix/build-macos-nix.sh b/assembly/nix/build-macos-nix.sh
new file mode 100644
index 000000000..fdf674a69
--- /dev/null
+++ b/assembly/nix/build-macos-nix.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+nix-build --version
+test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
+
+cp assembly/nix/macos-* .
+export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz +nix-build macos-static.nix +mkdir artifacts +cp ./result-bin/bin/* artifacts/ +chmod +x artifacts/* +rm -rf result-bin +nix-build macos-tonlib.nix +cp ./result/lib/libtonlibjson.dylib artifacts/ +cp ./result/lib/libemulator.dylib artifacts/ +cp -r crypto/fift/lib artifacts/ +cp -r crypto/smartcont artifacts/ \ No newline at end of file diff --git a/flake.lock b/assembly/nix/flakes/flake.lock similarity index 100% rename from flake.lock rename to assembly/nix/flakes/flake.lock diff --git a/flake.nix b/assembly/nix/flakes/flake.nix similarity index 100% rename from flake.nix rename to assembly/nix/flakes/flake.nix diff --git a/shell.nix b/assembly/nix/flakes/shell.nix similarity index 100% rename from shell.nix rename to assembly/nix/flakes/shell.nix diff --git a/assembly/nix/linux-arm64-static.nix b/assembly/nix/linux-arm64-static.nix new file mode 100644 index 000000000..616dfba5b --- /dev/null +++ b/assembly/nix/linux-arm64-static.nix @@ -0,0 +1,45 @@ +# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz + +{ pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? 
pkgs.stdenv +}: +let + microhttpdmy = (import ./microhttpd.nix) {}; +in +with import microhttpdmy; +stdenv.mkDerivation { + pname = "ton"; + version = "dev-bin"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ + cmake ninja git pkg-config + ]; + + buildInputs = with pkgs; + [ + pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1 glibc.static + ]; + + makeStatic = true; + doCheck = true; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + "-DBUILD_SHARED_LIBS=OFF" + "-DCMAKE_LINK_SEARCH_START_STATIC=ON" + "-DCMAKE_LINK_SEARCH_END_STATIC=ON" + "-DMHD_FOUND=1" + "-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include" + "-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a" + "-DCMAKE_CTEST_ARGUMENTS=--timeout;1800" + ]; + + LDFLAGS = [ + "-static-libgcc" "-static-libstdc++" "-static" + ]; +} diff --git a/assembly/nix/linux-arm64-tonlib.nix b/assembly/nix/linux-arm64-tonlib.nix new file mode 100644 index 000000000..a753423bd --- /dev/null +++ b/assembly/nix/linux-arm64-tonlib.nix @@ -0,0 +1,44 @@ +# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz +{ + pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? 
pkgs.stdenv +}: +let + microhttpdmy = (import ./microhttpd.nix) {}; +in +with import microhttpdmy; +pkgs.llvmPackages_16.stdenv.mkDerivation { + pname = "ton"; + version = "dev-lib"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ + cmake ninja git pkg-config + ]; + + buildInputs = with pkgs; + [ + pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1 + ]; + + dontAddStaticConfigureFlags = false; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + "-DMHD_FOUND=1" + "-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include" + "-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a" + ]; + + LDFLAGS = [ + "-static-libgcc" "-static-libstdc++" "-fPIC" "-fcommon" + ]; + + ninjaFlags = [ + "tonlibjson" "emulator" + ]; +} diff --git a/assembly/nix/linux-x86-64-static.nix b/assembly/nix/linux-x86-64-static.nix new file mode 100644 index 000000000..616dfba5b --- /dev/null +++ b/assembly/nix/linux-x86-64-static.nix @@ -0,0 +1,45 @@ +# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz + +{ pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? 
pkgs.stdenv +}: +let + microhttpdmy = (import ./microhttpd.nix) {}; +in +with import microhttpdmy; +stdenv.mkDerivation { + pname = "ton"; + version = "dev-bin"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ + cmake ninja git pkg-config + ]; + + buildInputs = with pkgs; + [ + pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1 glibc.static + ]; + + makeStatic = true; + doCheck = true; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + "-DBUILD_SHARED_LIBS=OFF" + "-DCMAKE_LINK_SEARCH_START_STATIC=ON" + "-DCMAKE_LINK_SEARCH_END_STATIC=ON" + "-DMHD_FOUND=1" + "-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include" + "-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a" + "-DCMAKE_CTEST_ARGUMENTS=--timeout;1800" + ]; + + LDFLAGS = [ + "-static-libgcc" "-static-libstdc++" "-static" + ]; +} diff --git a/assembly/nix/linux-x86-64-tonlib.nix b/assembly/nix/linux-x86-64-tonlib.nix new file mode 100644 index 000000000..ac183d2bf --- /dev/null +++ b/assembly/nix/linux-x86-64-tonlib.nix @@ -0,0 +1,54 @@ +# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.11.tar.gz +# copy linux-x86-64-tonlib.nix to git root directory and execute: +# nix-build linux-x86-64-tonlib.nix +{ + pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? 
pkgs.stdenv +}: +let + system = builtins.currentSystem; + + nixos1909 = (import (builtins.fetchTarball { + url = "https://channels.nixos.org/nixos-19.09/nixexprs.tar.xz"; + sha256 = "1vp1h2gkkrckp8dzkqnpcc6xx5lph5d2z46sg2cwzccpr8ay58zy"; + }) { inherit system; }); + glibc227 = nixos1909.glibc // { pname = "glibc"; }; + stdenv227 = let + cc = pkgs.wrapCCWith { + cc = nixos1909.buildPackages.gcc-unwrapped; + libc = glibc227; + bintools = pkgs.binutils.override { libc = glibc227; }; + }; + in (pkgs.overrideCC pkgs.stdenv cc); + +in +stdenv227.mkDerivation { + pname = "ton"; + version = "dev-lib"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ cmake ninja git pkg-config ]; + + buildInputs = with pkgs; + [ + pkgsStatic.openssl pkgsStatic.zlib pkgsStatic.libmicrohttpd.dev pkgsStatic.libsodium.dev pkgsStatic.secp256k1 + ]; + + dontAddStaticConfigureFlags = false; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + ]; + + LDFLAGS = [ + "-static-libgcc" "-static-libstdc++" "-fPIC" + ]; + + ninjaFlags = [ + "tonlibjson" "emulator" + ]; +} diff --git a/assembly/nix/macos-static.nix b/assembly/nix/macos-static.nix new file mode 100644 index 000000000..e65ec1a87 --- /dev/null +++ b/assembly/nix/macos-static.nix @@ -0,0 +1,65 @@ +# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz + +{ pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? 
pkgs.stdenv +}: + +pkgs.llvmPackages_14.stdenv.mkDerivation { + pname = "ton"; + version = "dev-bin"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ cmake ninja git pkg-config ]; + + buildInputs = with pkgs; + lib.forEach [ + secp256k1 libsodium.dev libmicrohttpd.dev gmp.dev nettle.dev libtasn1.dev libidn2.dev libunistring.dev gettext (gnutls.override { withP11-kit = false; }).dev + ] + (x: x.overrideAttrs(oldAttrs: rec { configureFlags = (oldAttrs.configureFlags or []) ++ [ "--enable-static" "--disable-shared" "--disable-tests" ]; dontDisableStatic = true; })) + ++ [ + darwin.apple_sdk.frameworks.CoreFoundation + (openssl.override { static = true; }).dev + (zlib.override { shared = false; }).dev + (libiconv.override { enableStatic = true; enableShared = false; }) + ]; + + + dontAddStaticConfigureFlags = true; + makeStatic = true; + doCheck = true; + + configureFlags = []; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + "-DCMAKE_CROSSCOMPILING=OFF" + "-DCMAKE_LINK_SEARCH_START_STATIC=ON" + "-DCMAKE_LINK_SEARCH_END_STATIC=ON" + "-DBUILD_SHARED_LIBS=OFF" + "-DCMAKE_CXX_FLAGS=-stdlib=libc++" + "-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.3" + "-DCMAKE_CTEST_ARGUMENTS=--timeout;1800" + ]; + + LDFLAGS = [ + "-static-libstdc++" + "-framework CoreFoundation" + ]; + + postInstall = '' + moveToOutput bin "$bin" + ''; + + preFixup = '' + for fn in "$bin"/bin/* "$out"/lib/*.dylib; do + echo Fixing libc++ in "$fn" + install_name_tool -change "$(otool -L "$fn" | grep libc++.1 | cut -d' ' -f1 | xargs)" libc++.1.dylib "$fn" + install_name_tool -change "$(otool -L "$fn" | grep libc++abi.1 | cut -d' ' -f1 | xargs)" libc++abi.dylib "$fn" + done + ''; + outputs = [ "bin" "out" ]; +} \ No newline at end of file diff --git a/assembly/nix/macos-tonlib.nix b/assembly/nix/macos-tonlib.nix new file mode 100644 index 000000000..c362de4ec --- /dev/null +++ b/assembly/nix/macos-tonlib.nix @@ -0,0 +1,55 @@ +# export 
NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz + +{ pkgs ? import { system = builtins.currentSystem; } +, lib ? pkgs.lib +, stdenv ? pkgs.stdenv +}: + +pkgs.llvmPackages_14.stdenv.mkDerivation { + pname = "ton"; + version = "dev-lib"; + + src = ./.; + + nativeBuildInputs = with pkgs; + [ cmake ninja git pkg-config ]; + + buildInputs = with pkgs; + lib.forEach [ + secp256k1 libsodium.dev libmicrohttpd.dev gmp.dev nettle.dev libtasn1.dev libidn2.dev libunistring.dev gettext (gnutls.override { withP11-kit = false; }).dev + ] (x: x.overrideAttrs(oldAttrs: rec { configureFlags = (oldAttrs.configureFlags or []) ++ [ "--enable-static" "--disable-shared" "--disable-tests" ]; dontDisableStatic = true; })) + ++ [ + darwin.apple_sdk.frameworks.CoreFoundation + (openssl.override { static = true; }).dev + (zlib.override { shared = false; }).dev + (libiconv.override { enableStatic = true; enableShared = false; }) + ]; + + dontAddStaticConfigureFlags = true; + + configureFlags = []; + + cmakeFlags = [ + "-DTON_USE_ABSEIL=OFF" + "-DNIX=ON" + "-DCMAKE_CXX_FLAGS=-stdlib=libc++" + "-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.3" + ]; + + LDFLAGS = [ + "-static-libstdc++" + "-framework CoreFoundation" + ]; + + ninjaFlags = [ + "tonlibjson" "emulator" + ]; + + preFixup = '' + for fn in $out/bin/* $out/lib/*.dylib; do + echo Fixing libc++ in "$fn" + install_name_tool -change "$(otool -L "$fn" | grep libc++.1 | cut -d' ' -f1 | xargs)" libc++.1.dylib "$fn" + install_name_tool -change "$(otool -L "$fn" | grep libc++abi.1 | cut -d' ' -f1 | xargs)" libc++abi.dylib "$fn" + done + ''; +} \ No newline at end of file diff --git a/assembly/nix/microhttpd.nix b/assembly/nix/microhttpd.nix new file mode 100644 index 000000000..4f871425a --- /dev/null +++ b/assembly/nix/microhttpd.nix @@ -0,0 +1,28 @@ +{ pkgs ? import { system = builtins.currentSystem; } +, stdenv ? pkgs.stdenv +, fetchgit ? 
pkgs.fetchgit +}: + +stdenv.mkDerivation rec { + name = "microhttpdmy"; + + + src = fetchgit { + url = "https://git.gnunet.org/libmicrohttpd.git"; + rev = "refs/tags/v0.9.77"; + sha256 = "sha256-x+nfB07PbZwBlFc6kZZFYiRpk0a3QN/ByHB+hC8na/o="; + }; + + nativeBuildInputs = with pkgs; [ automake libtool autoconf texinfo ]; + + buildInputs = with pkgs; [ ]; + + configurePhase = '' + ./autogen.sh + ./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic + ''; + + installPhase = '' + make install DESTDIR=$out + ''; +} diff --git a/assembly/nix/openssl.nix b/assembly/nix/openssl.nix new file mode 100644 index 000000000..8d30aa504 --- /dev/null +++ b/assembly/nix/openssl.nix @@ -0,0 +1,30 @@ +{ pkgs ? import { system = builtins.currentSystem; } +, stdenv ? pkgs.stdenv +, fetchFromGitHub ? pkgs.fetchFromGitHub +}: + +stdenv.mkDerivation rec { + name = "opensslmy"; + + src = fetchFromGitHub { + owner = "openssl"; + repo = "openssl"; + rev = "refs/tags/openssl-3.1.4"; + sha256 = "sha256-Vvf1wiNb4ikg1lIS9U137aodZ2JzM711tSWMJFYWtWI="; + }; + + nativeBuildInputs = with pkgs; [ perl ]; + + buildInputs = with pkgs; [ ]; + + postPatch = '' + patchShebangs Configure + ''; + + configurePhase = '' + ./Configure no-shared + ''; + installPhase = '' + make install DESTDIR=$out + ''; +} diff --git a/.github/script/fift-func-wasm-build-ubuntu.sh b/assembly/wasm/fift-func-wasm-build-ubuntu.sh old mode 100755 new mode 100644 similarity index 71% rename from .github/script/fift-func-wasm-build-ubuntu.sh rename to assembly/wasm/fift-func-wasm-build-ubuntu.sh index feac19e31..9ca23cc00 --- a/.github/script/fift-func-wasm-build-ubuntu.sh +++ b/assembly/wasm/fift-func-wasm-build-ubuntu.sh @@ -1,7 +1,24 @@ # The script builds funcfift compiler to WASM -# dependencies: -#sudo apt-get install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev 
python3-pip nodejs libevent-dev +# Execute these prerequisites first +# sudo apt update +# sudo apt install -y build-essential git make cmake ninja-build clang libgflags-dev zlib1g-dev libssl-dev \ +# libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip \ +# nodejs libsecp256k1-dev libsodium-dev automake libtool + +# wget https://apt.llvm.org/llvm.sh +# chmod +x llvm.sh +# sudo ./llvm.sh 16 all + +with_artifacts=false + +while getopts 'a' flag; do + case "${flag}" in + a) with_artifacts=true ;; + *) break + ;; + esac +done export CC=$(which clang-16) export CXX=$(which clang++-16) @@ -39,8 +56,7 @@ mkdir build cd build cmake -GNinja -DCMAKE_BUILD_TYPE=Release \ -DCMAKE_CXX_STANDARD=17 \ --DZLIB_LIBRARY=/usr/lib/x86_64-linux-gnu/libz.so \ --DZLIB_INCLUDE_DIR=$ZLIB_DIR \ +-DOPENSSL_FOUND=1 \ -DOPENSSL_ROOT_DIR=$OPENSSL_DIR \ -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include \ -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.so \ @@ -62,8 +78,9 @@ cd emsdk ./emsdk install 3.1.19 ./emsdk activate 3.1.19 EMSDK_DIR=`pwd` +ls $EMSDK_DIR -source $EMSDK_DIR/emsdk_env.sh +. $EMSDK_DIR/emsdk_env.sh export CC=$(which emcc) export CXX=$(which em++) export CCACHE_DISABLE=1 @@ -71,7 +88,7 @@ export CCACHE_DISABLE=1 cd ../openssl make clean -emconfigure ./Configure linux-generic32 no-shared no-dso no-engine no-unit-test no-ui +emconfigure ./Configure linux-generic32 no-shared no-dso no-engine no-unit-test sed -i 's/CROSS_COMPILE=.*/CROSS_COMPILE=/g' Makefile sed -i 's/-ldl//g' Makefile sed -i 's/-O3/-Os/g' Makefile @@ -101,22 +118,42 @@ test $? 
-eq 0 || { echo "Can't compile libsodium with emmake "; exit 1; } cd ../build emcmake cmake -DUSE_EMSCRIPTEN=ON -DCMAKE_BUILD_TYPE=Release \ --DZLIB_LIBRARY=$ZLIB_DIR/libz.a \ +-DZLIB_FOUND=1 \ +-DZLIB_LIBRARIES=$ZLIB_DIR/libz.a \ -DZLIB_INCLUDE_DIR=$ZLIB_DIR \ +-DOPENSSL_FOUND=1 \ -DOPENSSL_ROOT_DIR=$OPENSSL_DIR \ -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include \ -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.a \ -DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.a \ -DCMAKE_TOOLCHAIN_FILE=$EMSDK_DIR/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake \ -DCMAKE_CXX_FLAGS="-sUSE_ZLIB=1" \ +-DSECP256K1_FOUND=1 \ -DSECP256K1_INCLUDE_DIR=$SECP256K1_DIR/include \ -DSECP256K1_LIBRARY=$SECP256K1_DIR/.libs/libsecp256k1.a \ -DSODIUM_INCLUDE_DIR=$SODIUM_DIR/src/libsodium/include \ -DSODIUM_LIBRARY_RELEASE=$SODIUM_DIR/src/libsodium/.libs/libsodium.a \ --DSODIUM_LIBRARY_DEBUG=$SODIUM_DIR/src/libsodium/.libs/libsodium.a \ -DSODIUM_USE_STATIC_LIBS=ON .. test $? -eq 0 || { echo "Can't configure TON with emmake "; exit 1; } cp -R ../crypto/smartcont ../crypto/fift/lib crypto emmake make -j16 funcfiftlib func fift tlbc emulator-emscripten + +test $? -eq 0 || { echo "Can't compile TON with emmake "; exit 1; } + +if [ "$with_artifacts" = true ]; then + echo "Creating artifacts..." + cd .. 
+ rm -rf artifacts + mkdir artifacts + ls build/crypto + cp build/crypto/fift* artifacts + cp build/crypto/func* artifacts + cp build/crypto/tlbc* artifacts + cp build/emulator/emulator-emscripten* artifacts + cp -R crypto/smartcont artifacts + cp -R crypto/fift/lib artifacts +fi + + diff --git a/blockchain-explorer/CMakeLists.txt b/blockchain-explorer/CMakeLists.txt index fc94e7093..8aae88058 100644 --- a/blockchain-explorer/CMakeLists.txt +++ b/blockchain-explorer/CMakeLists.txt @@ -14,15 +14,28 @@ set(BLOCHAIN_EXPLORER_SOURCE add_executable(blockchain-explorer ${BLOCHAIN_EXPLORER_SOURCE}) if (NIX) - find_package(PkgConfig REQUIRED) - pkg_check_modules(MHD libmicrohttpd) - target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS} ${MHD_STATIC_INCLUDE_DIRS}) - target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES} ${MHD_STATIC_LIBRARIES}) + if (MHD_FOUND) + target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR}) + target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY}) + else() + find_package(PkgConfig REQUIRED) + pkg_check_modules(MHD libmicrohttpd) + target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR} ${MHD_STATIC_INCLUDE_DIRS}) + target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES} ${MHD_STATIC_LIBRARIES}) + endif() else() - find_package(MHD) - target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS}) - target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES}) + if (MHD_FOUND) + target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR}) + target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY}) + else() + 
find_package(MHD) + target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR}) + target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY}) + endif() endif() +target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR}) +target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY}) + install(TARGETS blockchain-explorer RUNTIME DESTINATION bin) diff --git a/catchain/catchain-receiver.cpp b/catchain/catchain-receiver.cpp index 482cfb43c..c8206de95 100644 --- a/catchain/catchain-receiver.cpp +++ b/catchain/catchain-receiver.cpp @@ -287,7 +287,9 @@ void CatChainReceiverImpl::add_block_cont_3(tl_object_ptrdelivered()); + LOG_CHECK(last_sent_block_->delivered()) + << "source=" << last_sent_block_->get_source_id() << " ill=" << last_sent_block_->is_ill() + << " height=" << last_sent_block_->get_height(); } active_send_ = false; diff --git a/crypto/CMakeLists.txt b/crypto/CMakeLists.txt index 62b0d2161..0871d2505 100644 --- a/crypto/CMakeLists.txt +++ b/crypto/CMakeLists.txt @@ -325,23 +325,20 @@ endif() if (MSVC) find_package(Sodium REQUIRED) target_compile_definitions(ton_crypto PUBLIC SODIUM_STATIC) - target_include_directories(ton_crypto_core PUBLIC $) - target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARIES}) -elseif (ANDROID) + target_include_directories(ton_crypto_core PUBLIC $) + target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY}) +elseif (ANDROID OR EMSCRIPTEN) target_include_directories(ton_crypto_core PUBLIC $) target_link_libraries(ton_crypto PUBLIC $) else() - if (NOT USE_EMSCRIPTEN) + if (NOT SODIUM_FOUND) find_package(Sodium REQUIRED) - target_include_directories(ton_crypto_core PUBLIC $) - target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARIES}) else() - target_include_directories(ton_crypto_core PUBLIC $) - target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY}) 
- endif() - if (NOT APPLE AND NOT USE_EMSCRIPTEN) - target_link_libraries(ton_crypto_core PUBLIC secp256k1) + message(STATUS "Using Sodium ${SODIUM_LIBRARY_RELEASE}") endif() + target_compile_definitions(ton_crypto PUBLIC SODIUM_STATIC) + target_include_directories(ton_crypto_core PUBLIC $) + target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY}) endif() target_include_directories(ton_crypto_core PUBLIC $) @@ -356,7 +353,7 @@ add_executable(test-ed25519-crypto test/test-ed25519-crypto.cpp) target_include_directories(test-ed25519-crypto PUBLIC $) target_link_libraries(test-ed25519-crypto PUBLIC ton_crypto) -add_library(fift-lib ${FIFT_SOURCE}) +add_library(fift-lib STATIC ${FIFT_SOURCE}) target_include_directories(fift-lib PUBLIC $) target_link_libraries(fift-lib PUBLIC ton_crypto ton_db tdutils ton_block) if (USE_EMSCRIPTEN) @@ -400,6 +397,7 @@ if (USE_EMSCRIPTEN) target_link_options(funcfiftlib PRIVATE -sIGNORE_MISSING_MAIN=1) target_link_options(funcfiftlib PRIVATE -sAUTO_NATIVE_LIBRARIES=0) target_link_options(funcfiftlib PRIVATE -sMODULARIZE=1) + target_link_options(funcfiftlib PRIVATE -sTOTAL_MEMORY=33554432) target_link_options(funcfiftlib PRIVATE -sALLOW_MEMORY_GROWTH=1) target_link_options(funcfiftlib PRIVATE -sALLOW_TABLE_GROWTH=1) target_link_options(funcfiftlib PRIVATE --embed-file ${CMAKE_CURRENT_SOURCE_DIR}/fift/lib@/fiftlib) @@ -506,7 +504,7 @@ if (NOT CMAKE_CROSSCOMPILING OR USE_EMSCRIPTEN) GenFif(DEST smartcont/auto/simple-wallet-ext-code SOURCE smartcont/simple-wallet-ext-code.fc NAME simple-wallet-ext) endif() -add_library(smc-envelope ${SMC_ENVELOPE_SOURCE}) +add_library(smc-envelope STATIC ${SMC_ENVELOPE_SOURCE}) target_include_directories(smc-envelope PUBLIC $) target_link_libraries(smc-envelope PUBLIC ton_crypto PRIVATE tdutils ton_block) if (NOT CMAKE_CROSSCOMPILING) diff --git a/crypto/block/block.tlb b/crypto/block/block.tlb index 4b36f13ba..eb1f8b941 100644 --- a/crypto/block/block.tlb +++ b/crypto/block/block.tlb @@ -696,6 +696,11 
@@ gas_prices_ext#de gas_price:uint64 gas_limit:uint64 special_gas_limit:uint64 gas block_gas_limit:uint64 freeze_due_limit:uint64 delete_due_limit:uint64 = GasLimitsPrices; +// same fields as gas_prices_ext; behavior differs +gas_prices_v3#df gas_price:uint64 gas_limit:uint64 special_gas_limit:uint64 gas_credit:uint64 + block_gas_limit:uint64 freeze_due_limit:uint64 delete_due_limit:uint64 + = GasLimitsPrices; + gas_flat_pfx#d1 flat_gas_limit:uint64 flat_gas_price:uint64 other:GasLimitsPrices = GasLimitsPrices; diff --git a/crypto/block/mc-config.cpp b/crypto/block/mc-config.cpp index 08be5c887..514021423 100644 --- a/crypto/block/mc-config.cpp +++ b/crypto/block/mc-config.cpp @@ -654,11 +654,16 @@ td::Result Config::do_get_gas_limits_prices(td::Ref c res.delete_due_limit = r.delete_due_limit; }; block::gen::GasLimitsPrices::Record_gas_prices_ext rec; + block::gen::GasLimitsPrices::Record_gas_prices_v3 rec_v3; + vm::CellSlice cs0 = cs; if (tlb::unpack(cs, rec)) { f(rec, rec.special_gas_limit); + } else if (tlb::unpack(cs = cs0, rec_v3)) { + f(rec_v3, rec_v3.special_gas_limit); + res.special_full_limit = true; } else { block::gen::GasLimitsPrices::Record_gas_prices rec0; - if (tlb::unpack(cs, rec0)) { + if (tlb::unpack(cs = cs0, rec0)) { f(rec0, rec0.gas_limit); } else { return td::Status::Error(PSLICE() << "configuration parameter " << id diff --git a/crypto/block/mc-config.h b/crypto/block/mc-config.h index caab93f36..dcc48b4bc 100644 --- a/crypto/block/mc-config.h +++ b/crypto/block/mc-config.h @@ -349,6 +349,7 @@ struct GasLimitsPrices { td::uint64 block_gas_limit{0}; td::uint64 freeze_due_limit{0}; td::uint64 delete_due_limit{0}; + bool special_full_limit{false}; td::RefInt256 compute_gas_price(td::uint64 gas_used) const; }; diff --git a/crypto/block/transaction.cpp b/crypto/block/transaction.cpp index 7a9073375..b4bd648bb 100644 --- a/crypto/block/transaction.cpp +++ b/crypto/block/transaction.cpp @@ -1039,8 +1039,12 @@ bool 
ComputePhaseConfig::parse_GasLimitsPrices_internal(Ref cs, t delete_due_limit = td::make_refint(r.delete_due_limit); }; block::gen::GasLimitsPrices::Record_gas_prices_ext rec; + block::gen::GasLimitsPrices::Record_gas_prices_v3 rec_v3; if (tlb::csr_unpack(cs, rec)) { f(rec, rec.special_gas_limit); + } else if (tlb::csr_unpack(cs, rec_v3)) { + f(rec_v3, rec_v3.special_gas_limit); + special_gas_full = true; } else { block::gen::GasLimitsPrices::Record_gas_prices rec0; if (tlb::csr_unpack(std::move(cs), rec0)) { @@ -1077,6 +1081,25 @@ bool ComputePhaseConfig::is_address_suspended(ton::WorkchainId wc, td::Bits256 a } } +/** + * Computes the maximum gas fee based on the gas prices and limits. + * + * @param gas_price256 The gas price from config as RefInt256 + * @param gas_limit The gas limit from config + * @param flat_gas_limit The flat gas limit from config + * @param flat_gas_price The flat gas price from config + * + * @returns The maximum gas fee. + */ +static td::RefInt256 compute_max_gas_threshold(const td::RefInt256& gas_price256, td::uint64 gas_limit, + td::uint64 flat_gas_limit, td::uint64 flat_gas_price) { + if (gas_limit > flat_gas_limit) { + return td::rshift(gas_price256 * (gas_limit - flat_gas_limit), 16, 1) + td::make_bigint(flat_gas_price); + } else { + return td::make_refint(flat_gas_price); + } +} + /** * Computes the maximum for gas fee based on the gas prices and limits. 
* @@ -1084,12 +1107,7 @@ bool ComputePhaseConfig::is_address_suspended(ton::WorkchainId wc, td::Bits256 a */ void ComputePhaseConfig::compute_threshold() { gas_price256 = td::make_refint(gas_price); - if (gas_limit > flat_gas_limit) { - max_gas_threshold = - td::rshift(gas_price256 * (gas_limit - flat_gas_limit), 16, 1) + td::make_bigint(flat_gas_price); - } else { - max_gas_threshold = td::make_refint(flat_gas_price); - } + max_gas_threshold = compute_max_gas_threshold(gas_price256, gas_limit, flat_gas_limit, flat_gas_price); } /** @@ -1126,6 +1144,67 @@ td::RefInt256 ComputePhaseConfig::compute_gas_price(td::uint64 gas_used) const { } namespace transaction { + +/** + * Checks if it is required to increase gas_limit (from GasLimitsPrices config) to special_gas_limit * 2 + * from masterchain GasLimitsPrices config for the transaction. + * + * In January 2024 a highload wallet of @wallet Telegram bot in mainnet was stuck because current gas limit (1M) is + * not enough to clean up old queires, thus locking funds inside. + * See comment in crypto/smartcont/highload-wallet-v2-code.fc for details on why this happened. + * Account address: EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu + * It was proposed to validators to increase gas limit for this account for a limited amount of time (until 2024-02-16). + * It is activated by setting gas_prices_v3 in ConfigParam 20 (config_mc_gas_prices). + * This config change also activates new behavior for special accounts in masterchain. + * + * @param cfg The compute phase configuration. + * @param now The Unix time of the transaction. + * @param account The account of the transaction. 
+ * + * @returns True if gas_limit override is required, false otherwise + */ +static bool override_gas_limit(const ComputePhaseConfig& cfg, ton::UnixTime now, const Account& account) { + if (!cfg.mc_gas_prices.special_full_limit) { + return false; + } + ton::UnixTime until = 1708041600; // 2024-02-16 00:00:00 UTC + ton::WorkchainId wc = 0; + const char* addr_hex = "FFBFD8F5AE5B2E1C7C3614885CB02145483DFAEE575F0DD08A72C366369211CD"; + return now < until && account.workchain == wc && account.addr.to_hex() == addr_hex; +} + +/** + * Computes the amount of gas that can be bought for a given amount of nanograms. + * Usually equal to `cfg.gas_bought_for(nanograms)` + * However, it overrides gas_limit from config in special cases. + * + * @param cfg The compute phase configuration. + * @param nanograms The amount of nanograms to compute gas for. + * + * @returns The amount of gas. + */ +td::uint64 Transaction::gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms) { + if (override_gas_limit(cfg, now, account)) { + gas_limit_overridden = true; + // Same as ComputePhaseConfig::gas_bought for, but with other gas_limit and max_gas_threshold + auto gas_limit = cfg.mc_gas_prices.special_gas_limit * 2; + auto max_gas_threshold = + compute_max_gas_threshold(cfg.gas_price256, gas_limit, cfg.flat_gas_limit, cfg.flat_gas_price); + if (nanograms.is_null() || sgn(nanograms) < 0) { + return 0; + } + if (nanograms >= max_gas_threshold) { + return gas_limit; + } + if (nanograms < cfg.flat_gas_price) { + return 0; + } + auto res = td::div((std::move(nanograms) - cfg.flat_gas_price) << 16, cfg.gas_price256); + return res->to_long() + cfg.flat_gas_limit; + } + return cfg.gas_bought_for(nanograms); +} + /** * Computes the gas limits for a transaction. 
* @@ -1139,16 +1218,16 @@ bool Transaction::compute_gas_limits(ComputePhase& cp, const ComputePhaseConfig& if (account.is_special) { cp.gas_max = cfg.special_gas_limit; } else { - cp.gas_max = cfg.gas_bought_for(balance.grams); + cp.gas_max = gas_bought_for(cfg, balance.grams); } cp.gas_credit = 0; - if (trans_type != tr_ord) { + if (trans_type != tr_ord || (account.is_special && cfg.special_gas_full)) { // may use all gas that can be bought using remaining balance cp.gas_limit = cp.gas_max; } else { // originally use only gas bought using remaining message balance // if the message is "accepted" by the smart contract, the gas limit will be set to gas_max - cp.gas_limit = std::min(cfg.gas_bought_for(msg_balance_remaining.grams), cp.gas_max); + cp.gas_limit = std::min(gas_bought_for(cfg, msg_balance_remaining.grams), cp.gas_max); if (!block::tlb::t_Message.is_internal(in_msg)) { // external messages carry no balance, give them some credit to check whether they are accepted cp.gas_credit = std::min(cfg.gas_credit, cp.gas_max); @@ -3203,8 +3282,8 @@ td::Result Transaction::estimate_block_storage_pro * * @returns True if the limits were successfully updated, False otherwise. */ -bool Transaction::update_limits(block::BlockLimitStatus& blimst, bool with_size) const { - if (!(blimst.update_lt(end_lt) && blimst.update_gas(gas_used()))) { +bool Transaction::update_limits(block::BlockLimitStatus& blimst, bool with_gas, bool with_size) const { + if (!(blimst.update_lt(end_lt) && blimst.update_gas(with_gas ? 
gas_used() : 0))) { return false; } if (with_size) { @@ -3450,6 +3529,9 @@ td::Status FetchConfigParams::fetch_config_params( storage_phase_cfg->delete_due_limit)) { return td::Status::Error(-668, "cannot unpack current gas prices and limits from masterchain configuration"); } + TRY_RESULT_PREFIX(mc_gas_prices, config.get_gas_limits_prices(true), + "cannot unpack masterchain gas prices and limits: "); + compute_phase_cfg->mc_gas_prices = std::move(mc_gas_prices); storage_phase_cfg->enable_due_payment = config.get_global_version() >= 4; compute_phase_cfg->block_rand_seed = *rand_seed; compute_phase_cfg->max_vm_data_depth = size_limits.max_vm_data_depth; diff --git a/crypto/block/transaction.h b/crypto/block/transaction.h index d7cb95d1b..7539efe03 100644 --- a/crypto/block/transaction.h +++ b/crypto/block/transaction.h @@ -104,6 +104,8 @@ struct ComputePhaseConfig { td::uint64 gas_credit; td::uint64 flat_gas_limit = 0; td::uint64 flat_gas_price = 0; + bool special_gas_full = false; + block::GasLimitsPrices mc_gas_prices; static constexpr td::uint64 gas_infty = (1ULL << 63) - 1; td::RefInt256 gas_price256; td::RefInt256 max_gas_threshold; @@ -119,12 +121,7 @@ struct ComputePhaseConfig { SizeLimitsConfig size_limits; int vm_log_verbosity = 0; - ComputePhaseConfig(td::uint64 _gas_price = 0, td::uint64 _gas_limit = 0, td::uint64 _gas_credit = 0) - : gas_price(_gas_price), gas_limit(_gas_limit), special_gas_limit(_gas_limit), gas_credit(_gas_credit) { - compute_threshold(); - } - ComputePhaseConfig(td::uint64 _gas_price, td::uint64 _gas_limit, td::uint64 _spec_gas_limit, td::uint64 _gas_credit) - : gas_price(_gas_price), gas_limit(_gas_limit), special_gas_limit(_spec_gas_limit), gas_credit(_gas_credit) { + ComputePhaseConfig() : gas_price(0), gas_limit(0), special_gas_limit(0), gas_credit(0) { compute_threshold(); } void compute_threshold(); @@ -362,12 +359,14 @@ struct Transaction { std::unique_ptr action_phase; std::unique_ptr bounce_phase; vm::CellStorageStat 
new_storage_stat; + bool gas_limit_overridden{false}; Transaction(const Account& _account, int ttype, ton::LogicalTime req_start_lt, ton::UnixTime _now, Ref _inmsg = {}); bool unpack_input_msg(bool ihr_delivered, const ActionPhaseConfig* cfg); bool check_in_msg_state_hash(); bool prepare_storage_phase(const StoragePhaseConfig& cfg, bool force_collect = true, bool adjust_msg_value = false); bool prepare_credit_phase(); + td::uint64 gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms); bool compute_gas_limits(ComputePhase& cp, const ComputePhaseConfig& cfg); Ref prepare_vm_stack(ComputePhase& cp); std::vector> compute_vm_libraries(const ComputePhaseConfig& cfg); @@ -383,7 +382,7 @@ struct Transaction { td::Result estimate_block_storage_profile_incr( const vm::NewCellStorageStat& store_stat, const vm::CellUsageTree* usage_tree) const; - bool update_limits(block::BlockLimitStatus& blk_lim_st, bool with_size = true) const; + bool update_limits(block::BlockLimitStatus& blk_lim_st, bool with_gas = true, bool with_size = true) const; Ref commit(Account& _account); // _account should point to the same account LtCellRef extract_out_msg(unsigned i); diff --git a/crypto/smartcont/highload-wallet-v2-code.fc b/crypto/smartcont/highload-wallet-v2-code.fc index 7dd65f9e6..b7626bbe5 100644 --- a/crypto/smartcont/highload-wallet-v2-code.fc +++ b/crypto/smartcont/highload-wallet-v2-code.fc @@ -3,6 +3,22 @@ ;; this version does not use seqno for replay protection; instead, it remembers all recent query_ids ;; in this way several external messages with different query_id can be sent in parallel + +;; Note, when dealing with highload-wallet the following limits need to be checked and taken into account: +;; 1) Storage size limit. Currently, size of contract storage should be less than 65535 cells. If size of +;; old_queries will grow above this limit, exception in ActionPhase will be thrown and transaction will fail. +;; Failed transaction may be replayed. 
+;; 2) Gas limit. Currently, gas limit is 1'000'000 gas units, that means that there is a limit of how much +;; old queries may be cleaned in one tx. If number of expired queries will be higher, contract will stuck. + +;; That means that it is not recommended to set too high expiration date: +;; number of queries during expiration timespan should not exceed 1000. +;; Also, number of expired queries cleaned in one transaction should be below 100. + +;; Such precautions are not easy to follow, so it is recommended to use highload contract +;; only when strictly necessary and the developer understands the above details. + + () recv_internal(slice in_msg) impure { ;; do nothing for internal messages } diff --git a/crypto/test/test-smartcont.cpp b/crypto/test/test-smartcont.cpp index 98534bc5e..7f512ceae 100644 --- a/crypto/test/test-smartcont.cpp +++ b/crypto/test/test-smartcont.cpp @@ -489,7 +489,7 @@ void do_test_wallet(int revision) { auto address = std::move(res.address); auto iwallet = std::move(res.wallet); auto public_key = priv_key.get_public_key().move_as_ok().as_octet_string(); - ; + check_wallet_state(iwallet, 1, 123, public_key); // lets send a lot of messages @@ -1026,7 +1026,7 @@ class CheckedDns { } return action; }); - auto query = dns_->create_update_query(key_.value(), smc_actions).move_as_ok(); + auto query = dns_->create_update_query(key_.value(), smc_actions, query_id_++).move_as_ok(); CHECK(dns_.write().send_external_message(std::move(query)).code == 0); } map_dns_.update(entries); @@ -1081,6 +1081,7 @@ class CheckedDns { using ManualDns = ton::ManualDns; td::optional key_; td::Ref dns_; + td::uint32 query_id_ = 1; // Query id serve as "valid until", but in tests now() == 0 MapDns map_dns_; td::optional combined_map_dns_; diff --git a/crypto/vm/boc.cpp b/crypto/vm/boc.cpp index 11583ede6..bd334cbfc 100644 --- a/crypto/vm/boc.cpp +++ b/crypto/vm/boc.cpp @@ -930,7 +930,7 @@ unsigned long long BagOfCells::get_idx_entry_raw(int index) { * */ 
-td::Result> std_boc_deserialize(td::Slice data, bool can_be_empty) { +td::Result> std_boc_deserialize(td::Slice data, bool can_be_empty, bool allow_nonzero_level) { if (data.empty() && can_be_empty) { return Ref(); } @@ -946,7 +946,7 @@ td::Result> std_boc_deserialize(td::Slice data, bool can_be_empty) { if (root.is_null()) { return td::Status::Error("bag of cells has null root cell (?)"); } - if (root->get_level() != 0) { + if (!allow_nonzero_level && root->get_level() != 0) { return td::Status::Error("bag of cells has a root with non-zero level"); } return std::move(root); diff --git a/crypto/vm/boc.h b/crypto/vm/boc.h index c7a1810d7..09ae1b661 100644 --- a/crypto/vm/boc.h +++ b/crypto/vm/boc.h @@ -52,6 +52,7 @@ class NewCellStorageStat { bool operator==(const Stat& other) const { return key() == other.key(); } + Stat(const Stat& other) = default; Stat& operator=(const Stat& other) = default; Stat& operator+=(const Stat& other) { cells += other.cells; @@ -323,7 +324,7 @@ class BagOfCells { std::vector* cell_should_cache); }; -td::Result> std_boc_deserialize(td::Slice data, bool can_be_empty = false); +td::Result> std_boc_deserialize(td::Slice data, bool can_be_empty = false, bool allow_nonzero_level = false); td::Result std_boc_serialize(Ref root, int mode = 0); td::Result>> std_boc_deserialize_multi(td::Slice data, diff --git a/crypto/vm/db/CellStorage.cpp b/crypto/vm/db/CellStorage.cpp index acc55898a..303d46503 100644 --- a/crypto/vm/db/CellStorage.cpp +++ b/crypto/vm/db/CellStorage.cpp @@ -98,7 +98,7 @@ class RefcntCellParser { auto size = parser.get_left_len(); td::Slice data = parser.template fetch_string_raw(size); if (stored_boc_) { - TRY_RESULT(boc, vm::std_boc_deserialize(data)); + TRY_RESULT(boc, vm::std_boc_deserialize(data, false, true)); TRY_RESULT(loaded_cell, boc->load_cell()); cell = std::move(loaded_cell.data_cell); return td::Status::OK(); diff --git a/dht-server/dht-server.cpp b/dht-server/dht-server.cpp index f729105ff..37a158ebb 100644 --- 
a/dht-server/dht-server.cpp +++ b/dht-server/dht-server.cpp @@ -572,6 +572,12 @@ void DhtServer::load_config(td::Promise promise) { config_file_ = db_root_ + "/config.json"; } auto conf_data_R = td::read_file(config_file_); + if (conf_data_R.is_error()) { + conf_data_R = td::read_file(temp_config_file()); + if (conf_data_R.is_ok()) { + td::rename(temp_config_file(), config_file_).ensure(); + } + } if (conf_data_R.is_error()) { auto P = td::PromiseCreator::lambda( [name = local_config_, new_name = config_file_, promise = std::move(promise)](td::Result R) { @@ -620,12 +626,15 @@ void DhtServer::load_config(td::Promise promise) { void DhtServer::write_config(td::Promise promise) { auto s = td::json_encode(td::ToJson(*config_.tl().get()), true); - auto S = td::write_file(config_file_, s); - if (S.is_ok()) { - promise.set_value(td::Unit()); - } else { + auto S = td::write_file(temp_config_file(), s); + if (S.is_error()) { + td::unlink(temp_config_file()).ignore(); promise.set_error(std::move(S)); + return; } + td::unlink(config_file_).ignore(); + TRY_STATUS_PROMISE(promise, td::rename(temp_config_file(), config_file_)); + promise.set_value(td::Unit()); } td::Promise DhtServer::get_key_promise(td::MultiPromise::InitGuard &ig) { diff --git a/dht-server/dht-server.hpp b/dht-server/dht-server.hpp index bf24d6216..5b81875be 100644 --- a/dht-server/dht-server.hpp +++ b/dht-server/dht-server.hpp @@ -109,6 +109,9 @@ class DhtServer : public td::actor::Actor { std::string local_config_ = ""; std::string global_config_ = "ton-global.config"; std::string config_file_; + std::string temp_config_file() const { + return config_file_ + ".tmp"; + } std::string db_root_ = "/var/ton-work/db/"; diff --git a/dht/dht-in.hpp b/dht/dht-in.hpp index 59ce21846..c2d20455f 100644 --- a/dht/dht-in.hpp +++ b/dht/dht-in.hpp @@ -155,10 +155,7 @@ class DhtMemberImpl : public DhtMember { } } - void add_full_node(DhtKeyId id, DhtNode node) override { - add_full_node_impl(id, std::move(node)); - } - void 
add_full_node_impl(DhtKeyId id, DhtNode node, bool set_active = false); + void add_full_node(DhtKeyId id, DhtNode node, bool set_active) override; adnl::AdnlNodeIdShort get_id() const override { return id_; diff --git a/dht/dht-query.cpp b/dht/dht-query.cpp index bc61242df..b84ef8c37 100644 --- a/dht/dht-query.cpp +++ b/dht/dht-query.cpp @@ -34,24 +34,33 @@ namespace ton { namespace dht { void DhtQuery::send_queries() { + while (pending_queries_.size() > k_ * 2) { + pending_queries_.erase(--pending_queries_.end()); + } VLOG(DHT_EXTRA_DEBUG) << this << ": sending new queries. active=" << active_queries_ << " max_active=" << a_; - while (pending_ids_.size() > 0 && active_queries_ < a_) { + while (pending_queries_.size() > 0 && active_queries_ < a_) { + auto id_xor = *pending_queries_.begin(); + if (result_list_.size() == k_ && *result_list_.rbegin() < id_xor) { + break; + } active_queries_++; - auto id_xor = *pending_ids_.begin(); auto id = id_xor ^ key_; VLOG(DHT_EXTRA_DEBUG) << this << ": sending " << get_name() << " query to " << id; - pending_ids_.erase(id_xor); + pending_queries_.erase(id_xor); - auto it = list_.find(id_xor); - CHECK(it != list_.end()); - td::actor::send_closure(adnl_, &adnl::Adnl::add_peer, get_src(), it->second.adnl_id(), it->second.addr_list()); + auto it = nodes_.find(id_xor); + CHECK(it != nodes_.end()); + td::actor::send_closure(adnl_, &adnl::Adnl::add_peer, get_src(), it->second.node.adnl_id(), + it->second.node.addr_list()); send_one_query(id.to_adnl()); } if (active_queries_ == 0) { - CHECK(pending_ids_.size() == 0); + pending_queries_.clear(); DhtNodesList list; - for (auto &node : list_) { - list.push_back(std::move(node.second)); + for (auto id_xor : result_list_) { + auto it = nodes_.find(id_xor); + CHECK(it != nodes_.end()); + list.push_back(it->second.node.clone()); } CHECK(list.size() <= k_); VLOG(DHT_EXTRA_DEBUG) << this << ": finalizing " << get_name() << " query. 
List size=" << list.size(); @@ -65,30 +74,32 @@ void DhtQuery::add_nodes(DhtNodesList list) { for (auto &node : list.list()) { auto id = node.get_key(); auto id_xor = key_ ^ id; - if (list_.find(id_xor) != list_.end()) { + if (nodes_.find(id_xor) != nodes_.end()) { continue; } - td::actor::send_closure(node_, &DhtMember::add_full_node, id, node.clone()); + VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: adding " << id << " key"; + td::actor::send_closure(node_, &DhtMember::add_full_node, id, node.clone(), false); + nodes_[id_xor].node = std::move(node); + pending_queries_.insert(id_xor); + } +} - DhtKeyId last_id_xor; - if (list_.size() > 0) { - last_id_xor = list_.rbegin()->first; +void DhtQuery::finish_query(adnl::AdnlNodeIdShort id, bool success) { + active_queries_--; + CHECK(active_queries_ <= k_); + auto id_xor = key_ ^ DhtKeyId(id); + if (success) { + result_list_.insert(id_xor); + if (result_list_.size() > k_) { + result_list_.erase(--result_list_.end()); } - - if (list_.size() < k_ || id_xor < last_id_xor) { - list_[id_xor] = std::move(node); - pending_ids_.insert(id_xor); - if (list_.size() > k_) { - CHECK(id_xor != last_id_xor); - VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: replacing " << (last_id_xor ^ key_) - << " key with " << id; - pending_ids_.erase(last_id_xor); - list_.erase(last_id_xor); - } else { - VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: adding " << id << " key"; - } + } else { + NodeInfo &info = nodes_[id_xor]; + if (++info.failed_attempts < MAX_ATTEMPTS) { + pending_queries_.insert(id_xor); } } + send_queries(); } void DhtQueryFindNodes::send_one_query(adnl::AdnlNodeIdShort id) { @@ -111,7 +122,7 @@ void DhtQueryFindNodes::send_one_query(adnl::AdnlNodeIdShort id) { void DhtQueryFindNodes::on_result(td::Result R, adnl::AdnlNodeIdShort dst) { if (R.is_error()) { VLOG(DHT_INFO) << this << ": failed find nodes query " << get_src() << "->" << dst << ": " << R.move_as_error(); - 
finish_query(); + finish_query(dst, false); return; } @@ -122,7 +133,7 @@ void DhtQueryFindNodes::on_result(td::Result R, adnl::AdnlNodeI } else { add_nodes(DhtNodesList{Res.move_as_ok(), our_network_id()}); } - finish_query(); + finish_query(dst); } void DhtQueryFindNodes::finish(DhtNodesList list) { @@ -166,14 +177,14 @@ void DhtQueryFindValue::send_one_query_nodes(adnl::AdnlNodeIdShort id) { void DhtQueryFindValue::on_result(td::Result R, adnl::AdnlNodeIdShort dst) { if (R.is_error()) { VLOG(DHT_INFO) << this << ": failed find value query " << get_src() << "->" << dst << ": " << R.move_as_error(); - finish_query(); + finish_query(dst, false); return; } auto Res = fetch_tl_object(R.move_as_ok(), true); if (Res.is_error()) { VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.findValue query from " << dst << ": " << Res.move_as_error(); - finish_query(); + finish_query(dst, false); return; } @@ -210,26 +221,26 @@ void DhtQueryFindValue::on_result(td::Result R, adnl::AdnlNodeI } else if (send_get_nodes) { send_one_query_nodes(dst); } else { - finish_query(); + finish_query(dst); } } void DhtQueryFindValue::on_result_nodes(td::Result R, adnl::AdnlNodeIdShort dst) { if (R.is_error()) { VLOG(DHT_INFO) << this << ": failed find nodes query " << get_src() << "->" << dst << ": " << R.move_as_error(); - finish_query(); + finish_query(dst, false); return; } auto Res = fetch_tl_object(R.move_as_ok(), true); if (Res.is_error()) { VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.findNodes query from " << dst << ": " << Res.move_as_error(); - finish_query(); + finish_query(dst, false); return; } auto r = Res.move_as_ok(); add_nodes(DhtNodesList{create_tl_object(std::move(r->nodes_)), our_network_id()}); - finish_query(); + finish_query(dst); } void DhtQueryFindValue::finish(DhtNodesList list) { @@ -422,14 +433,14 @@ void DhtQueryRequestReversePing::send_one_query(adnl::AdnlNodeIdShort id) { void DhtQueryRequestReversePing::on_result(td::Result R, 
adnl::AdnlNodeIdShort dst) { if (R.is_error()) { VLOG(DHT_INFO) << this << ": failed reverse ping query " << get_src() << "->" << dst << ": " << R.move_as_error(); - finish_query(); + finish_query(dst, false); return; } auto Res = fetch_tl_object(R.move_as_ok(), true); if (Res.is_error()) { VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.requestReversePing query from " << dst << ": " << Res.move_as_error(); - finish_query(); + finish_query(dst, false); return; } @@ -441,7 +452,7 @@ void DhtQueryRequestReversePing::on_result(td::Result R, adnl:: }, [&](ton_api::dht_clientNotFound &v) { add_nodes(DhtNodesList{std::move(v.nodes_), our_network_id()}); - finish_query(); + finish_query(dst); })); } diff --git a/dht/dht-query.hpp b/dht/dht-query.hpp index c1db0a0e1..e47403618 100644 --- a/dht/dht-query.hpp +++ b/dht/dht-query.hpp @@ -63,11 +63,7 @@ class DhtQuery : public td::actor::Actor { } void send_queries(); void add_nodes(DhtNodesList list); - void finish_query() { - active_queries_--; - CHECK(active_queries_ <= k_); - send_queries(); - } + void finish_query(adnl::AdnlNodeIdShort id, bool success = true); DhtKeyId get_key() const { return key_; } @@ -88,16 +84,22 @@ class DhtQuery : public td::actor::Actor { virtual std::string get_name() const = 0; private: + struct NodeInfo { + DhtNode node; + int failed_attempts = 0; + }; DhtMember::PrintId print_id_; adnl::AdnlNodeIdShort src_; - std::map list_; - std::set pending_ids_; + std::map nodes_; + std::set result_list_, pending_queries_; td::uint32 k_; td::uint32 a_; td::int32 our_network_id_; td::actor::ActorId node_; td::uint32 active_queries_ = 0; + static const int MAX_ATTEMPTS = 1; + protected: td::actor::ActorId adnl_; }; diff --git a/dht/dht.cpp b/dht/dht.cpp index e1e20d452..8d7b02b7d 100644 --- a/dht/dht.cpp +++ b/dht/dht.cpp @@ -57,7 +57,7 @@ td::Result> Dht::create(adnl::AdnlNodeIdShort id, std:: for (auto &node : nodes.list()) { auto key = node.get_key(); - td::actor::send_closure(D, 
&DhtMember::add_full_node, key, node.clone()); + td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone(), true); } return std::move(D); } @@ -74,7 +74,7 @@ td::Result> Dht::create_client(adnl::AdnlNodeIdShort id for (auto &node : nodes.list()) { auto key = node.get_key(); - td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone()); + td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone(), true); } return std::move(D); } @@ -368,7 +368,7 @@ void DhtMemberImpl::receive_query(adnl::AdnlNodeIdShort src, td::BufferSlice dat auto node = N.move_as_ok(); if (node.adnl_id().compute_short_id() == src) { auto key = node.get_key(); - add_full_node_impl(key, std::move(node), true); + add_full_node(key, std::move(node), true); } else { VLOG(DHT_WARNING) << this << ": dropping bad node: unexpected adnl id"; } @@ -398,7 +398,7 @@ void DhtMemberImpl::receive_query(adnl::AdnlNodeIdShort src, td::BufferSlice dat ton_api::downcast_call(*Q, [&](auto &object) { this->process_query(src, object, std::move(promise)); }); } -void DhtMemberImpl::add_full_node_impl(DhtKeyId key, DhtNode node, bool set_active) { +void DhtMemberImpl::add_full_node(DhtKeyId key, DhtNode node, bool set_active) { VLOG(DHT_EXTRA_DEBUG) << this << ": adding full node " << key; auto eid = key ^ key_; @@ -466,7 +466,7 @@ void DhtMemberImpl::set_value(DhtValue value, td::Promise promise) { void DhtMemberImpl::get_value_in(DhtKeyId key, td::Promise result) { auto P = td::PromiseCreator::lambda([key, promise = std::move(result), SelfId = actor_id(this), print_id = print_id(), - adnl = adnl_, list = get_nearest_nodes(key, k_), k = k_, a = a_, + adnl = adnl_, list = get_nearest_nodes(key, k_ * 2), k = k_, a = a_, network_id = network_id_, id = id_, client_only = client_only_](td::Result R) mutable { R.ensure(); @@ -485,7 +485,7 @@ void DhtMemberImpl::register_reverse_connection(adnl::AdnlNodeIdFull client, td: auto key_id = 
get_reverse_connection_key(client_short).compute_key_id(); td::actor::send_closure(keyring_, &keyring::Keyring::sign_message, client_short.pubkey_hash(), register_reverse_connection_to_sign(client_short, id_, ttl), - [=, print_id = print_id(), list = get_nearest_nodes(key_id, k_), SelfId = actor_id(this), + [=, print_id = print_id(), list = get_nearest_nodes(key_id, k_ * 2), SelfId = actor_id(this), promise = std::move(promise)](td::Result R) mutable { TRY_RESULT_PROMISE_PREFIX(promise, signature, std::move(R), "Failed to sign: "); td::actor::send_closure(SelfId, &DhtMemberImpl::get_self_node, @@ -532,7 +532,7 @@ void DhtMemberImpl::request_reverse_ping_cont(adnl::AdnlNode target, td::BufferS } auto key_id = get_reverse_connection_key(client).compute_key_id(); get_self_node([=, target = std::move(target), signature = std::move(signature), promise = std::move(promise), - SelfId = actor_id(this), print_id = print_id(), list = get_nearest_nodes(key_id, k_), + SelfId = actor_id(this), print_id = print_id(), list = get_nearest_nodes(key_id, k_ * 2), client_only = client_only_](td::Result R) mutable { R.ensure(); td::actor::create_actor( @@ -651,8 +651,8 @@ void DhtMemberImpl::check() { DhtKeyId key{x}; auto P = td::PromiseCreator::lambda([key, promise = std::move(promise), SelfId = actor_id(this), - print_id = print_id(), adnl = adnl_, list = get_nearest_nodes(key, k_), k = k_, - a = a_, network_id = network_id_, id = id_, + print_id = print_id(), adnl = adnl_, list = get_nearest_nodes(key, k_ * 2), + k = k_, a = a_, network_id = network_id_, id = id_, client_only = client_only_](td::Result R) mutable { R.ensure(); td::actor::create_actor("FindNodesQuery", key, print_id, id, std::move(list), k, a, network_id, @@ -677,8 +677,8 @@ void DhtMemberImpl::send_store(DhtValue value, td::Promise promise) { auto key_id = value.key_id(); auto P = td::PromiseCreator::lambda([value = std::move(value), print_id = print_id(), id = id_, - client_only = client_only_, list = 
get_nearest_nodes(key_id, k_), k = k_, a = a_, - network_id = network_id_, SelfId = actor_id(this), adnl = adnl_, + client_only = client_only_, list = get_nearest_nodes(key_id, k_ * 2), k = k_, + a = a_, network_id = network_id_, SelfId = actor_id(this), adnl = adnl_, promise = std::move(promise)](td::Result R) mutable { R.ensure(); td::actor::create_actor("StoreQuery", std::move(value), print_id, id, std::move(list), k, a, diff --git a/dht/dht.hpp b/dht/dht.hpp index 0b46d635e..9fb05e082 100644 --- a/dht/dht.hpp +++ b/dht/dht.hpp @@ -95,7 +95,7 @@ class DhtMember : public Dht { //virtual void update_addr_list(tl_object_ptr addr_list) = 0; //virtual void add_node(adnl::AdnlNodeIdShort id) = 0; - virtual void add_full_node(DhtKeyId id, DhtNode node) = 0; + virtual void add_full_node(DhtKeyId id, DhtNode node, bool set_active) = 0; virtual void receive_ping(DhtKeyId id, DhtNode result) = 0; diff --git a/emulator/emulator-extern.cpp b/emulator/emulator-extern.cpp index 9f06964e1..f8e2f7241 100644 --- a/emulator/emulator-extern.cpp +++ b/emulator/emulator-extern.cpp @@ -103,6 +103,7 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator, td::Ref addr_slice; auto account_slice = vm::load_cell_slice(shard_account.account); + bool account_exists = block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account; if (block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account_none) { if (msg_tag == block::gen::CommonMsgInfo::ext_in_msg_info) { block::gen::CommonMsgInfo::Record_ext_in_msg_info info; @@ -120,12 +121,14 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator, } else { ERROR_RESPONSE(PSTRING() << "Only ext in and int message are supported"); } - } else { + } else if (block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account) { block::gen::Account::Record_account account_record; if (!tlb::unpack(account_slice, account_record)) { ERROR_RESPONSE(PSTRING() << 
"Can't unpack account cell"); } addr_slice = std::move(account_record.addr); + } else { + ERROR_RESPONSE(PSTRING() << "Can't parse account cell"); } ton::WorkchainId wc; ton::StdSmcAddress addr; @@ -139,8 +142,16 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator, now = (unsigned)std::time(nullptr); } bool is_special = wc == ton::masterchainId && emulator->get_config().is_special_smartcontract(addr); - if (!account.unpack(vm::load_cell_slice_ref(shard_account_cell.move_as_ok()), now, is_special)) { - ERROR_RESPONSE(PSTRING() << "Can't unpack shard account"); + if (account_exists) { + if (!account.unpack(vm::load_cell_slice_ref(shard_account_cell.move_as_ok()), now, is_special)) { + ERROR_RESPONSE(PSTRING() << "Can't unpack shard account"); + } + } else { + if (!account.init_new(now)) { + ERROR_RESPONSE(PSTRING() << "Can't init new account"); + } + account.last_trans_lt_ = shard_account.last_trans_lt; + account.last_trans_hash_ = shard_account.last_trans_hash; } auto result = emulator->emulate_transaction(std::move(account), message_cell, now, 0, block::transaction::Transaction::tr_ord); diff --git a/emulator/transaction-emulator.cpp b/emulator/transaction-emulator.cpp index 81cf2e9fd..2e8ba0374 100644 --- a/emulator/transaction-emulator.cpp +++ b/emulator/transaction-emulator.cpp @@ -42,6 +42,7 @@ td::Result> TransactionEmu if (!lt) { lt = (account.last_trans_lt_ / block::ConfigInfo::get_lt_align() + 1) * block::ConfigInfo::get_lt_align(); // next block after account_.last_trans_lt_ } + account.block_lt = lt - lt % block::ConfigInfo::get_lt_align(); compute_phase_cfg.libraries = std::make_unique(libraries_); compute_phase_cfg.ignore_chksig = ignore_chksig_; diff --git a/example/android/README.md b/example/android/README.md index f17ba9d26..cf12ba304 100644 --- a/example/android/README.md +++ b/example/android/README.md @@ -6,27 +6,9 @@ Prerequisite: installed Java and set environment variable JAVA_HOME. 
```bash git clone --recursive https://github.com/ton-blockchain/ton.git cd ton -wget https://dl.google.com/android/repository/android-ndk-r25b-linux.zip -unzip android-ndk-r25b-linux.zip -export JAVA_AWT_LIBRARY=NotNeeded -export JAVA_JVM_LIBRARY=NotNeeded -export JAVA_INCLUDE_PATH=${JAVA_HOME}/include -export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include -export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux - -export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b -export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto -export SECP256K1_INCLUDE_DIR=$(pwd)/example/android/third_party/secp256k1/include -export SECP256K1_LIBRARY=$(pwd)/example/android/third_party/secp256k1/.libs/libsecp256k1.a -export SODIUM_INCLUDE_DIR=$(pwd)/example/android/third_party/libsodium/libsodium-android-westmere/include -export SODIUM_LIBRARY=$(pwd)/example/android/third_party/libsodium/libsodium-android-westmere/lib/libsodium.a - -rm -rf example/android/src/drinkless/org/ton/TonApi.java -cd example/android/ -cmake -GNinja -DTON_ONLY_TONLIB=ON . -ninja prepare_cross_compiling -rm CMakeCache.txt -./build-all.sh +cp assembly/android/build-android-tonlib.sh . +chmod +x build-android-tonlib.sh +sudo -E ./build-android-tonlib.sh ``` # Generation of Tonlib libraries for iOS in Xcode diff --git a/example/android/build-all.sh b/example/android/build-all.sh index f436e3612..6f97dec0a 100755 --- a/example/android/build-all.sh +++ b/example/android/build-all.sh @@ -1,4 +1,6 @@ #!/bin/bash +echo ANDROID_NDK_ROOT = $ANDROID_NDK_ROOT + echo Building tonlib for x86... echo ARCH="x86" ./build.sh || exit 1 diff --git a/example/android/build.sh b/example/android/build.sh index 7f170dbcf..06217255a 100755 --- a/example/android/build.sh +++ b/example/android/build.sh @@ -42,14 +42,22 @@ ARCH=$ABI mkdir -p build-$ARCH cd build-$ARCH -cmake .. 
-GNinja -DPORTABLE=1 \ --DANDROID_ABI=x86 -DANDROID_PLATFORM=android-32 -DANDROID_NDK=${ANDROID_NDK_ROOT} \ --DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake \ --DCMAKE_BUILD_TYPE=Release -DANDROID_ABI=${ABI} \ --DOPENSSL_ROOT_DIR=${OPENSSL_DIR}/${ORIG_ARCH} -DTON_ARCH="" \ +cmake .. -GNinja \ +-DPORTABLE=1 \ -DTON_ONLY_TONLIB=ON \ --DSECP256K1_INCLUDE_DIR=${SECP256K1_INCLUDE_DIR} -DSECP256K1_LIBRARY=${SECP256K1_LIBRARY} \ --DSODIUM_INCLUDE_DIR=${SODIUM_INCLUDE_DIR} -DSODIUM_LIBRARY_RELEASE=${SODIUM_LIBRARY_RELEASE} \ +-DTON_ARCH="" \ +-DANDROID_ABI=x86 \ +-DANDROID_PLATFORM=android-32 \ +-DANDROID_NDK=${ANDROID_NDK_ROOT} \ +-DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake \ +-DCMAKE_BUILD_TYPE=Release \ +-DANDROID_ABI=${ABI} \ +-DOPENSSL_ROOT_DIR=${OPENSSL_DIR}/${ORIG_ARCH} \ +-DSECP256K1_FOUND=1 \ +-DSECP256K1_INCLUDE_DIR=${SECP256K1_INCLUDE_DIR} \ +-DSECP256K1_LIBRARY=${SECP256K1_LIBRARY} \ +-DSODIUM_INCLUDE_DIR=${SODIUM_INCLUDE_DIR} \ +-DSODIUM_LIBRARY_RELEASE=${SODIUM_LIBRARY_RELEASE} \ -DSODIUM_USE_STATIC_LIBS=1 \ -DBLST_LIB=${BLST_LIBRARY} || exit 1 diff --git a/lite-client/CMakeLists.txt b/lite-client/CMakeLists.txt index 53e09d772..598a8d28c 100644 --- a/lite-client/CMakeLists.txt +++ b/lite-client/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required(VERSION 3.5 FATAL_ERROR) -add_library(lite-client-common lite-client-common.cpp lite-client-common.h) +add_library(lite-client-common STATIC lite-client-common.cpp lite-client-common.h) target_link_libraries(lite-client-common PUBLIC tdutils tdactor adnllite tl_api tl_lite_api tl-lite-utils ton_crypto ton_block) add_executable(lite-client lite-client.cpp lite-client.h) diff --git a/overlay/overlay-manager.cpp b/overlay/overlay-manager.cpp index 431921908..3c5f5eabc 100644 --- a/overlay/overlay-manager.cpp +++ b/overlay/overlay-manager.cpp @@ -93,17 +93,17 @@ void OverlayManager::create_public_overlay(adnl::AdnlNodeIdShort local_id, Overl 
std::unique_ptr callback, OverlayPrivacyRules rules, td::string scope) { create_public_overlay_ex(local_id, std::move(overlay_id), std::move(callback), std::move(rules), std::move(scope), - true); + {}); } void OverlayManager::create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::unique_ptr callback, OverlayPrivacyRules rules, - td::string scope, bool announce_self) { + td::string scope, OverlayOptions opts) { CHECK(!dht_node_.empty()); auto id = overlay_id.compute_short_id(); register_overlay(local_id, id, Overlay::create(keyring_, adnl_, actor_id(this), dht_node_, local_id, std::move(overlay_id), - std::move(callback), std::move(rules), scope, announce_self)); + std::move(callback), std::move(rules), scope, std::move(opts))); } void OverlayManager::create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, diff --git a/overlay/overlay-manager.h b/overlay/overlay-manager.h index fe1166ace..035ef3e83 100644 --- a/overlay/overlay-manager.h +++ b/overlay/overlay-manager.h @@ -54,7 +54,7 @@ class OverlayManager : public Overlays { std::unique_ptr callback, OverlayPrivacyRules rules, td::string scope) override; void create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::unique_ptr callback, OverlayPrivacyRules rules, td::string scope, - bool announce_self) override; + OverlayOptions opts) override; void create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::vector nodes, std::unique_ptr callback, OverlayPrivacyRules rules) override; diff --git a/overlay/overlay.cpp b/overlay/overlay.cpp index fcf766fe2..af01e045d 100644 --- a/overlay/overlay.cpp +++ b/overlay/overlay.cpp @@ -37,10 +37,10 @@ td::actor::ActorOwn Overlay::create(td::actor::ActorId manager, td::actor::ActorId dht_node, adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::unique_ptr callback, - OverlayPrivacyRules rules, td::string scope, bool announce_self) { + 
OverlayPrivacyRules rules, td::string scope, OverlayOptions opts) { auto R = td::actor::create_actor("overlay", keyring, adnl, manager, dht_node, local_id, std::move(overlay_id), true, std::vector(), - std::move(callback), std::move(rules), scope, announce_self); + std::move(callback), std::move(rules), scope, opts); return td::actor::ActorOwn(std::move(R)); } @@ -60,7 +60,7 @@ OverlayImpl::OverlayImpl(td::actor::ActorId keyring, td::actor td::actor::ActorId manager, td::actor::ActorId dht_node, adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, bool pub, std::vector nodes, std::unique_ptr callback, - OverlayPrivacyRules rules, td::string scope, bool announce_self) + OverlayPrivacyRules rules, td::string scope, OverlayOptions opts) : keyring_(keyring) , adnl_(adnl) , manager_(manager) @@ -71,7 +71,8 @@ OverlayImpl::OverlayImpl(td::actor::ActorId keyring, td::actor , public_(pub) , rules_(std::move(rules)) , scope_(scope) - , announce_self_(announce_self) { + , announce_self_(opts.announce_self_) + , frequent_dht_lookup_(opts.frequent_dht_lookup_) { overlay_id_ = id_full_.compute_short_id(); VLOG(OVERLAY_INFO) << this << ": creating " << (public_ ? 
"public" : "private"); @@ -279,13 +280,13 @@ void OverlayImpl::alarm() { send_random_peers(P->get_id(), {}); } } - if (next_dht_query_.is_in_past()) { + if (next_dht_query_ && next_dht_query_.is_in_past()) { + next_dht_query_ = td::Timestamp::never(); auto P = td::PromiseCreator::lambda([SelfId = actor_id(this)](td::Result res) { td::actor::send_closure(SelfId, &OverlayImpl::receive_dht_nodes, std::move(res), true); }); td::actor::send_closure(dht_node_, &dht::Dht::get_value, dht::DhtKey{overlay_id_.pubkey_hash(), "nodes", 0}, std::move(P)); - next_dht_query_ = td::Timestamp::in(td::Random::fast(60.0, 100.0)); } if (update_db_at_.is_in_past()) { if (peers_.size() > 0) { @@ -333,7 +334,13 @@ void OverlayImpl::receive_dht_nodes(td::Result res, bool dummy) { VLOG(OVERLAY_NOTICE) << this << ": can not get value from DHT: " << res.move_as_error(); } + if (!(next_dht_store_query_ && next_dht_store_query_.is_in_past())) { + finish_dht_query(); + return; + } + next_dht_store_query_ = td::Timestamp::never(); if (!announce_self_) { + finish_dht_query(); return; } @@ -341,6 +348,7 @@ void OverlayImpl::receive_dht_nodes(td::Result res, bool dummy) { auto P = td::PromiseCreator::lambda([SelfId = actor_id(this), oid = print_id()](td::Result R) { if (R.is_error()) { LOG(ERROR) << oid << "cannot get self node"; + td::actor::send_closure(SelfId, &OverlayImpl::finish_dht_query); return; } td::actor::send_closure(SelfId, &OverlayImpl::update_dht_nodes, R.move_as_ok()); @@ -365,10 +373,11 @@ void OverlayImpl::update_dht_nodes(OverlayNode node) { static_cast(td::Clocks::system() + 3600), td::BufferSlice()}; value.check().ensure(); - auto P = td::PromiseCreator::lambda([oid = print_id()](td::Result res) { + auto P = td::PromiseCreator::lambda([SelfId = actor_id(this), oid = print_id()](td::Result res) { if (res.is_error()) { VLOG(OVERLAY_NOTICE) << oid << ": error storing to DHT: " << res.move_as_error(); } + td::actor::send_closure(SelfId, &OverlayImpl::finish_dht_query); }); 
td::actor::send_closure(dht_node_, &dht::Dht::set_value, std::move(value), std::move(P)); diff --git a/overlay/overlay.h b/overlay/overlay.h index a5f7b3a47..da41a247a 100644 --- a/overlay/overlay.h +++ b/overlay/overlay.h @@ -42,7 +42,7 @@ class Overlay : public td::actor::Actor { td::actor::ActorId manager, td::actor::ActorId dht_node, adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::unique_ptr callback, - OverlayPrivacyRules rules, td::string scope, bool announce_self = true); + OverlayPrivacyRules rules, td::string scope, OverlayOptions opts = {}); static td::actor::ActorOwn create(td::actor::ActorId keyring, td::actor::ActorId adnl, td::actor::ActorId manager, diff --git a/overlay/overlay.hpp b/overlay/overlay.hpp index 86d37d5bb..90fcc43d7 100644 --- a/overlay/overlay.hpp +++ b/overlay/overlay.hpp @@ -82,12 +82,17 @@ class OverlayPeer { void on_ping_result(bool success) { if (success) { missed_pings_ = 0; + last_ping_at_ = td::Timestamp::now(); + is_alive_ = true; } else { ++missed_pings_; + if (missed_pings_ >= 3 && last_ping_at_.is_in_past(td::Timestamp::in(-15.0))) { + is_alive_ = false; + } } } bool is_alive() const { - return missed_pings_ < 3; + return is_alive_; } td::uint32 throughput_out_bytes = 0; @@ -116,6 +121,8 @@ class OverlayPeer { bool is_neighbour_ = false; size_t missed_pings_ = 0; + bool is_alive_ = true; + td::Timestamp last_ping_at_ = td::Timestamp::now(); }; class OverlayImpl : public Overlay { @@ -124,7 +131,7 @@ class OverlayImpl : public Overlay { td::actor::ActorId manager, td::actor::ActorId dht_node, adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, bool pub, std::vector nodes, std::unique_ptr callback, - OverlayPrivacyRules rules, td::string scope = "{ \"type\": \"undefined\" }", bool announce_self = true); + OverlayPrivacyRules rules, td::string scope = "{ \"type\": \"undefined\" }", OverlayOptions opts = {}); void update_dht_node(td::actor::ActorId dht) override { dht_node_ = dht; } @@ -295,6 +302,17 @@ 
class OverlayImpl : public Overlay { void del_peer(adnl::AdnlNodeIdShort id); OverlayPeer *get_random_peer(bool only_alive = false); + void finish_dht_query() { + if (!next_dht_store_query_) { + next_dht_store_query_ = td::Timestamp::in(td::Random::fast(60.0, 100.0)); + } + if (frequent_dht_lookup_ && peers_.size() == bad_peers_.size()) { + next_dht_query_ = td::Timestamp::in(td::Random::fast(6.0, 10.0)); + } else { + next_dht_query_ = next_dht_store_query_; + } + } + td::actor::ActorId keyring_; td::actor::ActorId adnl_; td::actor::ActorId manager_; @@ -305,6 +323,7 @@ class OverlayImpl : public Overlay { td::DecTree peers_; td::Timestamp next_dht_query_ = td::Timestamp::in(1.0); + td::Timestamp next_dht_store_query_ = td::Timestamp::in(1.0); td::Timestamp update_db_at_; td::Timestamp update_throughput_at_; td::Timestamp last_throughput_update_; @@ -367,6 +386,7 @@ class OverlayImpl : public Overlay { OverlayPrivacyRules rules_; td::string scope_; bool announce_self_ = true; + bool frequent_dht_lookup_ = false; std::map> certs_; class CachedEncryptor : public td::ListNode { diff --git a/overlay/overlays.h b/overlay/overlays.h index e12bbbdb2..79551e05d 100644 --- a/overlay/overlays.h +++ b/overlay/overlays.h @@ -158,6 +158,11 @@ class Certificate { td::SharedSlice signature_; }; +struct OverlayOptions { + bool announce_self_ = true; + bool frequent_dht_lookup_ = false; +}; + class Overlays : public td::actor::Actor { public: class Callback { @@ -197,7 +202,7 @@ class Overlays : public td::actor::Actor { td::string scope) = 0; virtual void create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::unique_ptr callback, OverlayPrivacyRules rules, - td::string scope, bool announce_self) = 0; + td::string scope, OverlayOptions opts) = 0; virtual void create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, std::vector nodes, std::unique_ptr callback, OverlayPrivacyRules rules) = 0; diff --git a/recent_changelog.md 
b/recent_changelog.md index 5e1224541..637f416c6 100644 --- a/recent_changelog.md +++ b/recent_changelog.md @@ -1,10 +1,11 @@ -## 2023.12 Update +## 2024.01 Update -1. Optimized message queue handling, now queue cleaning speed doesn't depend on total queue size - * Cleaning delivered messages using lt augmentation instead of random search / consequtive walk - * Keeping root cell of queue message in memory until outdated (caching) -2. Changes to block collation/validation limits -3. Stop accepting new external message if message queue is overloaded -4. Introducing conditions for shard split/merge based on queue size +1. Fixes in how gas in transactions on special accounts is accounted in block limit. Previously, gas was counted as usual, so to conduct elections that costs >30m gas block limit in masterchain was set to 37m gas. To lower the limit for safety reasons it is proposed to not count gas on special accounts. Besides `gas_max` is set to `special_gas_limit` for all types of transactions on special accounts. New behavior is activated through setting `gas_prices_v3` in `ConfigParam 20;`. + * Besides update of config temporarily increases gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` to `special_gas_limit`, see [details](https://t.me/tonstatus/88). +2. Improvements in LS behavior + * Improved detection of the state with all shards applied to decrease rate of `Block is not applied` error + * Better error logs: `block not in db` and `block is not applied` separation + * Fix error in proof generation for blocks after merge +3. Improvements in DHT work and storage, CellDb, config.json amendment, peer misbehavior detection, validator session stats collection, emulator. -Read [more](https://blog.ton.org/technical-report-december-5-inscriptions-launch-on-ton) on that update. +Besides the work of the core team, this update is based on the efforts of @XaBbl4 (peer misbehavior detection). 
diff --git a/rldp-http-proxy/rldp-http-proxy.cpp b/rldp-http-proxy/rldp-http-proxy.cpp index e91868228..0d518d6d1 100644 --- a/rldp-http-proxy/rldp-http-proxy.cpp +++ b/rldp-http-proxy/rldp-http-proxy.cpp @@ -54,6 +54,7 @@ #include "git.h" #include "td/utils/BufferedFd.h" #include "common/delay.h" +#include "td/utils/port/path.h" #include "tonlib/tonlib/TonlibClientWrapper.h" #include "DNSResolver.h" @@ -920,6 +921,12 @@ class RldpHttpProxy : public td::actor::Actor { } void run() { + if (!db_root_.empty()) { + td::mkpath(db_root_ + "/").ensure(); + } else if (!is_client_) { + LOG(ERROR) << "DB root is required for server proxy"; + std::_Exit(2); + } keyring_ = ton::keyring::Keyring::create(is_client_ ? std::string("") : (db_root_ + "/keyring")); { auto S = load_global_config(); @@ -955,9 +962,16 @@ class RldpHttpProxy : public td::actor::Actor { auto conf_dataR = td::read_file(global_config_); conf_dataR.ensure(); + ton::tl_object_ptr key_store; + if (db_root_.empty()) { + key_store = tonlib_api::make_object(); + } else { + td::mkpath(db_root_ + "/tonlib-cache/").ensure(); + key_store = tonlib_api::make_object(db_root_ + "/tonlib-cache/"); + } auto tonlib_options = tonlib_api::make_object( tonlib_api::make_object(conf_dataR.move_as_ok().as_slice().str(), "", false, false), - tonlib_api::make_object()); + std::move(key_store)); tonlib_client_ = td::actor::create_actor("tonlibclient", std::move(tonlib_options)); dns_resolver_ = td::actor::create_actor("dnsresolver", tonlib_client_.get()); } diff --git a/storage/PeerActor.cpp b/storage/PeerActor.cpp index 0cb21c0a8..48d456267 100644 --- a/storage/PeerActor.cpp +++ b/storage/PeerActor.cpp @@ -25,6 +25,7 @@ #include "td/utils/overloaded.h" #include "td/utils/Random.h" #include "vm/boc.h" +#include "common/delay.h" namespace ton { @@ -119,9 +120,9 @@ void PeerActor::on_get_piece_result(PartId piece_id, td::Result return std::move(res); }(); if (res.is_error()) { - LOG(DEBUG) << "getPiece " << piece_id << "query: " << 
res.error(); + LOG(DEBUG) << "getPiece " << piece_id << " query: " << res.error(); } else { - LOG(DEBUG) << "getPiece " << piece_id << "query: OK"; + LOG(DEBUG) << "getPiece " << piece_id << " query: OK"; } state_->node_queries_results_.add_element(std::make_pair(piece_id, std::move(res))); notify_node(); @@ -343,11 +344,20 @@ void PeerActor::loop_node_get_piece() { } auto piece_size = std::min(torrent_info_->piece_size, torrent_info_->file_size - part * torrent_info_->piece_size); - td::actor::send_closure(state_->speed_limiters_.download, &SpeedLimiter::enqueue, (double)piece_size, - td::Timestamp::in(3.0), [part, SelfId = actor_id(this)](td::Result R) { - td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part, - std::move(R)); - }); + td::Timestamp timeout = td::Timestamp::in(3.0); + td::actor::send_closure( + state_->speed_limiters_.download, &SpeedLimiter::enqueue, (double)piece_size, timeout, + [=, SelfId = actor_id(this)](td::Result R) { + if (R.is_ok()) { + td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part, std::move(R)); + } else { + delay_action( + [=, R = std::move(R)]() mutable { + td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part, std::move(R)); + }, + timeout); + } + }); } } diff --git a/storage/PeerManager.h b/storage/PeerManager.h index 52297ac5f..38d1494b0 100644 --- a/storage/PeerManager.h +++ b/storage/PeerManager.h @@ -143,9 +143,11 @@ class PeerManager : public td::actor::Actor { td::actor::ActorId peer_manager_; ton::adnl::AdnlNodeIdShort dst_; }; + ton::overlay::OverlayOptions opts; + opts.announce_self_ = !client_mode_; + opts.frequent_dht_lookup_ = true; send_closure(overlays_, &ton::overlay::Overlays::create_public_overlay_ex, src_id, overlay_id_.clone(), - std::make_unique(actor_id(this), src_id), rules, R"({ "type": "storage" })", - !client_mode_); + std::make_unique(actor_id(this), src_id), rules, R"({ "type": "storage" })", opts); } promise.set_value({}); } 
diff --git a/storage/SpeedLimiter.cpp b/storage/SpeedLimiter.cpp index 952005feb..704c7402d 100644 --- a/storage/SpeedLimiter.cpp +++ b/storage/SpeedLimiter.cpp @@ -16,6 +16,7 @@ */ #include "SpeedLimiter.h" +#include "common/errorcode.h" namespace ton { @@ -41,11 +42,11 @@ void SpeedLimiter::enqueue(double size, td::Timestamp timeout, td::Promise( tonlib_api::make_object(r_conf_data.move_as_ok().as_slice().str(), "", false, false), - tonlib_api::make_object()); + tonlib_api::make_object(key_store)); tonlib_client_ = td::actor::create_actor("tonlibclient", std::move(tonlib_options)); } diff --git a/tdnet/td/net/TcpListener.cpp b/tdnet/td/net/TcpListener.cpp index 7b7364bae..e711cbbda 100644 --- a/tdnet/td/net/TcpListener.cpp +++ b/tdnet/td/net/TcpListener.cpp @@ -46,9 +46,11 @@ void TcpListener::start_up() { } void TcpListener::tear_down() { - // unsubscribe from socket updates - // nb: interface will be changed - td::actor::SchedulerContext::get()->get_poll().unsubscribe(server_socket_fd_.get_poll_info().get_pollable_fd_ref()); + if (!server_socket_fd_.empty()) { + // unsubscribe from socket updates + // nb: interface will be changed + td::actor::SchedulerContext::get()->get_poll().unsubscribe(server_socket_fd_.get_poll_info().get_pollable_fd_ref()); + } } void TcpListener::loop() { diff --git a/tdnet/test/net-test.cpp b/tdnet/test/net-test.cpp index bb084a677..d20be5041 100644 --- a/tdnet/test/net-test.cpp +++ b/tdnet/test/net-test.cpp @@ -158,9 +158,11 @@ void run_server(int from_port, int to_port, bool is_first, bool use_tcp) { TEST(Net, PingPong) { SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR)); + int port1 = td::Random::fast(10000, 10999); + int port2 = td::Random::fast(11000, 11999); for (auto use_tcp : {false, true}) { - auto a = td::thread([use_tcp] { run_server(8091, 8092, true, use_tcp); }); - auto b = td::thread([use_tcp] { run_server(8092, 8091, false, use_tcp); }); + auto a = td::thread([=] { run_server(port1, port2, true, use_tcp); }); + auto b = 
td::thread([=] { run_server(port2, port1, false, use_tcp); }); a.join(); b.join(); } diff --git a/tdutils/td/utils/as.h b/tdutils/td/utils/as.h index c60c74e25..6015af294 100644 --- a/tdutils/td/utils/as.h +++ b/tdutils/td/utils/as.h @@ -76,12 +76,7 @@ class ConstAs { } // namespace detail -// no std::is_trivially_copyable in libstdc++ before 5.0 -#if __GLIBCXX__ -#define TD_IS_TRIVIALLY_COPYABLE(T) __has_trivial_copy(T) -#else #define TD_IS_TRIVIALLY_COPYABLE(T) std::is_trivially_copyable::value -#endif template = 0> diff --git a/test/test-catchain.cpp b/test/test-catchain.cpp index 149ea3e38..3131c2b95 100644 --- a/test/test-catchain.cpp +++ b/test/test-catchain.cpp @@ -186,6 +186,7 @@ class CatChainInst : public td::actor::Actor { void create_fork() { auto height = height_ - 1; //td::Random::fast(0, height_ - 1); + LOG(WARNING) << "Creating fork, source_id=" << idx_ << ", height=" << height; auto sum = prev_values_[height] + 1; td::uint64 x[2]; @@ -241,7 +242,8 @@ int main(int argc, char *argv[]) { td::actor::send_closure(adnl, &ton::adnl::Adnl::register_network_manager, network_manager.get()); }); - for (td::uint32 att = 0; att < 10; att++) { + for (td::uint32 att = 0; att < 20; att++) { + LOG(WARNING) << "Test #" << att; nodes.resize(total_nodes); scheduler.run_in_context([&] { @@ -296,7 +298,10 @@ int main(int argc, char *argv[]) { std::cout << "value=" << n.get_actor_unsafe().value() << std::endl; } - scheduler.run_in_context([&] { td::actor::send_closure(inst[0], &CatChainInst::create_fork); }); + td::uint32 fork_cnt = att < 10 ? 
1 : (att - 10) / 5 + 2; + for (td::uint32 idx = 0; idx < fork_cnt; ++idx) { + scheduler.run_in_context([&] { td::actor::send_closure(inst[idx], &CatChainInst::create_fork); }); + } t = td::Timestamp::in(1.0); while (scheduler.run(1)) { diff --git a/tl/generate/scheme/ton_api.tl b/tl/generate/scheme/ton_api.tl index 346d91522..21e8318f8 100644 --- a/tl/generate/scheme/ton_api.tl +++ b/tl/generate/scheme/ton_api.tl @@ -742,11 +742,12 @@ http.server.config dhs:(vector http.server.dnsEntry) local_hosts:(vector http.se ---types--- -validatorSession.statsProducer id:int256 block_status:int block_timestamp:long = validatorSession.StatsProducer; +validatorSession.statsProducer id:int256 candidate_id:int256 block_status:int block_timestamp:long comment:string = validatorSession.StatsProducer; validatorSession.statsRound timestamp:long producers:(vector validatorSession.statsProducer) = validatorSession.StatsRound; -validatorSession.stats id:tonNode.blockId timestamp:long self:int256 creator:int256 total_validators:int total_weight:long +validatorSession.stats success:Bool id:tonNode.blockIdExt timestamp:long self:int256 session_id:int256 cc_seqno:int + creator:int256 total_validators:int total_weight:long signatures:int signatures_weight:long approve_signatures:int approve_signatures_weight:long first_round:int rounds:(vector validatorSession.statsRound) = validatorSession.Stats; diff --git a/tl/generate/scheme/ton_api.tlo b/tl/generate/scheme/ton_api.tlo index 5b8e1c725..9b74f1c3b 100644 Binary files a/tl/generate/scheme/ton_api.tlo and b/tl/generate/scheme/ton_api.tlo differ diff --git a/tonlib/test/offline.cpp b/tonlib/test/offline.cpp index 35a95f5b1..0fedc865e 100644 --- a/tonlib/test/offline.cpp +++ b/tonlib/test/offline.cpp @@ -467,15 +467,20 @@ TEST(Tonlib, KeysApi) { make_object(make_object(key->public_key_, key->secret_.copy()))) .move_as_ok(); - auto err1 = sync_send(client, make_object( - new_local_password.copy(), td::SecureString("wrong password"), - 
make_object(copy_word_list()))) - .move_as_error(); + auto err1 = sync_send( + client, make_object(new_local_password.copy(), td::SecureString("wrong password"), + make_object(copy_word_list()))); + if (err1.is_ok()) { + if (err1.ok()->public_key_ != key->public_key_) { + err1 = td::Status::Error("imported key successfully, but the public key is different"); + } + } + err1.ensure_error(); auto err2 = sync_send(client, make_object(new_local_password.copy(), td::SecureString(), - make_object(copy_word_list()))) - .move_as_error(); - LOG(INFO) << err1 << " | " << err2; + make_object(copy_word_list()))); + err2.ensure_error(); + LOG(INFO) << err1.move_as_error() << " | " << err2.move_as_error(); auto imported_key = sync_send(client, make_object(new_local_password.copy(), mnemonic_password.copy(), make_object(copy_word_list()))) diff --git a/tonlib/tonlib/TonlibClient.cpp b/tonlib/tonlib/TonlibClient.cpp index 9ec664bba..bee7ef409 100644 --- a/tonlib/tonlib/TonlibClient.cpp +++ b/tonlib/tonlib/TonlibClient.cpp @@ -4995,6 +4995,8 @@ td::Status TonlibClient::do_request(const tonlib_api::importKey& request, if (!request.exported_key_) { return TonlibError::EmptyField("exported_key"); } + // Note: the mnemonic is considered valid if a certain hash starts with zero byte (see Mnemonic::is_basic_seed()) + // Therefore, importKey with invalid password has 1/256 chance to return OK TRY_RESULT(key, key_storage_.import_key(std::move(request.local_password_), std::move(request.mnemonic_password_), KeyStorage::ExportedKey{std::move(request.exported_key_->word_list_)})); TRY_RESULT(key_bytes, public_key_from_bytes(key.public_key.as_slice())); diff --git a/validator-engine/validator-engine.cpp b/validator-engine/validator-engine.cpp index d98c296c9..e488504f0 100644 --- a/validator-engine/validator-engine.cpp +++ b/validator-engine/validator-engine.cpp @@ -1595,6 +1595,12 @@ void ValidatorEngine::load_config(td::Promise promise) { config_file_ = db_root_ + "/config.json"; } auto 
conf_data_R = td::read_file(config_file_); + if (conf_data_R.is_error()) { + conf_data_R = td::read_file(temp_config_file()); + if (conf_data_R.is_ok()) { + td::rename(temp_config_file(), config_file_).ensure(); + } + } if (conf_data_R.is_error()) { auto P = td::PromiseCreator::lambda( [name = local_config_, new_name = config_file_, promise = std::move(promise)](td::Result R) { @@ -1643,12 +1649,15 @@ void ValidatorEngine::load_config(td::Promise promise) { void ValidatorEngine::write_config(td::Promise promise) { auto s = td::json_encode(td::ToJson(*config_.tl().get()), true); - auto S = td::write_file(config_file_, s); - if (S.is_ok()) { - promise.set_value(td::Unit()); - } else { + auto S = td::write_file(temp_config_file(), s); + if (S.is_error()) { + td::unlink(temp_config_file()).ignore(); promise.set_error(std::move(S)); + return; } + td::unlink(config_file_).ignore(); + TRY_STATUS_PROMISE(promise, td::rename(temp_config_file(), config_file_)); + promise.set_value(td::Unit()); } td::Promise ValidatorEngine::get_key_promise(td::MultiPromise::InitGuard &ig) { @@ -3775,11 +3784,15 @@ int main(int argc, char *argv[]) { acts.push_back([&x, at]() { td::actor::send_closure(x, &ValidatorEngine::schedule_shutdown, (double)at); }); return td::Status::OK(); }); - p.add_checked_option('\0', "celldb-compress-depth", "(default: 0)", [&](td::Slice arg) { - TRY_RESULT(value, td::to_integer_safe(arg)); - acts.push_back([&x, value]() { td::actor::send_closure(x, &ValidatorEngine::set_celldb_compress_depth, value); }); - return td::Status::OK(); - }); + p.add_checked_option('\0', "celldb-compress-depth", + "optimize celldb by storing cells of depth X with whole subtrees (experimental, default: 0)", + [&](td::Slice arg) { + TRY_RESULT(value, td::to_integer_safe(arg)); + acts.push_back([&x, value]() { + td::actor::send_closure(x, &ValidatorEngine::set_celldb_compress_depth, value); + }); + return td::Status::OK(); + }); auto S = p.run(argc, argv); if (S.is_error()) { LOG(ERROR) 
<< "failed to parse options: " << S.move_as_error(); diff --git a/validator-engine/validator-engine.hpp b/validator-engine/validator-engine.hpp index ebcd60c66..e59bb4181 100644 --- a/validator-engine/validator-engine.hpp +++ b/validator-engine/validator-engine.hpp @@ -1,4 +1,4 @@ -/* +/* This file is part of TON Blockchain source code. TON Blockchain is free software; you can redistribute it and/or @@ -152,6 +152,9 @@ class ValidatorEngine : public td::actor::Actor { std::string local_config_ = ""; std::string global_config_ = "ton-global.config"; std::string config_file_; + std::string temp_config_file() const { + return config_file_ + ".tmp"; + } std::string fift_dir_ = ""; diff --git a/validator-session/validator-session-types.h b/validator-session/validator-session-types.h index 579574786..bcbaa8f71 100644 --- a/validator-session/validator-session-types.h +++ b/validator-session/validator-session-types.h @@ -74,8 +74,10 @@ struct ValidatorSessionStats { struct Producer { PublicKeyHash id = PublicKeyHash::zero(); + ValidatorSessionCandidateId candidate_id = ValidatorSessionCandidateId::zero(); int block_status = status_none; td::uint64 block_timestamp = 0; + std::string comment; }; struct Round { td::uint64 timestamp = 0; @@ -85,6 +87,9 @@ struct ValidatorSessionStats { td::uint32 first_round; std::vector rounds; + bool success = false; + ValidatorSessionId session_id = ValidatorSessionId::zero(); + CatchainSeqno cc_seqno = 0; td::uint64 timestamp = 0; PublicKeyHash self = PublicKeyHash::zero(); PublicKeyHash creator = PublicKeyHash::zero(); diff --git a/validator-session/validator-session.cpp b/validator-session/validator-session.cpp index e08d8a7e0..88de0fa53 100644 --- a/validator-session/validator-session.cpp +++ b/validator-session/validator-session.cpp @@ -288,7 +288,7 @@ void ValidatorSessionImpl::process_broadcast(PublicKeyHash src, td::BufferSlice CHECK(!pending_reject_.count(block_id)); CHECK(!rejected_.count(block_id)); - 
stats_set_candidate_status(cur_round_, src, ValidatorSessionStats::status_received); + stats_set_candidate_status(cur_round_, src, block_id, ValidatorSessionStats::status_received); auto v = virtual_state_->choose_blocks_to_approve(description(), local_idx()); for (auto &b : v) { if (b && SentBlock::get_block_id(b) == block_id) { @@ -361,7 +361,8 @@ void ValidatorSessionImpl::process_query(PublicKeyHash src, td::BufferSlice data void ValidatorSessionImpl::candidate_decision_fail(td::uint32 round, ValidatorSessionCandidateId hash, std::string result, td::uint32 src, td::BufferSlice proof) { - stats_set_candidate_status(round, description().get_source_id(src), ValidatorSessionStats::status_rejected); + stats_set_candidate_status(round, description().get_source_id(src), hash, ValidatorSessionStats::status_rejected, + result); if (round != cur_round_) { return; } @@ -376,7 +377,8 @@ void ValidatorSessionImpl::candidate_decision_fail(td::uint32 round, ValidatorSe void ValidatorSessionImpl::candidate_decision_ok(td::uint32 round, ValidatorSessionCandidateId hash, RootHash root_hash, FileHash file_hash, td::uint32 src, td::uint32 ok_from) { - stats_set_candidate_status(round, description().get_source_id(src), ValidatorSessionStats::status_approved); + stats_set_candidate_status(round, description().get_source_id(src), hash, ValidatorSessionStats::status_approved, + PSTRING() << "ts=" << ok_from); if (round != cur_round_) { return; } @@ -812,15 +814,13 @@ void ValidatorSessionImpl::on_new_round(td::uint32 round) { if (!have_block) { callback_->on_block_skipped(cur_round_); } else { + cur_stats_.success = true; cur_stats_.timestamp = (td::uint64)td::Clocks::system(); - cur_stats_.total_validators = description().get_total_nodes(); - cur_stats_.total_weight = description().get_total_weight(); cur_stats_.signatures = (td::uint32)export_sigs.size(); cur_stats_.signatures_weight = signatures_weight; cur_stats_.approve_signatures = (td::uint32)export_approve_sigs.size(); 
cur_stats_.approve_signatures_weight = approve_signatures_weight; cur_stats_.creator = description().get_source_id(block->get_src_idx()); - cur_stats_.self = description().get_source_id(local_idx()); if (it == blocks_.end()) { callback_->on_block_committed(cur_round_, description().get_source_public_key(block->get_src_idx()), @@ -923,6 +923,12 @@ void ValidatorSessionImpl::destroy() { stop(); } +void ValidatorSessionImpl::get_current_stats(td::Promise promise) { + ValidatorSessionStats stats = cur_stats_; + stats.timestamp = (td::uint64)td::Clocks::system(); + promise.set_result(std::move(stats)); +} + void ValidatorSessionImpl::start_up() { CHECK(!rldp_.empty()); cur_round_ = 0; @@ -941,6 +947,10 @@ void ValidatorSessionImpl::start_up() { void ValidatorSessionImpl::stats_init() { cur_stats_ = ValidatorSessionStats(); cur_stats_.first_round = cur_round_; + cur_stats_.session_id = unique_hash_; + cur_stats_.total_validators = description().get_total_nodes(); + cur_stats_.total_weight = description().get_total_weight(); + cur_stats_.self = description().get_source_id(local_idx()); stats_add_round(); } @@ -961,20 +971,26 @@ void ValidatorSessionImpl::stats_add_round() { } } -void ValidatorSessionImpl::stats_set_candidate_status(td::uint32 round, PublicKeyHash src, int status) { +void ValidatorSessionImpl::stats_set_candidate_status(td::uint32 round, PublicKeyHash src, + ValidatorSessionCandidateId candidate_id, int status, + std::string comment) { if (round < cur_stats_.first_round || round - cur_stats_.first_round >= cur_stats_.rounds.size()) { return; } - auto& stats_round = cur_stats_.rounds[round - cur_stats_.first_round]; + auto &stats_round = cur_stats_.rounds[round - cur_stats_.first_round]; auto it = std::find_if(stats_round.producers.begin(), stats_round.producers.end(), - [&](const ValidatorSessionStats::Producer& p) { return p.id == src; }); + [&](const ValidatorSessionStats::Producer &p) { return p.id == src; }); if (it == stats_round.producers.end()) { 
return; } + it->candidate_id = candidate_id; if (it->block_status == ValidatorSessionStats::status_none) { it->block_timestamp = (td::uint64)td::Clocks::system(); } it->block_status = status; + if (!comment.empty()) { + it->comment = std::move(comment); + } } td::actor::ActorOwn ValidatorSession::create( diff --git a/validator-session/validator-session.h b/validator-session/validator-session.h index 376cac45e..3f3b7ab96 100644 --- a/validator-session/validator-session.h +++ b/validator-session/validator-session.h @@ -91,6 +91,7 @@ class ValidatorSession : public td::actor::Actor { virtual void start() = 0; virtual void destroy() = 0; + virtual void get_current_stats(td::Promise promise) = 0; static td::actor::ActorOwn create( catchain::CatChainSessionId session_id, ValidatorSessionOptions opts, PublicKeyHash local_id, diff --git a/validator-session/validator-session.hpp b/validator-session/validator-session.hpp index 1717c99f0..2dcbb46c5 100644 --- a/validator-session/validator-session.hpp +++ b/validator-session/validator-session.hpp @@ -160,7 +160,8 @@ class ValidatorSessionImpl : public ValidatorSession { ValidatorSessionStats cur_stats_; void stats_init(); void stats_add_round(); - void stats_set_candidate_status(td::uint32 round, PublicKeyHash src, int status); + void stats_set_candidate_status(td::uint32 round, PublicKeyHash src, ValidatorSessionCandidateId candidate_id, + int status, std::string comment = ""); public: ValidatorSessionImpl(catchain::CatChainSessionId session_id, ValidatorSessionOptions opts, PublicKeyHash local_id, @@ -173,6 +174,7 @@ class ValidatorSessionImpl : public ValidatorSession { void start() override; void destroy() override; + void get_current_stats(td::Promise promise) override; void process_blocks(std::vector blocks); void finished_processing(); diff --git a/validator/db/celldb.cpp b/validator/db/celldb.cpp index 6a2b46992..d29126cea 100644 --- a/validator/db/celldb.cpp +++ b/validator/db/celldb.cpp @@ -23,6 +23,7 @@ #include 
"ton/ton-tl.hpp" #include "ton/ton-io.hpp" +#include "common/delay.h" namespace ton { @@ -68,14 +69,16 @@ CellDbIn::CellDbIn(td::actor::ActorId root_db, td::actor::ActorId>( + td::actor::create_actor("celldbmigration", actor_id(this))), compress_depth = opts_->get_celldb_compress_depth()](const vm::CellLoader::LoadResult& res) { if (res.cell_.is_null()) { return; } bool expected_stored_boc = res.cell_->get_depth() == compress_depth && compress_depth != 0; if (expected_stored_boc != res.stored_boc_) { - td::actor::send_closure(db, &CellDbIn::migrate_cell, td::Bits256{res.cell_->get_hash().bits()}); + td::actor::send_closure(*actor, &CellDbIn::MigrationProxy::migrate_cell, + td::Bits256{res.cell_->get_hash().bits()}); } }; @@ -156,6 +159,13 @@ void CellDbIn::alarm() { if (migrate_after_ && migrate_after_.is_in_past()) { migrate_cells(); } + if (migration_stats_ && migration_stats_->end_at_.is_in_past()) { + LOG(INFO) << "CellDb migration, " << migration_stats_->start_.elapsed() + << "s stats: batches=" << migration_stats_->batches_ << " migrated=" << migration_stats_->migrated_cells_ + << " checked=" << migration_stats_->checked_cells_ << " time=" << migration_stats_->total_time_ + << " queue_size=" << cells_to_migrate_.size(); + migration_stats_ = {}; + } auto E = get_block(get_empty_key_hash()).move_as_ok(); auto N = get_block(E.next).move_as_ok(); if (N.is_empty()) { @@ -291,23 +301,31 @@ void CellDbIn::set_block(KeyHash key_hash, DbEntry e) { void CellDbIn::migrate_cell(td::Bits256 hash) { cells_to_migrate_.insert(hash); - if (cells_to_migrate_.size() >= 32) { - migrate_cells(); - } else if (!migrate_after_) { - migrate_after_ = td::Timestamp::in(1.0); + if (!migration_active_) { + migration_active_ = true; + migrate_after_ = td::Timestamp::in(10.0); } } void CellDbIn::migrate_cells() { + migrate_after_ = td::Timestamp::never(); if (cells_to_migrate_.empty()) { + migration_active_ = false; return; } + td::Timer timer; + if (!migration_stats_) { + migration_stats_ 
= std::make_unique(); + } vm::CellStorer stor{*cell_db_}; auto loader = std::make_unique(cell_db_->snapshot()); boc_->set_loader(std::make_unique(*loader)).ensure(); cell_db_->begin_write_batch().ensure(); - td::uint32 cnt = 0; - for (const auto& hash : cells_to_migrate_) { + td::uint32 checked = 0, migrated = 0; + for (auto it = cells_to_migrate_.begin(); it != cells_to_migrate_.end() && checked < 128; ) { + ++checked; + td::Bits256 hash = *it; + it = cells_to_migrate_.erase(it); auto R = loader->load(hash.as_slice(), true, boc_->as_ext_cell_creator()); if (R.is_error()) { continue; @@ -318,18 +336,27 @@ void CellDbIn::migrate_cells() { bool expected_stored_boc = R.ok().cell_->get_depth() == opts_->get_celldb_compress_depth() && opts_->get_celldb_compress_depth() != 0; if (expected_stored_boc != R.ok().stored_boc_) { - ++cnt; + ++migrated; stor.set(R.ok().refcnt(), R.ok().cell_, expected_stored_boc).ensure(); } } - cells_to_migrate_.clear(); - if (cnt > 0) { - LOG(DEBUG) << "Migrated " << cnt << " cells"; - } cell_db_->commit_write_batch().ensure(); boc_->set_loader(std::make_unique(cell_db_->snapshot(), on_load_callback_)).ensure(); td::actor::send_closure(parent_, &CellDb::update_snapshot, cell_db_->snapshot()); - migrate_after_ = td::Timestamp::never(); + + double time = timer.elapsed(); + LOG(DEBUG) << "CellDb migration: migrated=" << migrated << " checked=" << checked << " time=" << time; + ++migration_stats_->batches_; + migration_stats_->migrated_cells_ += migrated; + migration_stats_->checked_cells_ += checked; + migration_stats_->total_time_ += time; + + if (cells_to_migrate_.empty()) { + migration_active_ = false; + } else { + delay_action([SelfId = actor_id(this)] { td::actor::send_closure(SelfId, &CellDbIn::migrate_cells); }, + td::Timestamp::in(time * 2)); + } } void CellDb::load_cell(RootHash hash, td::Promise> promise) { @@ -361,14 +388,16 @@ void CellDb::start_up() { boc_ = vm::DynamicBagOfCellsDb::create(); 
boc_->set_celldb_compress_depth(opts_->get_celldb_compress_depth()); cell_db_ = td::actor::create_actor("celldbin", root_db_, actor_id(this), path_, opts_); - on_load_callback_ = [db = cell_db_.get(), + on_load_callback_ = [actor = std::make_shared>( + td::actor::create_actor("celldbmigration", cell_db_.get())), compress_depth = opts_->get_celldb_compress_depth()](const vm::CellLoader::LoadResult& res) { if (res.cell_.is_null()) { return; } bool expected_stored_boc = res.cell_->get_depth() == compress_depth && compress_depth != 0; if (expected_stored_boc != res.stored_boc_) { - td::actor::send_closure(db, &CellDbIn::migrate_cell, td::Bits256{res.cell_->get_hash().bits()}); + td::actor::send_closure(*actor, &CellDbIn::MigrationProxy::migrate_cell, + td::Bits256{res.cell_->get_hash().bits()}); } }; } diff --git a/validator/db/celldb.hpp b/validator/db/celldb.hpp index 6545d5970..a2a84ab4a 100644 --- a/validator/db/celldb.hpp +++ b/validator/db/celldb.hpp @@ -107,6 +107,30 @@ class CellDbIn : public CellDbBase { std::function on_load_callback_; std::set cells_to_migrate_; td::Timestamp migrate_after_ = td::Timestamp::never(); + bool migration_active_ = false; + + struct MigrationStats { + td::Timer start_; + td::Timestamp end_at_ = td::Timestamp::in(60.0); + size_t batches_ = 0; + size_t migrated_cells_ = 0; + size_t checked_cells_ = 0; + double total_time_ = 0.0; + }; + std::unique_ptr migration_stats_; + + public: + class MigrationProxy : public td::actor::Actor { + public: + explicit MigrationProxy(td::actor::ActorId cell_db) : cell_db_(cell_db) { + } + void migrate_cell(td::Bits256 hash) { + td::actor::send_closure(cell_db_, &CellDbIn::migrate_cell, hash); + } + + private: + td::actor::ActorId cell_db_; + }; }; class CellDb : public CellDbBase { diff --git a/validator/full-node-shard.cpp b/validator/full-node-shard.cpp index 0f4950779..7c59a79c4 100644 --- a/validator/full-node-shard.cpp +++ b/validator/full-node-shard.cpp @@ -795,7 +795,7 @@ void 
FullNodeShardImpl::download_block_proof_link(BlockIdExt block_id, td::uint3 td::Promise promise) { auto &b = choose_neighbour(); td::actor::create_actor("downloadproofreq", block_id, true, false, adnl_id_, overlay_id_, - adnl::AdnlNodeIdShort::zero(), priority, timeout, validator_manager_, rldp_, + b.adnl_id, priority, timeout, validator_manager_, rldp_, overlays_, adnl_, client_, create_neighbour_promise(b, std::move(promise))) .release(); } @@ -803,7 +803,7 @@ void FullNodeShardImpl::download_block_proof_link(BlockIdExt block_id, td::uint3 void FullNodeShardImpl::get_next_key_blocks(BlockIdExt block_id, td::Timestamp timeout, td::Promise> promise) { auto &b = choose_neighbour(); - td::actor::create_actor("next", block_id, 16, adnl_id_, overlay_id_, adnl::AdnlNodeIdShort::zero(), + td::actor::create_actor("next", block_id, 16, adnl_id_, overlay_id_, b.adnl_id, 1, timeout, validator_manager_, rldp_, overlays_, adnl_, client_, create_neighbour_promise(b, std::move(promise))) .release(); diff --git a/validator/impl/collator.cpp b/validator/impl/collator.cpp index 86ee1cf46..1b7991f6e 100644 --- a/validator/impl/collator.cpp +++ b/validator/impl/collator.cpp @@ -106,7 +106,7 @@ Collator::Collator(ShardIdFull shard, bool is_hardfork, UnixTime min_ts, BlockId * The results of these queries are handled by corresponding callback functions. */ void Collator::start_up() { - LOG(DEBUG) << "Collator for shard " << shard_.to_str() << " started"; + LOG(WARNING) << "Collator for shard " << shard_.to_str() << " started"; LOG(DEBUG) << "Previous block #1 is " << prev_blocks.at(0).to_str(); if (prev_blocks.size() > 1) { LOG(DEBUG) << "Previous block #2 is " << prev_blocks.at(1).to_str(); @@ -554,7 +554,7 @@ bool Collator::preprocess_prev_mc_state() { * @param res The retreived masterchain state. 
*/ void Collator::after_get_mc_state(td::Result, BlockIdExt>> res) { - LOG(DEBUG) << "in Collator::after_get_mc_state()"; + LOG(WARNING) << "in Collator::after_get_mc_state()"; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -589,7 +589,7 @@ void Collator::after_get_mc_state(td::Result, Bl * @param res The retrieved shard state. */ void Collator::after_get_shard_state(int idx, td::Result> res) { - LOG(DEBUG) << "in Collator::after_get_shard_state(" << idx << ")"; + LOG(WARNING) << "in Collator::after_get_shard_state(" << idx << ")"; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -820,7 +820,6 @@ bool Collator::request_out_msg_queue_size() { * @param res The obtained outbound queue. */ void Collator::got_neighbor_out_queue(int i, td::Result> res) { - LOG(DEBUG) << "obtained outbound queue for neighbor #" << i; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -828,6 +827,7 @@ void Collator::got_neighbor_out_queue(int i, td::Result> res) } Ref outq_descr = res.move_as_ok(); block::McShardDescr& descr = neighbors_.at(i); + LOG(WARNING) << "obtained outbound queue for neighbor #" << i << " : " << descr.shard().to_str(); if (outq_descr->get_block_id() != descr.blk_) { LOG(DEBUG) << "outq_descr->id = " << outq_descr->get_block_id().to_str() << " ; descr.id = " << descr.blk_.to_str(); fatal_error( @@ -893,7 +893,7 @@ void Collator::got_out_queue_size(size_t i, td::Result res) { return; } td::uint32 size = res.move_as_ok(); - LOG(DEBUG) << "got outbound queue size from prev block #" << i << ": " << size; + LOG(WARNING) << "got outbound queue size from prev block #" << i << ": " << size; out_msg_queue_size_ += size; check_pending(); } @@ -1762,7 +1762,7 @@ bool Collator::register_shard_block_creators(std::vector creator_li */ bool Collator::try_collate() { if (!preinit_complete) { - LOG(DEBUG) << "running do_preinit()"; + LOG(WARNING) << "running do_preinit()"; if (!do_preinit()) { return fatal_error(-667, 
"error preinitializing data required by collator"); } @@ -2062,7 +2062,7 @@ bool Collator::do_collate() { // After do_collate started it will not be interrupted by timeout alarm_timestamp() = td::Timestamp::never(); - LOG(DEBUG) << "do_collate() : start"; + LOG(WARNING) << "do_collate() : start"; if (!fetch_config_params()) { return fatal_error("cannot fetch required configuration parameters from masterchain state"); } @@ -2276,8 +2276,8 @@ bool Collator::out_msg_queue_cleanup() { } return !delivered; }); - LOG(INFO) << "deleted " << deleted << " messages from out_msg_queue after merge, remaining queue size is " - << out_msg_queue_size_; + LOG(WARNING) << "deleted " << deleted << " messages from out_msg_queue after merge, remaining queue size is " + << out_msg_queue_size_; if (res < 0) { return fatal_error("error scanning/updating OutMsgQueue"); } @@ -2352,8 +2352,8 @@ bool Collator::out_msg_queue_cleanup() { std::swap(queue_parts[i], queue_parts.back()); queue_parts.pop_back(); } - LOG(INFO) << "deleted " << deleted << " messages from out_msg_queue, remaining queue size is " - << out_msg_queue_size_; + LOG(WARNING) << "deleted " << deleted << " messages from out_msg_queue, remaining queue size is " + << out_msg_queue_size_; } if (verbosity >= 2) { auto rt = out_msg_queue_->get_root(); @@ -2667,7 +2667,8 @@ bool Collator::create_ticktock_transaction(const ton::StdSmcAddress& smc_addr, t return fatal_error(td::Status::Error( -666, std::string{"cannot serialize new transaction for smart contract "} + smc_addr.to_hex())); } - if (!trans->update_limits(*block_limit_status_)) { + if (!trans->update_limits(*block_limit_status_, + /* with_gas = */ !(acc->is_special && compute_phase_cfg_.special_gas_full))) { return fatal_error(-666, "cannot update block limit status to include the new transaction"); } if (trans->commit(*acc).is_null()) { @@ -2744,7 +2745,8 @@ Ref Collator::create_ordinary_transaction(Ref msg_root) { } std::unique_ptr trans = res.move_as_ok(); - if 
(!trans->update_limits(*block_limit_status_)) { + if (!trans->update_limits(*block_limit_status_, + /* with_gas = */ !(acc->is_special && compute_phase_cfg_.special_gas_full))) { fatal_error("cannot update block limit status to include the new transaction"); return {}; } @@ -5035,7 +5037,7 @@ void Collator::return_block_candidate(td::Result saved) { fatal_error(std::move(err)); } else { CHECK(block_candidate); - LOG(INFO) << "sending new BlockCandidate to Promise"; + LOG(WARNING) << "sending new BlockCandidate to Promise"; main_promise(block_candidate->clone()); busy_ = false; stop(); @@ -5133,6 +5135,8 @@ void Collator::after_get_external_messages(td::Resulthash()); } } + LOG(WARNING) << "got " << vect.size() << " external messages from mempool, " << bad_ext_msgs_.size() + << " bad messages"; check_pending(); } diff --git a/validator/impl/fabric.cpp b/validator/impl/fabric.cpp index 23a03482c..ede8d36d3 100644 --- a/validator/impl/fabric.cpp +++ b/validator/impl/fabric.cpp @@ -202,10 +202,12 @@ void run_validate_query(ShardIdFull shard, UnixTime min_ts, BlockIdExt min_maste seqno = p.seqno(); } } - td::actor::create_actor( - PSTRING() << (is_fake ? "fakevalidate" : "validateblock") << shard.to_str() << ":" << (seqno + 1), shard, min_ts, - min_masterchain_block_id, std::move(prev), std::move(candidate), std::move(validator_set), std::move(manager), - timeout, std::move(promise), is_fake) + static std::atomic idx; + td::actor::create_actor(PSTRING() << (is_fake ? 
"fakevalidate" : "validateblock") << shard.to_str() + << ":" << (seqno + 1) << "#" << idx.fetch_add(1), + shard, min_ts, min_masterchain_block_id, std::move(prev), std::move(candidate), + std::move(validator_set), std::move(manager), timeout, std::move(promise), + is_fake) .release(); } diff --git a/validator/impl/liteserver.cpp b/validator/impl/liteserver.cpp index 9c7a04561..34b9f1f1a 100644 --- a/validator/impl/liteserver.cpp +++ b/validator/impl/liteserver.cpp @@ -505,20 +505,7 @@ void LiteQuery::perform_sendMessage(td::BufferSlice data) { } void LiteQuery::get_block_handle_checked(BlockIdExt blkid, td::Promise promise) { - auto P = td::PromiseCreator::lambda( - [promise = std::move(promise)](td::Result R) mutable { - if (R.is_error()) { - promise.set_error(R.move_as_error()); - } else { - auto handle = R.move_as_ok(); - if (handle->is_applied()) { - promise.set_result(std::move(handle)); - } else { - promise.set_error(td::Status::Error(ErrorCode::notready, "block is not applied")); - } - } - }); - td::actor::send_closure(manager_, &ValidatorManager::get_block_handle, blkid, false, std::move(P)); + td::actor::send_closure(manager_, &ValidatorManager::get_block_handle_for_litequery, blkid, std::move(promise)); } bool LiteQuery::request_mc_block_data(BlockIdExt blkid) { @@ -1047,7 +1034,8 @@ bool LiteQuery::make_state_root_proof(Ref& proof, Ref state_ vm::MerkleProofBuilder pb{std::move(block_root)}; block::gen::Block::Record blk; block::gen::BlockInfo::Record info; - if (!(tlb::unpack_cell(pb.root(), blk) && tlb::unpack_cell(blk.info, info))) { + if (!(tlb::unpack_cell(pb.root(), blk) && tlb::unpack_cell(blk.info, info) && + block::gen::BlkPrevInfo(info.after_merge).validate_ref(info.prev_ref))) { return fatal_error("cannot unpack block header"); } vm::CellSlice upd_cs{vm::NoVmSpec(), blk.state_update}; @@ -1497,17 +1485,12 @@ void LiteQuery::continue_getTransactions(unsigned remaining, bool exact) { LOG(DEBUG) << "sending get_block_by_lt_from_db() query to 
manager for " << acc_workchain_ << ":" << acc_addr_.to_hex() << " " << trans_lt_; td::actor::send_closure_later( - manager_, &ValidatorManager::get_block_by_lt_from_db, ton::extract_addr_prefix(acc_workchain_, acc_addr_), + manager_, &ValidatorManager::get_block_by_lt_from_db_for_litequery, ton::extract_addr_prefix(acc_workchain_, acc_addr_), trans_lt_, [Self = actor_id(this), remaining, manager = manager_](td::Result res) { if (res.is_error()) { td::actor::send_closure(Self, &LiteQuery::abort_getTransactions, res.move_as_error(), ton::BlockIdExt{}); } else { auto handle = res.move_as_ok(); - if (!handle->is_applied()) { - td::actor::send_closure(Self, &LiteQuery::abort_getTransactions, td::Status::Error(ErrorCode::notready, "block is not applied"), - ton::BlockIdExt{}); - return; - } LOG(DEBUG) << "requesting data for block " << handle->id().to_str(); td::actor::send_closure_later(manager, &ValidatorManager::get_block_data_from_db, handle, [Self, blkid = handle->id(), remaining](td::Result> res) { @@ -1846,10 +1829,6 @@ void LiteQuery::perform_lookupBlock(BlockId blkid, int mode, LogicalTime lt, Uni td::actor::send_closure(Self, &LiteQuery::abort_query, res.move_as_error()); } else { auto handle = res.move_as_ok(); - if (!handle->is_applied()) { - td::actor::send_closure(Self, &LiteQuery::abort_query, td::Status::Error(ErrorCode::notready, "block is not applied")); - return; - } LOG(DEBUG) << "requesting data for block " << handle->id().to_str(); td::actor::send_closure_later(manager, &ValidatorManager::get_block_data_from_db, handle, [Self, blkid = handle->id(), mode](td::Result> res) { @@ -1865,13 +1844,14 @@ void LiteQuery::perform_lookupBlock(BlockId blkid, int mode, LogicalTime lt, Uni ton::AccountIdPrefixFull pfx{blkid.workchain, blkid.shard}; if (mode & 2) { - td::actor::send_closure_later(manager_, &ValidatorManager::get_block_by_lt_from_db, pfx, lt, std::move(P)); + td::actor::send_closure_later(manager_, 
&ValidatorManager::get_block_by_lt_from_db_for_litequery, pfx, lt, + std::move(P)); } else if (mode & 4) { - td::actor::send_closure_later(manager_, &ValidatorManager::get_block_by_unix_time_from_db, pfx, utime, + td::actor::send_closure_later(manager_, &ValidatorManager::get_block_by_unix_time_from_db_for_litequery, pfx, utime, std::move(P)); } else { - td::actor::send_closure_later(manager_, &ValidatorManager::get_block_by_seqno_from_db, pfx, blkid.seqno, - std::move(P)); + td::actor::send_closure_later(manager_, &ValidatorManager::get_block_by_seqno_from_db_for_litequery, pfx, + blkid.seqno, std::move(P)); } } @@ -2629,7 +2609,7 @@ void LiteQuery::perform_getShardBlockProof(BlockIdExt blkid) { } AccountIdPrefixFull pfx{masterchainId, shardIdAll}; td::actor::send_closure_later( - manager, &ValidatorManager::get_block_by_seqno_from_db, pfx, handle->masterchain_ref_block(), + manager, &ValidatorManager::get_block_by_seqno_from_db_for_litequery, pfx, handle->masterchain_ref_block(), [Self, manager](td::Result R) { if (R.is_error()) { td::actor::send_closure(Self, &LiteQuery::abort_query, R.move_as_error()); diff --git a/validator/impl/validate-query.cpp b/validator/impl/validate-query.cpp index d9d032078..94eded6cd 100644 --- a/validator/impl/validate-query.cpp +++ b/validator/impl/validate-query.cpp @@ -234,6 +234,7 @@ bool ValidateQuery::fatal_error(std::string err_msg, int err_code) { */ void ValidateQuery::finish_query() { if (main_promise) { + LOG(WARNING) << "validate query done"; main_promise.set_result(now_); } stop(); @@ -252,7 +253,7 @@ void ValidateQuery::finish_query() { * Then the function also sends requests to the ValidatorManager to fetch blocks and shard stated. 
*/ void ValidateQuery::start_up() { - LOG(INFO) << "validate query for " << block_candidate.id.to_str() << " started"; + LOG(WARNING) << "validate query for " << block_candidate.id.to_str() << " started"; alarm_timestamp() = timeout; rand_seed_.set_zero(); created_by_ = block_candidate.pubkey; @@ -667,7 +668,7 @@ bool ValidateQuery::extract_collated_data() { * @param res The result of the retrieval of the latest masterchain state. */ void ValidateQuery::after_get_latest_mc_state(td::Result, BlockIdExt>> res) { - LOG(DEBUG) << "in ValidateQuery::after_get_latest_mc_state()"; + LOG(WARNING) << "in ValidateQuery::after_get_latest_mc_state()"; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -708,7 +709,7 @@ void ValidateQuery::after_get_latest_mc_state(td::Result> res) { - LOG(DEBUG) << "in ValidateQuery::after_get_mc_state() for " << mc_blkid_.to_str(); + LOG(WARNING) << "in ValidateQuery::after_get_mc_state() for " << mc_blkid_.to_str(); --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -752,7 +753,7 @@ void ValidateQuery::got_mc_handle(td::Result res) { * @param res The result of the shard state retrieval. 
*/ void ValidateQuery::after_get_shard_state(int idx, td::Result> res) { - LOG(DEBUG) << "in ValidateQuery::after_get_shard_state(" << idx << ")"; + LOG(WARNING) << "in ValidateQuery::after_get_shard_state(" << idx << ")"; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -939,6 +940,11 @@ bool ValidateQuery::fetch_config_params() { storage_phase_cfg_.delete_due_limit)) { return fatal_error("cannot unpack current gas prices and limits from masterchain configuration"); } + auto mc_gas_prices = config_->get_gas_limits_prices(true); + if (mc_gas_prices.is_error()) { + return fatal_error(mc_gas_prices.move_as_error_prefix("cannot unpack masterchain gas prices and limits: ")); + } + compute_phase_cfg_.mc_gas_prices = mc_gas_prices.move_as_ok(); storage_phase_cfg_.enable_due_payment = config_->get_global_version() >= 4; compute_phase_cfg_.block_rand_seed = rand_seed_; compute_phase_cfg_.libraries = std::make_unique(config_->get_libraries_root(), 256); @@ -1495,7 +1501,6 @@ bool ValidateQuery::request_neighbor_queues() { * @param res The obtained outbound queue. 
*/ void ValidateQuery::got_neighbor_out_queue(int i, td::Result> res) { - LOG(DEBUG) << "obtained outbound queue for neighbor #" << i; --pending; if (res.is_error()) { fatal_error(res.move_as_error()); @@ -1503,6 +1508,7 @@ void ValidateQuery::got_neighbor_out_queue(int i, td::Result> } Ref outq_descr = res.move_as_ok(); block::McShardDescr& descr = neighbors_.at(i); + LOG(WARNING) << "obtained outbound queue for neighbor #" << i << " : " << descr.shard().to_str(); if (outq_descr->get_block_id() != descr.blk_) { LOG(DEBUG) << "outq_descr->id = " << outq_descr->get_block_id().to_str() << " ; descr.id = " << descr.blk_.to_str(); fatal_error( @@ -4903,13 +4909,6 @@ bool ValidateQuery::check_one_transaction(block::Account& account, ton::LogicalT int trans_type = block::transaction::Transaction::tr_none; switch (tag) { case block::gen::TransactionDescr::trans_ord: { - if (!block_limit_status_->fits(block::ParamLimits::cl_medium)) { - return reject_query(PSTRING() << "cannod add ordinary transaction because hard block limits are exceeded: " - << "gas_used=" << block_limit_status_->gas_used - << "(limit=" << block_limits_->gas.hard() << "), " - << "lt_delta=" << block_limit_status_->cur_lt - block_limits_->start_lt - << "(limit=" << block_limits_->lt_delta.hard() << ")"); - } trans_type = block::transaction::Transaction::tr_ord; if (in_msg_root.is_null()) { return reject_query(PSTRING() << "ordinary transaction " << lt << " of account " << addr.to_hex() @@ -5057,10 +5056,19 @@ bool ValidateQuery::check_one_transaction(block::Account& account, ton::LogicalT return reject_query(PSTRING() << "cannot re-create the serialization of transaction " << lt << " for smart contract " << addr.to_hex()); } - if (!trs->update_limits(*block_limit_status_, false)) { + if (!trs->update_limits(*block_limit_status_, + /* with_gas = */ !account.is_special && !trs->gas_limit_overridden, + /* with_size = */ false)) { return fatal_error(PSTRING() << "cannot update block limit status to include 
transaction " << lt << " of account " << addr.to_hex()); } + if (block_limit_status_->gas_used > block_limits_->gas.hard() + compute_phase_cfg_.gas_limit) { + // Note that block_limit_status_->gas_used does not include transactions in special accounts + return reject_query(PSTRING() << "gas block limits are exceeded: total_gas_used > gas_limit_hard + trx_gas_limit (" + << "total_gas_used=" << block_limit_status_->gas_used + << ", gas_limit_hard=" << block_limits_->gas.hard() + << ", trx_gas_limit=" << compute_phase_cfg_.gas_limit << ")"); + } auto trans_root2 = trs->commit(account); if (trans_root2.is_null()) { return reject_query(PSTRING() << "the re-created transaction " << lt << " for smart contract " << addr.to_hex() @@ -6187,6 +6195,7 @@ bool ValidateQuery::try_validate() { } try { if (!stage_) { + LOG(WARNING) << "try_validate stage 0"; if (!compute_prev_state()) { return fatal_error(-666, "cannot compute previous state"); } @@ -6216,6 +6225,7 @@ bool ValidateQuery::try_validate() { return true; } } + LOG(WARNING) << "try_validate stage 1"; LOG(INFO) << "running automated validity checks for block candidate " << id_.to_str(); if (!block::gen::t_Block.validate_ref(10000000, block_root_)) { return reject_query("block "s + id_.to_str() + " failed to pass automated validity checks"); diff --git a/validator/interfaces/validator-manager.h b/validator/interfaces/validator-manager.h index ae15ed4cd..6b375baca 100644 --- a/validator/interfaces/validator-manager.h +++ b/validator/interfaces/validator-manager.h @@ -170,6 +170,14 @@ class ValidatorManager : public ValidatorManagerInterface { virtual void log_validator_session_stats(BlockIdExt block_id, validatorsession::ValidatorSessionStats stats) = 0; + virtual void get_block_handle_for_litequery(BlockIdExt block_id, td::Promise promise) = 0; + virtual void get_block_by_lt_from_db_for_litequery(AccountIdPrefixFull account, LogicalTime lt, + td::Promise promise) = 0; + virtual void 
get_block_by_unix_time_from_db_for_litequery(AccountIdPrefixFull account, UnixTime ts, + td::Promise promise) = 0; + virtual void get_block_by_seqno_from_db_for_litequery(AccountIdPrefixFull account, BlockSeqno seqno, + td::Promise promise) = 0; + static bool is_persistent_state(UnixTime ts, UnixTime prev_ts) { return ts / (1 << 17) != prev_ts / (1 << 17); } diff --git a/validator/manager-disk.hpp b/validator/manager-disk.hpp index 1c3f4f5b9..4812d60a8 100644 --- a/validator/manager-disk.hpp +++ b/validator/manager-disk.hpp @@ -384,6 +384,21 @@ class ValidatorManagerImpl : public ValidatorManager { } td::actor::send_closure(queue_size_counter_, &QueueSizeCounter::get_queue_size, block_id, std::move(promise)); } + void get_block_handle_for_litequery(BlockIdExt block_id, td::Promise promise) override { + get_block_handle(block_id, false, promise.wrap([](BlockHandle &&handle) -> ConstBlockHandle { return handle; })); + } + void get_block_by_lt_from_db_for_litequery(AccountIdPrefixFull account, LogicalTime lt, + td::Promise promise) override { + get_block_by_lt_from_db(account, lt, std::move(promise)); + } + void get_block_by_unix_time_from_db_for_litequery(AccountIdPrefixFull account, UnixTime ts, + td::Promise promise) override { + get_block_by_unix_time_from_db(account, ts, std::move(promise)); + } + void get_block_by_seqno_from_db_for_litequery(AccountIdPrefixFull account, BlockSeqno seqno, + td::Promise promise) override { + get_block_by_seqno_from_db(account, seqno, std::move(promise)); + } private: PublicKeyHash local_id_; diff --git a/validator/manager-hardfork.hpp b/validator/manager-hardfork.hpp index 675c23041..c34ae5c72 100644 --- a/validator/manager-hardfork.hpp +++ b/validator/manager-hardfork.hpp @@ -445,6 +445,21 @@ class ValidatorManagerImpl : public ValidatorManager { } td::actor::send_closure(queue_size_counter_, &QueueSizeCounter::get_queue_size, block_id, std::move(promise)); } + void get_block_handle_for_litequery(BlockIdExt block_id, td::Promise 
promise) override { + get_block_handle(block_id, false, promise.wrap([](BlockHandle &&handle) -> ConstBlockHandle { return handle; })); + } + void get_block_by_lt_from_db_for_litequery(AccountIdPrefixFull account, LogicalTime lt, + td::Promise promise) override { + get_block_by_lt_from_db(account, lt, std::move(promise)); + } + void get_block_by_unix_time_from_db_for_litequery(AccountIdPrefixFull account, UnixTime ts, + td::Promise promise) override { + get_block_by_unix_time_from_db(account, ts, std::move(promise)); + } + void get_block_by_seqno_from_db_for_litequery(AccountIdPrefixFull account, BlockSeqno seqno, + td::Promise promise) override { + get_block_by_seqno_from_db(account, seqno, std::move(promise)); + } private: td::Ref opts_; diff --git a/validator/manager.cpp b/validator/manager.cpp index 54d272bc9..21fa5887c 100644 --- a/validator/manager.cpp +++ b/validator/manager.cpp @@ -784,6 +784,8 @@ void ValidatorManagerImpl::wait_block_message_queue_short(BlockIdExt block_id, t void ValidatorManagerImpl::get_external_messages(ShardIdFull shard, td::Promise>> promise) { + td::Timer t; + size_t processed = 0, deleted = 0; std::vector> res; MessageId left{AccountIdPrefixFull{shard.workchain, shard.shard & (shard.shard - 1)}, Bits256::zero()}; auto it = ext_messages_.lower_bound(left); @@ -792,10 +794,12 @@ void ValidatorManagerImpl::get_external_messages(ShardIdFull shard, if (!shard_contains(shard, s.dst)) { break; } + ++processed; if (it->second->expired()) { ext_addr_messages_[it->second->address()].erase(it->first.hash); ext_messages_hashes_.erase(it->first.hash); it = ext_messages_.erase(it); + ++deleted; continue; } if (it->second->is_active()) { @@ -803,6 +807,9 @@ void ValidatorManagerImpl::get_external_messages(ShardIdFull shard, } it++; } + LOG(WARNING) << "get_external_messages to shard " << shard.to_str() << " : time=" << t.elapsed() + << " result_size=" << res.size() << " processed=" << processed << " expired=" << deleted + << " total_size=" << 
ext_messages_.size(); promise.set_value(std::move(res)); } @@ -1356,7 +1363,18 @@ td::Ref ValidatorManagerImpl::do_get_last_liteserver_state() { if (last_masterchain_state_.is_null()) { return {}; } - if (last_liteserver_state_.is_null() || last_liteserver_state_->get_unix_time() < td::Clocks::system() - 30) { + if (last_liteserver_state_.is_null()) { + last_liteserver_state_ = last_masterchain_state_; + return last_liteserver_state_; + } + if (last_liteserver_state_->get_seqno() == last_masterchain_state_->get_seqno()) { + return last_liteserver_state_; + } + // If liteserver seqno (i.e. shard client) lags then use last masterchain state for liteserver + // Allowed lag depends on the block rate + double time_per_block = double(last_masterchain_state_->get_unix_time() - last_liteserver_state_->get_unix_time()) / + double(last_masterchain_state_->get_seqno() - last_liteserver_state_->get_seqno()); + if (td::Clocks::system() - double(last_liteserver_state_->get_unix_time()) > std::min(time_per_block * 8, 180.0)) { last_liteserver_state_ = last_masterchain_state_; } return last_liteserver_state_; @@ -2316,8 +2334,11 @@ void ValidatorManagerImpl::update_shard_client_block_handle(BlockHandle handle, td::Promise promise) { shard_client_handle_ = std::move(handle); auto seqno = shard_client_handle_->id().seqno(); - if (last_liteserver_state_.is_null() || last_liteserver_state_->get_block_id().seqno() < seqno) { - last_liteserver_state_ = std::move(state); + if (state.not_null()) { + shard_client_shards_ = state->get_shards(); + if (last_liteserver_state_.is_null() || last_liteserver_state_->get_block_id().seqno() < seqno) { + last_liteserver_state_ = std::move(state); + } } shard_client_update(seqno); promise.set_value(td::Unit()); @@ -2356,15 +2377,15 @@ void ValidatorManagerImpl::alarm() { } if (log_status_at_.is_in_past()) { if (last_masterchain_block_handle_) { - LOG(INFO) << "STATUS: last_masterchain_block_ago=" - << td::format::as_time(td::Clocks::system() - 
last_masterchain_block_handle_->unix_time()) - << " last_known_key_block_ago=" - << td::format::as_time(td::Clocks::system() - (last_known_key_block_handle_->inited_unix_time() - ? last_known_key_block_handle_->unix_time() - : 0)) - << " shard_client_ago=" - << td::format::as_time(td::Clocks::system() - - (shard_client_handle_ ? shard_client_handle_->unix_time() : 0)); + LOG(ERROR) << "STATUS: last_masterchain_block_ago=" + << td::format::as_time(td::Clocks::system() - last_masterchain_block_handle_->unix_time()) + << " last_known_key_block_ago=" + << td::format::as_time(td::Clocks::system() - (last_known_key_block_handle_->inited_unix_time() + ? last_known_key_block_handle_->unix_time() + : 0)) + << " shard_client_ago=" + << td::format::as_time(td::Clocks::system() - + (shard_client_handle_ ? shard_client_handle_->unix_time() : 0)); } log_status_at_ = td::Timestamp::in(60.0); } @@ -2619,18 +2640,19 @@ void ValidatorManagerImpl::log_validator_session_stats(BlockIdExt block_id, } std::vector> rounds; - for (const auto& round : stats.rounds) { + for (const auto &round : stats.rounds) { std::vector> producers; - for (const auto& producer : round.producers) { + for (const auto &producer : round.producers) { producers.push_back(create_tl_object( - producer.id.bits256_value(), producer.block_status, producer.block_timestamp)); + producer.id.bits256_value(), producer.candidate_id, producer.block_status, producer.block_timestamp, + producer.comment)); } rounds.push_back(create_tl_object(round.timestamp, std::move(producers))); } auto obj = create_tl_object( - create_tl_block_id_simple(block_id.id), stats.timestamp, stats.self.bits256_value(), - stats.creator.bits256_value(), stats.total_validators, stats.total_weight, stats.signatures, + stats.success, create_tl_block_id(block_id), stats.timestamp, stats.self.bits256_value(), stats.session_id, + stats.cc_seqno, stats.creator.bits256_value(), stats.total_validators, stats.total_weight, stats.signatures, 
stats.signatures_weight, stats.approve_signatures, stats.approve_signatures_weight, stats.first_round, std::move(rounds)); std::string s = td::json_encode(td::ToJson(*obj.get()), false); @@ -2644,6 +2666,143 @@ void ValidatorManagerImpl::log_validator_session_stats(BlockIdExt block_id, LOG(INFO) << "Writing validator session stats for " << block_id.id; } +void ValidatorManagerImpl::get_block_handle_for_litequery(BlockIdExt block_id, td::Promise promise) { + get_block_handle( + block_id, false, + [SelfId = actor_id(this), block_id, promise = std::move(promise)](td::Result R) mutable { + if (R.is_ok() && R.ok()->is_applied()) { + promise.set_value(R.move_as_ok()); + } else { + td::actor::send_closure(SelfId, &ValidatorManagerImpl::process_block_handle_for_litequery_error, block_id, + std::move(R), std::move(promise)); + } + }); +} + +void ValidatorManagerImpl::get_block_by_lt_from_db_for_litequery(AccountIdPrefixFull account, LogicalTime lt, + td::Promise promise) { + get_block_by_lt_from_db( + account, lt, [=, SelfId = actor_id(this), promise = std::move(promise)](td::Result R) mutable { + if (R.is_ok() && R.ok()->is_applied()) { + promise.set_value(R.move_as_ok()); + } else { + td::actor::send_closure(SelfId, &ValidatorManagerImpl::process_lookup_block_for_litequery_error, account, 0, + lt, std::move(R), std::move(promise)); + } + }); +} + +void ValidatorManagerImpl::get_block_by_unix_time_from_db_for_litequery(AccountIdPrefixFull account, UnixTime ts, + td::Promise promise) { + get_block_by_unix_time_from_db( + account, ts, [=, SelfId = actor_id(this), promise = std::move(promise)](td::Result R) mutable { + if (R.is_ok() && R.ok()->is_applied()) { + promise.set_value(R.move_as_ok()); + } else { + td::actor::send_closure(SelfId, &ValidatorManagerImpl::process_lookup_block_for_litequery_error, account, 1, + ts, std::move(R), std::move(promise)); + } + }); +} + +void ValidatorManagerImpl::get_block_by_seqno_from_db_for_litequery(AccountIdPrefixFull account, 
BlockSeqno seqno, + td::Promise promise) { + get_block_by_seqno_from_db( + account, seqno, + [=, SelfId = actor_id(this), promise = std::move(promise)](td::Result R) mutable { + if (R.is_ok() && R.ok()->is_applied()) { + promise.set_value(R.move_as_ok()); + } else { + td::actor::send_closure(SelfId, &ValidatorManagerImpl::process_lookup_block_for_litequery_error, account, 2, + seqno, std::move(R), std::move(promise)); + } + }); +} + +void ValidatorManagerImpl::process_block_handle_for_litequery_error(BlockIdExt block_id, + td::Result r_handle, + td::Promise promise) { + td::Status err; + if (r_handle.is_error()) { + err = r_handle.move_as_error(); + } else { + auto handle = r_handle.move_as_ok(); + if (handle->is_applied()) { + promise.set_value(std::move(handle)); + return; + } + if (!handle->received() || !handle->received_state()) { + err = td::Status::Error(ErrorCode::notready, PSTRING() << "block " << block_id.id.to_str() << " is not in db"); + } else { + err = td::Status::Error(ErrorCode::notready, PSTRING() << "block " << block_id.id.to_str() << " is not applied"); + } + } + if (block_id.is_masterchain()) { + if (block_id.seqno() > last_masterchain_seqno_) { + err = err.move_as_error_suffix(PSTRING() << " (last known masterchain block: " << last_masterchain_seqno_ << ")"); + } + } else { + for (auto &shard : shard_client_shards_) { + if (shard_intersects(shard->shard(), block_id.shard_full())) { + if (block_id.seqno() > shard->top_block_id().seqno()) { + err = err.move_as_error_suffix( + PSTRING() << " (possibly out of sync: shard_client_seqno=" + << (shard_client_handle_ ? shard_client_handle_->id().seqno() : 0) << " ls_seqno=" + << (last_liteserver_state_.not_null() ? 
last_liteserver_state_->get_seqno() : 0) << ")"); + } + break; + } + } + } + promise.set_error(std::move(err)); +} + +void ValidatorManagerImpl::process_lookup_block_for_litequery_error(AccountIdPrefixFull account, int type, + td::uint64 value, + td::Result r_handle, + td::Promise promise) { + td::Status err; + if (r_handle.is_error()) { + err = r_handle.move_as_error(); + } else { + auto handle = r_handle.move_as_ok(); + if (handle->is_applied()) { + promise.set_value(std::move(handle)); + return; + } + if (!handle->received() || !handle->received_state()) { + err = td::Status::Error(ErrorCode::notready, PSTRING() << "block " << handle->id().to_str() << " is not in db"); + } else { + err = td::Status::Error(ErrorCode::notready, PSTRING() << "block " << handle->id().to_str() << " is not applied"); + } + } + if (account.is_masterchain()) { + if (value > (type == 0 + ? last_masterchain_state_->get_logical_time() + : (type == 1 ? last_masterchain_state_->get_unix_time() : last_masterchain_state_->get_seqno()))) { + err = err.move_as_error_suffix(PSTRING() << " (last known masterchain block: " << last_masterchain_seqno_ << ")"); + } + } else { + for (auto &shard : shard_client_shards_) { + if (shard_intersects(shard->shard(), account.as_leaf_shard())) { + if (value > (type == 0 ? shard->end_lt() + : (type == 1 ? (shard_client_handle_ ? shard_client_handle_->unix_time() : 0) + : shard->top_block_id().seqno()))) { + err = err.move_as_error_suffix( + PSTRING() << " (possibly out of sync: shard_client_seqno=" + << (shard_client_handle_ ? shard_client_handle_->id().seqno() : 0) << " ls_seqno=" + << (last_liteserver_state_.not_null() ? 
last_liteserver_state_->get_seqno() : 0) << ")"); + } + break; + } + } + } + static std::string names[3] = {"lt", "utime", "seqno"}; + err = err.move_as_error_prefix(PSTRING() << "cannot find block " << account.to_str() << " " << names[type] << "=" + << value << ": "); + promise.set_error(std::move(err)); +} + td::actor::ActorOwn ValidatorManagerFactory::create( td::Ref opts, std::string db_root, td::actor::ActorId keyring, td::actor::ActorId adnl, td::actor::ActorId rldp, diff --git a/validator/manager.hpp b/validator/manager.hpp index 9f51cc27d..bdf0155ee 100644 --- a/validator/manager.hpp +++ b/validator/manager.hpp @@ -252,6 +252,7 @@ class ValidatorManagerImpl : public ValidatorManager { BlockHandle last_key_block_handle_; BlockHandle last_known_key_block_handle_; BlockHandle shard_client_handle_; + std::vector> shard_client_shards_; td::Ref last_liteserver_state_; td::Ref do_get_last_liteserver_state(); @@ -561,6 +562,19 @@ class ValidatorManagerImpl : public ValidatorManager { td::actor::send_closure(queue_size_counter_, &QueueSizeCounter::get_queue_size, block_id, std::move(promise)); } + void get_block_handle_for_litequery(BlockIdExt block_id, td::Promise promise) override; + void get_block_by_lt_from_db_for_litequery(AccountIdPrefixFull account, LogicalTime lt, + td::Promise promise) override; + void get_block_by_unix_time_from_db_for_litequery(AccountIdPrefixFull account, UnixTime ts, + td::Promise promise) override; + void get_block_by_seqno_from_db_for_litequery(AccountIdPrefixFull account, BlockSeqno seqno, + td::Promise promise) override; + void process_block_handle_for_litequery_error(BlockIdExt block_id, td::Result r_handle, + td::Promise promise); + void process_lookup_block_for_litequery_error(AccountIdPrefixFull account, int type, td::uint64 value, + td::Result r_handle, + td::Promise promise); + private: td::Timestamp resend_shard_blocks_at_; td::Timestamp check_waiters_at_; diff --git a/validator/validator-group.cpp 
b/validator/validator-group.cpp index c1f4f38a9..51217bf9d 100644 --- a/validator/validator-group.cpp +++ b/validator/validator-group.cpp @@ -136,6 +136,7 @@ void ValidatorGroup::accept_block_candidate(td::uint32 round_id, PublicKeyHash s std::vector approve_signatures, validatorsession::ValidatorSessionStats stats, td::Promise promise) { + stats.cc_seqno = validator_set_->get_catchain_seqno(); if (round_id >= last_known_round_id_) { last_known_round_id_ = round_id + 1; } @@ -150,6 +151,7 @@ void ValidatorGroup::accept_block_candidate(td::uint32 round_id, PublicKeyHash s return; } auto next_block_id = create_next_block_id(root_hash, file_hash); + LOG(WARNING) << "Accepted block " << next_block_id; td::actor::send_closure(manager_, &ValidatorManager::log_validator_session_stats, next_block_id, std::move(stats)); auto block = block_data.size() > 0 ? create_block(next_block_id, std::move(block_data)).move_as_ok() : td::Ref{}; @@ -353,6 +355,19 @@ void ValidatorGroup::start(std::vector prev, BlockIdExt min_masterch void ValidatorGroup::destroy() { if (!session_.empty()) { + td::actor::send_closure(session_, &validatorsession::ValidatorSession::get_current_stats, + [manager = manager_, cc_seqno = validator_set_->get_catchain_seqno(), + block_id = create_next_block_id(RootHash::zero(), FileHash::zero())]( + td::Result R) { + if (R.is_error()) { + LOG(WARNING) << "Failed to get validator session stats: " << R.move_as_error(); + return; + } + auto stats = R.move_as_ok(); + stats.cc_seqno = cc_seqno; + td::actor::send_closure(manager, &ValidatorManager::log_validator_session_stats, block_id, + std::move(stats)); + }); auto ses = session_.release(); delay_action([ses]() mutable { td::actor::send_closure(ses, &validatorsession::ValidatorSession::destroy); }, td::Timestamp::in(10.0));