diff --git a/.dockerignore b/.dockerignore index 85b37d840..789dce485 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,2 @@ # ./bin +build \ No newline at end of file diff --git a/.github/workflows/build-test.yaml b/.github/workflows/build-test.yaml new file mode 100644 index 000000000..fe0889d77 --- /dev/null +++ b/.github/workflows/build-test.yaml @@ -0,0 +1,52 @@ +name: Build Test + +on: + push: + branches: + - master + - develop + + pull_request: + branches: + - master + - develop + +jobs: + unit-test: + strategy: + matrix: + go-version: [1.17.x] + os: [ubuntu-18.04] + runs-on: ${{ matrix.os }} + steps: + - name: Install Go + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: Checkout code + uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + # In order: + # * Module download cache + # * Build cache (Linux) + # * Build cache (Mac) + # * Build cache (Windows) + path: | + ~/go/pkg/mod + ~/.cache/go-build + ~/Library/Caches/go-build + %LocalAppData%\go-build + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Test Build + run: | + export GOPATH=$(go env GOPATH) + go install github.com/zeromicro/go-zero/tools/goctl@v1.4.0 + make build + + diff --git a/.github/workflows/deploy-on-qa1.yml b/.github/workflows/deploy-on-qa1.yml new file mode 100644 index 000000000..e9d4fd84e --- /dev/null +++ b/.github/workflows/deploy-on-qa1.yml @@ -0,0 +1,38 @@ +name: Deploy zkbas on qa1 + +on: + push: + branches: + - qa1 + +jobs: + deploy: + runs-on: self-hosted + permissions: + issues: write + pull-requests: write + steps: + - name: Deploy new zkbas on qa1 + run: | + echo 'fetch zkbas repo' + export BRANCH=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }') + + cd ~ + rm -rf ./zkbas + git clone --branch qa1 https://github.com/bnb-chain/zkbas.git + cd ./zkbas + + sudo scp -r ./deploy-qa.sh "qa1:/tmp/" + sudo ssh qa1 "sudo bash -x /tmp/deploy-qa.sh qa1;exit" + echo "end deploy on qa1" + + - name: Notification via slack + run: | + export SLACK_WEBHOOK_URL=`sudo cat /home/ec2-user/actions-runner/slack-config.json | jq -r '.slack'` + export JOB_STATUS=${{ job.status }} + sudo scp -r qa1:/root/zkbas-deploy/zkbas-contract/info/addresses.json ~/addresses.json + export ZkBas=`sudo cat ~/addresses.json | jq -r '.zkbasProxy'` + export AssetGov=`sudo cat ~/addresses.json | jq -r '.assetGovernance'` + curl -X POST $SLACK_WEBHOOK_URL --header 'Content-Type: application/json' \ + --data-raw '{ "author": "@'$GITHUB_ACTOR'", "status": "'$JOB_STATUS'", "ref": "'$GITHUB_REF'", "event": "'$GITHUB_EVENT_NAME'", "url": "'$GITHUB_SERVER_URL'/'$GITHUB_REPOSITORY'/commit/'$GITHUB_SHA'/checks", "ZkbasContract": "'$ZkBas'", "AssetGovContract": "'$AssetGov'" }' + diff --git a/.github/workflows/deploy-on-qa2.yml b/.github/workflows/deploy-on-qa2.yml new file mode 100644 index 000000000..c570ccfb4 --- /dev/null +++ b/.github/workflows/deploy-on-qa2.yml @@ -0,0 +1,38 @@ +name: Deploy zkbas on qa2 + +on: + push: + branches: + - qa2 + +jobs: + deploy: + runs-on: self-hosted + permissions: + issues: write + pull-requests: write + steps: + - name: Deploy new zkbas on qa2 + run: | + echo 'fetch zkbas repo' + export BRANCH=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }') + + cd ~ + rm -rf ./zkbas + git clone --branch qa2 https://github.com/bnb-chain/zkbas.git + cd ./zkbas + + sudo scp -r ./deploy-qa.sh "qa2:/tmp/" + sudo ssh qa2 "sudo bash -x /tmp/deploy-qa.sh qa2;exit" + echo "end deploy on qa2" + + - name: 
Notification via slack + run: | + export SLACK_WEBHOOK_URL=`sudo cat /home/ec2-user/actions-runner/slack-config.json | jq -r '.slack'` + export JOB_STATUS=${{ job.status }} + sudo scp -r qa2:/root/zkbas-deploy/zkbas-contract/info/addresses.json ~/addresses.json + export ZkBas=`sudo cat ~/addresses.json | jq -r '.zkbasProxy'` + export AssetGov=`sudo cat ~/addresses.json | jq -r '.assetGovernance'` + curl -X POST $SLACK_WEBHOOK_URL --header 'Content-Type: application/json' \ + --data-raw '{ "author": "@'$GITHUB_ACTOR'", "status": "'$JOB_STATUS'", "ref": "'$GITHUB_REF'", "event": "'$GITHUB_EVENT_NAME'", "url": "'$GITHUB_SERVER_URL'/'$GITHUB_REPOSITORY'/commit/'$GITHUB_SHA'/checks", "ZkbasContract": "'$ZkBas'", "AssetGovContract": "'$AssetGov'" }' + diff --git a/.github/workflows/deploy-on-qa3.yml b/.github/workflows/deploy-on-qa3.yml new file mode 100644 index 000000000..e8d3bd3df --- /dev/null +++ b/.github/workflows/deploy-on-qa3.yml @@ -0,0 +1,38 @@ +name: Deploy zkbas on qa3 + +on: + push: + branches: + - qa3 + +jobs: + deploy: + runs-on: self-hosted + permissions: + issues: write + pull-requests: write + steps: + - name: Deploy new zkbas on qa3 + run: | + echo 'fetch zkbas repo' + export BRANCH=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }') + + cd ~ + rm -rf ./zkbas + git clone --branch qa3 https://github.com/bnb-chain/zkbas.git + cd ./zkbas + + sudo scp -r ./deploy-qa.sh "qa3:/tmp/" + sudo ssh qa3 "sudo bash -x /tmp/deploy-qa.sh qa3;exit" + echo "end deploy on qa3" + + - name: Notification via slack + run: | + export SLACK_WEBHOOK_URL=`sudo cat /home/ec2-user/actions-runner/slack-config.json | jq -r '.slack'` + export JOB_STATUS=${{ job.status }} + sudo scp -r qa3:/root/zkbas-deploy/zkbas-contract/info/addresses.json ~/addresses.json + export ZkBas=`sudo cat ~/addresses.json | jq -r '.zkbasProxy'` + export AssetGov=`sudo cat ~/addresses.json | jq -r '.assetGovernance'` + curl -X POST $SLACK_WEBHOOK_URL --header 'Content-Type: application/json' \ + --data-raw '{ "author": "@'$GITHUB_ACTOR'", "status": "'$JOB_STATUS'", "ref": "'$GITHUB_REF'", "event": "'$GITHUB_EVENT_NAME'", "url": "'$GITHUB_SERVER_URL'/'$GITHUB_REPOSITORY'/commit/'$GITHUB_SHA'/checks", "ZkbasContract": "'$ZkBas'", "AssetGovContract": "'$AssetGov'" }' + diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index d5744f31f..4db77f3aa 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -1,9 +1,17 @@ -name: Integration +name: Integration Test on Runner on: push: branches: - develop + - master + pull_request: + branches: + - master + - develop +env: + L1_ENDPOINT: https://data-seed-prebsc-1-s1.binance.org:8545 + L2_ENDPOINT: http://localhost:8888 jobs: deploy: @@ -12,19 +20,63 @@ jobs: issues: write pull-requests: write steps: - - name: deploy new zkbas + - name: deploy new zkbas on runner run: | - cd /tmp + echo 'fetch zkbas repo' + export PR_NUMBER=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }') + echo Pull requests $PR_NUMBER + + cd ~ sudo rm -rf ./zkbas - git clone --branch github-action https://github.com/bnb-chain/zkbas.git + git clone --branch develop https://github.com/bnb-chain/zkbas.git + + cd ./zkbas + git fetch origin pull/$PR_NUMBER/head:local-deploy-tmp + git checkout local-deploy-tmp + git rev-parse HEAD + echo "start deploy new zkbas" - sudo bash -x ./zkbas/deploy-local.sh new + sudo bash ./deployment/tool/generate_api.sh + go mod tidy + docker image prune -f + make docker-image + cp -r 
/server/test.keyfile ./deployment/ + mv ./deployment/test.keyfile ./deployment/.zkbas + blockNr=$(sudo bash ./deployment/tool/tool.sh blockHeight) + sudo bash ./deployment/tool/tool.sh all + sudo bash ./deployment/docker-compose/docker-compose.sh down + sudo bash ./deployment/docker-compose/docker-compose.sh up $blockNr + echo "Waiting 10m for the initialization tx to be verified" + sleep 10m # Waiting for the initialization tx to be verified echo "end deploy" - name: run integration test run: | - echo "start integration test" - cd /tmp - sudo bash -x ./zkbas/local-test.sh - echo "end integration test" + export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin + export ZkBas=$(sudo cat ~/zkbas/deployment/dependency/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy') + export AssetGov=$(sudo cat ~/zkbas/deployment/dependency/zkbas-contract/info/addresses.json | jq -r '.assetGovernance') + export TestLogLevel=2 + export L1EndPoint=$L1_ENDPOINT + export L2EndPoint=$L2_ENDPOINT + + cd /tmp && sudo rm -rf ./zkbas-integration-test + git clone --branch main https://github.com/bnb-chain/zkbas-integration-test.git + cd ./zkbas-integration-test/tests + + echo '1. start TestSetupSuite' + go test -v -run TestSetupSuite -timeout 30m + + echo '2. start L1 test' + go test -v -run TestL1Suite -timeout 30m + + echo '3. start L2 test' + go test -v -run TestL2Suite -timeout 30m + - name: notification via slack + run: | + export SLACK_WEBHOOK_URL=`sudo cat /home/ec2-user/actions-runner/slack-config.json | jq -r '.slack'` + export JOB_STATUS=${{ job.status }} + export ZkBas=`sudo cat /root/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy'` + export AssetGov=`sudo cat /root/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.assetGovernance'` + curl -X POST $SLACK_WEBHOOK_URL --header 'Content-Type: application/json' \ + --data-raw '{ "author": "@'$GITHUB_ACTOR'", "status": "'$JOB_STATUS'", "ref": "'$GITHUB_REF'", "event": "'$GITHUB_EVENT_NAME'", "url": "'$GITHUB_SERVER_URL'/'$GITHUB_REPOSITORY'/commit/'$GITHUB_SHA'/checks", "ZkbasContract": "'$ZkBas'", "AssetGovContract": "'$AssetGov'" }' diff --git a/.github/workflows/issue-trigger.yml b/.github/workflows/issue-trigger.yml new file mode 100644 index 000000000..07b46a413 --- /dev/null +++ b/.github/workflows/issue-trigger.yml @@ -0,0 +1,25 @@ +name: Issue Trigger + +on: + pull_request: + types: [opened, edited, closed] + issue_comment: + types: [created, edited, deleted] + +jobs: + trigger: + runs-on: self-hosted + permissions: + issues: write + pull-requests: write + steps: + - name: update-integration-keyfile + if: contains(github.event.pull_request.body, '/update-integration-keyfile') # check the comment if it contains the keywords + run: | + cd /server + sudo rm -rf ./zkbas + sudo git clone --branch develop https://github.com/bnb-chain/zkbas.git + cd ./zkbas + sudo bash ./deployment/tool/tool.sh prepare new + sudo rm -rf /server/test.keyfile + sudo cp -r ./deployment/.zkbas /server/test.keyfile diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 000000000..c930e386d --- /dev/null +++ b/.github/workflows/lint.yaml @@ -0,0 +1,52 @@ +name: Lint + +on: + push: + branches: + - master + - develop + + pull_request: + branches: + - master + - develop + +jobs: + lint: + strategy: + matrix: + go-version: [1.17.x] + os: [ubuntu-18.04] + runs-on: ${{ matrix.os }} + steps: + - name: Install Go + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: 
Checkout code + uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + # In order: + # * Module download cache + # * Build cache (Linux) + # * Build cache (Mac) + # * Build cache (Windows) + path: | + ~/go/pkg/mod + ~/.cache/go-build + ~/Library/Caches/go-build + %LocalAppData%\go-build + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Lint + run: | + export GOPATH=$(go env GOPATH) + go install github.com/zeromicro/go-zero/tools/goctl@v1.4.0 + make api-server + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.49.0 + golangci-lint run ./... diff --git a/.github/workflows/unit-test.yaml b/.github/workflows/unit-test.yaml new file mode 100644 index 000000000..63fc62433 --- /dev/null +++ b/.github/workflows/unit-test.yaml @@ -0,0 +1,53 @@ +name: Unit Test + +on: + push: + branches: + - master + - develop + + pull_request: + branches: + - master + - develop + +jobs: + unit-test: + strategy: + matrix: + go-version: [1.17.x] + os: [ubuntu-18.04] + runs-on: ${{ matrix.os }} + steps: + - name: Install Go + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: Checkout code + uses: actions/checkout@v2 + + - uses: actions/cache@v2 + with: + # In order: + # * Module download cache + # * Build cache (Linux) + # * Build cache (Mac) + # * Build cache (Windows) + path: | + ~/go/pkg/mod + ~/.cache/go-build + ~/Library/Caches/go-build + %LocalAppData%\go-build + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Uint Test + env: + ANDROID_HOME: "" # Skip android test + run: | + export GOPATH=$(go env GOPATH) + go install github.com/zeromicro/go-zero/tools/goctl@v1.4.0 + make api-server + make test diff --git a/.gitignore b/.gitignore index ede923466..64dda6b1d 100644 --- a/.gitignore +++ b/.gitignore @@ -24,7 +24,13 @@ ***/types/types.go -*.vk +**/main + *.pk +*.vk -**/main \ No newline at end of file +build +vendor +deployment/dependency +deployment/configs +deployment/.zkbas \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..3810a8979 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,37 @@ +FROM golang:1.17-alpine as builder + +RUN apk add --no-cache make git bash + +ADD . /zkbas + +ENV CGO_ENABLED=0 +ENV GO111MODULE=on + +RUN cd /zkbas && make build-only + +# Pull zkBAS into a second stage deploy alpine container +FROM alpine:3.16.0 + +ARG USER=bsc +ARG USER_UID=1000 +ARG USER_GID=1000 + +ENV PACKAGES ca-certificates~=20220614 bash~=5.1.16-r2 +ENV WORKDIR=/server + +RUN apk add --no-cache $PACKAGES \ + && rm -rf /var/cache/apk/* \ + && addgroup -g ${USER_GID} ${USER} \ + && adduser -u ${USER_UID} -G ${USER} --shell /sbin/nologin --no-create-home -D ${USER} \ + && addgroup ${USER} tty \ + && sed -i -e "s/bin\/sh/bin\/bash/" /etc/passwd + +RUN echo "[ ! 
-z \"\$TERM\" -a -r /etc/motd ] && cat /etc/motd" >> /etc/bash/bashrc + +WORKDIR ${WORKDIR} + +COPY --from=builder /zkbas/build/bin/zkbas ${WORKDIR}/ +RUN chown -R ${USER_UID}:${USER_GID} ${WORKDIR} +USER ${USER_UID}:${USER_GID} + +ENTRYPOINT ["/server/zkbas"] diff --git a/Makefile b/Makefile index 5fd898990..d48e94ee9 100644 --- a/Makefile +++ b/Makefile @@ -7,24 +7,40 @@ .PHONY: zkbas-linux-arm zkbas-linux-arm-5 zkbas-linux-arm-6 zkbas-linux-arm-7 zkbas-linux-arm64 .PHONY: zkbas-darwin zkbas-darwin-386 zkbas-darwin-amd64 .PHONY: zkbas-windows zkbas-windows-386 zkbas-windows-amd64 +GOBIN?=${GOPATH}/bin -APP = ./service/api/app -EXPLORE = ./service/api/explorer +VERSION=$(shell git describe --tags) +GIT_COMMIT=$(shell git rev-parse HEAD) +GIT_COMMIT_DATE=$(shell git log -n1 --pretty='format:%cd' --date=format:'%Y%m%d') +REPO=github.com/bnb-chain/zkbas +IMAGE_NAME=ghcr.io/bnb-chain/zkbas +API_SERVER = ./service/apiserver -globalRPCProtoPath = ./service/rpc/globalRPC +api-server: + cd $(API_SERVER) && ${GOBIN}/goctl api go -api server.api -dir .; + @echo "Done generate server api"; -app: - cd $(APP) && goctl api go -api app.api -dir .; - @echo "Done generate app api"; +deploy: + sudo bash -x ./deploy-local.sh new +integration-test: + sudo bash -x ./local-test.sh -globalRPCProto: - cd $(globalRPCProtoPath) && goctl rpc protoc globalRPC.proto --go_out=. --go-grpc_out=. --zrpc_out=.; - @echo "Done generate globalRPCProto"; +test: api-server + @echo "--> Running go test" + @go test ./... +tools: + go install -u github.com/zeromicro/go-zero/tools/goctl@v1.4.0 -deploy: - sudo bash -x ./deploy-local.sh new +build: api-server build-only -test: - sudo bash -x ./local-test.sh +lint: + golangci-lint run ./... + +build-only: + go build -o build/bin/zkbas -ldflags="-X main.version=${VERSION} -X main.gitCommit=${GIT_COMMIT} -X main.gitDate=${GIT_COMMIT_DATE}" ./cmd/zkbas + +docker-image: + go mod vendor # temporary, should be removed after open source + docker build . -t ${IMAGE_NAME} \ No newline at end of file diff --git a/README.md b/README.md index eaead13bc..2f6d26186 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,177 @@ -# zkbas +# ZkRollup BNB Application Side Chain +![banner](./docs/assets/banner.png) -### goctl +The ZkRollup BNB Application Side Chain(ZkBAS) is an infrastructure for developers that helps them to build large scale +BSC-based apps with higher throughput and much lower or even zero transaction fees. + +ZkBAS is built on ZK Rollup architecture. ZkBAS bundle (or “roll-up”) hundreds of transactions off-chain and generates +cryptographic proof. These proofs can come in the form of SNARKs (succinct non-interactive argument of knowledge) which +can prove the validity of every single transaction in the Rollup Block. It means all funds are held on the BSC, +while computation and storage are performed on BAS with less cost and fast speed. + +ZkBAS achieves the following goals: +- **L1 security**. The ZkBAS share the same security as BSC does. Thanks to zkSNARK proofs, the security is guaranteed by + cryptographic. Users do not have to trust any third parties or keep monitoring the Rollup blocks in order to + prevent fraud. +- **L1<>L2 Communication**. BNB, and BEP20/BEP721/BEP1155 created on BSC or zkBAS can flow freely between BSC and zkBAS. +- **Built-in instant AMM swap**. It allows digital assets to be traded without permission and automatically by using + liquidity pools. +- **Built-in NFT marketplace**. 
Developers can build marketplaces for crypto collectibles and non-fungible tokens (NFTs)
+  out of the box on ZkBAS.
+- **Fast transaction speed and faster finality**.
+- **Low gas fee**. The gas token on ZkBAS can be either a BEP20 token or BNB.
+- **"Full exit" on BSC**. Users can request this operation to withdraw funds if they believe their transactions
+  are being censored by ZkBAS.
+
+ZkBAS started its development based on [Zecrey](https://github.com/bnb-chain/zecrey-legend); special thanks to the
+[Zecrey](https://www.zecrey.com/) team.
+
+## Table of Contents
+
+- [Framework](#Framework)
+- [Document](#Document)
+- [Key Features](#Key-Features)
+  + [Digital Asset Management](#Digital-Asset-Management)
+  + [NFT Management and Marketplace](#NFT-Management-and-Marketplace)
+  + [AMM Exchange](#AMM-Exchange)
+  + [Native Name Service](#Native-Name-Service)
+  + [Seamless L1 Wallet Management](#Seamless-L1-Wallet-Management)
+- [Key Tech](#Key-Tech)
+  + [Sparse Merkle Tree K-V Store](#Sparse-Merkle-Tree-KV-Store)
+  + [Circuit Model](#Circuit-Model)
+- [Building from Source](#Building-from-Source)
+- [Dev Network Setup](#Dev-Network-Setup)
+- [Testnet(coming soon)](#Testnet(coming-soon))
+- [Contribution](#Contribution)
+- [Related Projects](#Related-Projects)
+- [Outlook](#Outlook)
+
+## Framework
+![Framework](./docs/assets/Frame_work.png)
+
+- **committer**. The committer executes transactions and produces consecutive blocks.
+- **monitor**. The monitor tracks events on BSC and translates them into **transactions** on ZkBAS.
+- **witness**. The witness re-executes the transactions within a block and generates the witness materials.
+- **prover**. The prover generates cryptographic proofs based on the witness materials.
+- **sender**. The sender rolls up the compressed L2 blocks to L1 and submits the proofs for verification.
+- **api server**. The API server is the access endpoint for most users; it provides rich data, including
+  digital assets, blocks, transactions, swap info and gas fees.
+- **recovery**. A tool that recovers the sparse merkle tree in KV-Rocks from the world state in PostgreSQL.
+
+## Document
+The `./docs` directory contains a lot of useful documentation. You can find the detailed design and tutorials [there](docs/readme.md).
+
+## Key Features
+
+### Digital Asset Management
+ZkBAS will serve as an alternative marketplace for issuing, using, paying with and exchanging digital assets in a
+decentralized manner. ZkBAS and BSC share the same token universe for BNB, BEP20 and NFT tokens. This defines:
+- The same token can circulate on both networks and flow between them bi-directionally via L1 <> L2 communication.
+- The total circulation of the same token should be managed across the two networks, i.e. the total effective supply
+  of a token should be the sum of the token's total effective supply on both BSC and ZkBAS.
+- Tokens can only be initially created on BSC as BEP20 and then pegged to ZkBAS. Pegging a token onto ZkBAS is
+  permissionless.
+
+Users can **1. deposit, 2. transfer and 3. withdraw** both non-fungible and fungible tokens on ZkBAS.
+
+Users enter the ZK-rollup by **depositing tokens** in the rollup contract deployed on BSC. The ZkBAS monitor
+tracks deposits and submits each one as a layer-2 transaction; once the committer verifies the transaction, users
+receive the funds on their account and can start transacting by sending transactions to the committer for processing.
+
+Users can **transfer** any amount of funds to any existing account on ZkBAS by sending a signed transaction to the
+network.
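As a rough illustration of the deposit flow described above (a sketch only, not the actual monitor code: the rollup contract address and block range below are placeholders, and the real event decoding lives in the monitor service), a deposit watcher can scan the rollup contract on BSC for logs with go-ethereum and hand each one to L2 as a deposit transaction:

```go
package main

import (
	"context"
	"log"
	"math/big"

	ethereum "github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// BSC testnet RPC, the same endpoint used by the CI workflows in this repo.
	client, err := ethclient.Dial("https://data-seed-prebsc-1-s1.binance.org:8545")
	if err != nil {
		log.Fatal(err)
	}

	// Hypothetical rollup proxy address; the deployed one is read from addresses.json.
	rollup := common.HexToAddress("0x0000000000000000000000000000000000000000")

	query := ethereum.FilterQuery{
		FromBlock: big.NewInt(21_000_000), // placeholder block range
		ToBlock:   big.NewInt(21_000_100),
		Addresses: []common.Address{rollup},
	}
	logs, err := client.FilterLogs(context.Background(), query)
	if err != nil {
		log.Fatal(err)
	}
	for _, vLog := range logs {
		// In the real monitor each Deposit event would be decoded and stored as a
		// layer-2 transaction for the committer to execute.
		log.Printf("tx %s, block %d, topics %d", vLog.TxHash.Hex(), vLog.BlockNumber, len(vLog.Topics))
	}
}
```

The endpoint above is the BSC testnet RPC already used in the CI configuration; in a real deployment the contract address would come from the deployed contracts' `addresses.json`.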
+
+**Withdrawing** from ZkBAS to BSC is straightforward. The user initiates a withdrawal transaction, and the funds are
+burned on ZkBAS. Once the transaction has been rolled up in the next batch, the corresponding amount of tokens is
+unlocked from the rollup contract to the target account.
+
+### NFT Management and Marketplace
+We aim to provide an open-source NFT marketplace where users can browse, buy, sell or create their own NFTs.
+The metadata of NFTs on ZkBAS sticks to the [BSC standard](https://docs.bnbchain.org/docs/nft-metadata-standard/).
+ERC721-standard NFTs can be seamlessly deposited onto ZkBAS, and withdrawn back in reverse.
+
+![Marketplace framework](./docs/assets/NFT_Marketplace.png)
+
+The diagram above shows the framework of the NFT marketplace and ZkBAS. All buy/sell offers, NFT/collection metadata,
+media resources and account profiles are stored in the backend of the NFT marketplace; only the **contentHash**,
+**ownership**, **creatorTreasuryRate** and a few other fields are recorded on ZkBAS. To encourage price discovery,
+anyone can place buy/sell offers in the marketplace without paying any fees, since offers are cached in the backend
+instead of being sent to ZkBAS. Once an offer is matched, an **AtomicMatch** transaction consisting of the buy and
+sell offers is sent to ZkBAS to settle the trade. Users can also cancel an offer manually by sending a cancel-offer
+transaction that disables the cached offer in the backend.
+
+### AMM Exchange
+
+Automated market makers (AMM) are decentralized exchanges that pool liquidity from users and price the assets within
+the pool using algorithms.
+
+ZkBAS follows a similar mechanism to [UniSwap V2](https://docs.uniswap.org/protocol/V2/concepts/protocol-overview/how-uniswap-works).
+Anyone can become a liquidity provider (LP) for a pool by depositing an equivalent value of each underlying token in
+return for pool tokens. The difference from UniSwap is that the LP token on ZkBAS cannot be transferred or traded.
+Users can simply list a swap pair by calling the rollup contract on BSC.
+
+### Native Name Service
+No more copying and pasting long addresses on ZkBAS. Every account on ZkBAS gets a short name, which users can use to
+store funds and receive any cryptocurrency, token or NFT.
+
+### Seamless L1 Wallet Management
+ZkBAS natively supports ECDSA signatures and follows the [EIP712](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md)
+signing structure, which means most Ethereum wallets can seamlessly support ZkBAS. No extra effort is required for BSC
+users to leverage ZkBAS.
+
+## Key Tech
+
+### Sparse Merkle Tree KV Store
+Unlike most rollup solutions, which keep the state tree in memory, [BAS-SMT](https://github.com/bnb-chain/zkbas-smt/) is a versioned,
+snapshottable (immutable) sparse tree for persistent data. BAS-SMT is the key factor enabling massive adoption of ZkBAS.
+
+### Circuit Model
+[ZkBAS Crypto](https://github.com/bnb-chain/zkbas-crypto) is the library that describes the proving circuit. Once
+the ZK-rollup node has enough transactions, it aggregates them into a batch and compiles the inputs for the proving
+circuit into a succinct zk-proof.
+
+## Building from Source
+
+1. Install the necessary tools before building; this only needs to be executed once.
 ```shell
-# api
-goctl api go -api xx.api -dir . -style gozero
-# rpc
-goctl rpc protoc xx.proto --go_out=. --go-grpc_out=. --zrpc_out=.
+make tools
 ```
-### mockgen
-
+2. Build the binary.
```shell -go install github.com/golang/mock/mockgen@v1.6.0 -``` \ No newline at end of file +make build +``` + +## Dev Network Setup +We are preparing to set up the whole system using docker composer, it is coming soon.. + +## Testnet(coming soon) + +## Contribution +Thank you for considering to help out with the source code! We welcome contributions from anyone on the internet, +and are grateful for even the smallest of fixes! + +If you'd like to contribute to bsc, please fork, fix, commit and send a pull request for the maintainers to review +and merge into the main code base. If you wish to submit more complex changes though, Start by browsing +[new issues](https://github.com/bnb-chain/zkbas/issues) and [BEPs](https://github.com/bnb-chain/BEPs). +If you are looking for something interesting or if you have something in your mind, there is a chance it had been discussed. + +## Related Projects + +- [ZkBAS Rollup Contracts](https://github.com/bnb-chain/zkbas-contract). +- [ZkBAS Crypto](https://github.com/bnb-chain/zkbas-crypto). +- [ZkBAS Eth RPC](https://github.com/bnb-chain/zkbas-eth-rpc). +- [ZkBAS Go SDK](https://github.com/bnb-chain/zkbas-go-sdk). + +## Outlook +We believe that zk-Rollup Sooner or later L2 The best of the track — This is a very cheap and safe first-class +L2 Expansion solutions. However, ZkBAS is application specific so far, this makes it difficult for developers to +build custom dApp on that, we will introduce generic programmability in the future... + + diff --git a/build.sh b/build.sh deleted file mode 100755 index 83df8f896..000000000 --- a/build.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/bin/bash - - -# ./build.sh - -api="app" -rpc="globalRPC" -cronjob="monitor committer sender prover witnessGenerator" - -# pull newest code -cd $2 -# ignore local modification -git checkout . -# fetch tags -git fetch -unf origin $1:refs/tags/$1 -# switch to new tag -git checkout -f $1 - - -gcloud auth configure-docker us-central1-docker.pkg.dev -# run goctl -for val in $api; do - cd ./service/api/${val} - echo "[${val}]: " - goctl api go -api ${val}.api -dir . -style gozero - cd ../../.. -done - -for val in $rpc; do - cd ./service/rpc/${val} - echo -n "[${val}]: " - goctl rpc protoc ${val}.proto --go_out=. --go-grpc_out=. --zrpc_out=. - cd ../../.. -done - - -echo "go mod tidy ..." -go env -w GOSUMDB=off -go mod tidy - -# go build all service&rpc in one script -for val in $api; do - echo "Go Build [${val}]: " - declare -l lower="${val}" - go build -ldflags "-X main.CodeVersion=`git describe --tags` -X main.GitCommitHash=`git rev-parse --short HEAD` -linkmode=external -extldflags=-static" -o ./bin/${lower} service/api/${val}/${lower}.go - - echo "Docker Build & Push [${val}]: " - declare -l lower="${val}" - docker build -t us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 -f service/api/${val}/Dockerfile . - docker push us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 - docker image prune --filter label=stage=gobuilder --force - - rm ./bin/${lower} -done - -for val in $rpc; do - echo "Go Build [${val}]: " - declare -l lower="${val}" - go build -ldflags '-linkmode "external" -extldflags "-static"' -o ./bin/${lower} service/rpc/${val}/${lower}.go - - echo "Docker Build & Push [${val}]: " - declare -l lower="${val}" - docker build -t us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 -f service/rpc/${val}/Dockerfile . 
- docker push us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 - docker image prune --filter label=stage=gobuilder --force - - rm ./bin/${lower} -done - -for val in $cronjob; do - echo "Go Build [${val}]: " - declare -l lower="${val}" - go build -ldflags '-linkmode "external" -extldflags "-static"' -o ./bin/${lower} service/cronjob/${val}/${lower}.go - - echo "Docker Build & Push [${val}]: " - declare -l lower="${val}" - - - docker build --no-cache -t us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 -f service/cronjob/${val}/Dockerfile . - - docker push us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/${lower}:$1 - docker image prune --filter label=stage=gobuilder --force - - - rm ./bin/${lower} -done - - -gcloud container clusters get-credentials "webhook" --region=us-central1-c -export TAG_NAME=$1 -envsubst < ./kubeyaml/compiled.yaml | kubectl apply -f - \ No newline at end of file diff --git a/cmd/flags/flags.go b/cmd/flags/flags.go new file mode 100644 index 000000000..2e8e83e84 --- /dev/null +++ b/cmd/flags/flags.go @@ -0,0 +1,44 @@ +package flags + +import ( + "github.com/urfave/cli/v2" +) + +var ( + ConfigFlag = &cli.StringFlag{ + Name: "config", + Aliases: []string{"f"}, + Usage: "the config file", + } + ContractAddrFlag = &cli.StringFlag{ + Name: "contractAddr", + Usage: "the contract addresses file", + } + DSNFlag = &cli.StringFlag{ + Name: "dsn", + Usage: "data source name", + } + BSCTestNetworkRPCFlag = &cli.StringFlag{ + Name: "testnet", + Value: "https://data-seed-prebsc-1-s1.binance.org:8545/", + Usage: "the rpc endpoint of bsc testnet", + } + LocalTestNetworkRPCFlag = &cli.StringFlag{ + Name: "local", + Value: "http://127.0.0.1:8545/", + Usage: "the rpc endpoint of local net", + } + BlockHeightFlag = &cli.Int64Flag{ + Name: "height", + Usage: "block height", + } + ServiceNameFlag = &cli.StringFlag{ + Name: "service", + Usage: "service name(committer, witness)", + } + BatchSizeFlag = &cli.IntFlag{ + Name: "batch", + Value: 1000, + Usage: "batch size for reading history record from the database", + } +) diff --git a/cmd/zkbas/main.go b/cmd/zkbas/main.go new file mode 100644 index 000000000..cddd61750 --- /dev/null +++ b/cmd/zkbas/main.go @@ -0,0 +1,194 @@ +package main + +import ( + "fmt" + "os" + "runtime" + + "github.com/urfave/cli/v2" + + "github.com/bnb-chain/zkbas/cmd/flags" + "github.com/bnb-chain/zkbas/service/apiserver" + "github.com/bnb-chain/zkbas/service/committer" + "github.com/bnb-chain/zkbas/service/monitor" + "github.com/bnb-chain/zkbas/service/prover" + "github.com/bnb-chain/zkbas/service/sender" + "github.com/bnb-chain/zkbas/service/witness" + "github.com/bnb-chain/zkbas/tools/dbinitializer" + "github.com/bnb-chain/zkbas/tools/recovery" +) + +// Build Info (set via linker flags) +var ( + gitCommit = "unknown" + gitDate = "unknown" + version = "unknown" +) + +func main() { + cli.VersionPrinter = func(ctx *cli.Context) { + fmt.Println("Version:", ctx.App.Version) + fmt.Println("Git Commit:", gitCommit) + fmt.Println("Git Commit Date:", gitDate) + fmt.Println("Architecture:", runtime.GOARCH) + fmt.Println("Go Version:", runtime.Version()) + fmt.Println("Operating System:", runtime.GOOS) + } + + app := &cli.App{ + Name: "zkBAS", + HelpName: "zkbas", + Version: version, + Description: "ZkRollup BNB Application Side Chain", + Commands: []*cli.Command{ + // services + { + Name: "prover", + Usage: "Run prover service", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Action: func(cCtx *cli.Context) error { + if 
!cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return prover.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + { + Name: "witness", + Usage: "Run witness service", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return witness.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + { + Name: "monitor", + Usage: "Run monitor service", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return monitor.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + { + Name: "committer", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Usage: "Run committer service", + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return committer.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + { + Name: "sender", + Usage: "Run sender service", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return sender.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + { + Name: "apiserver", + Usage: "Run apiserver service", + Flags: []cli.Flag{ + flags.ConfigFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return apiserver.Run(cCtx.String(flags.ConfigFlag.Name)) + }, + }, + // tools + { + Name: "db", + Usage: "Database tools", + Subcommands: []*cli.Command{ + { + Name: "initialize", + Usage: "Initialize DB tables", + Flags: []cli.Flag{ + flags.ContractAddrFlag, + flags.DSNFlag, + flags.BSCTestNetworkRPCFlag, + flags.LocalTestNetworkRPCFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ContractAddrFlag.Name) || + !cCtx.IsSet(flags.DSNFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + + return dbinitializer.Initialize( + cCtx.String(flags.DSNFlag.Name), + cCtx.String(flags.ContractAddrFlag.Name), + cCtx.String(flags.BSCTestNetworkRPCFlag.Name), + cCtx.String(flags.LocalTestNetworkRPCFlag.Name), + ) + }, + }, + }, + }, + { + Name: "tree", + Usage: "TreeDB tools", + Subcommands: []*cli.Command{ + { + Name: "recovery", + Usage: "Recovery treedb from the database", + Flags: []cli.Flag{ + flags.ConfigFlag, + flags.BlockHeightFlag, + flags.ServiceNameFlag, + flags.BatchSizeFlag, + }, + Action: func(cCtx *cli.Context) error { + if !cCtx.IsSet(flags.ServiceNameFlag.Name) || + !cCtx.IsSet(flags.BlockHeightFlag.Name) || + !cCtx.IsSet(flags.ConfigFlag.Name) { + return cli.ShowSubcommandHelp(cCtx) + } + recovery.RecoveryTreeDB( + cCtx.String(flags.ConfigFlag.Name), + cCtx.Int64(flags.BlockHeightFlag.Name), + cCtx.String(flags.ServiceNameFlag.Name), + cCtx.Int(flags.BatchSizeFlag.Name), + ) + return nil + }, + }, + }, + }, + }, + } + if err := app.Run(os.Args); err != nil { + fmt.Println(err) + } +} diff --git a/common/util/bufHelper.go b/common/buffer.go similarity index 68% rename from common/util/bufHelper.go rename to common/buffer.go index fc6dadf98..c76be37d9 100644 --- a/common/util/bufHelper.go +++ b/common/buffer.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the 
License. @@ -15,7 +15,7 @@ * */ -package util +package common import ( "bytes" @@ -23,33 +23,29 @@ import ( "errors" "math/big" - "github.com/bnb-chain/zkbas-crypto/zero/twistededwards/tebn254/zero" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonConstant" + curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" + "github.com/bnb-chain/zkbas/types" ) func PaddingStringBigIntIntoBuf(buf *bytes.Buffer, aStr string) error { a, isValid := new(big.Int).SetString(aStr, 10) if !isValid { - logx.Errorf("[PaddingStringBigIntIntoBuf] invalid string") return errors.New("[PaddingStringBigIntIntoBuf] invalid string") } - buf.Write(a.FillBytes(make([]byte, zero.PointSize))) + buf.Write(a.FillBytes(make([]byte, curve.PointSize))) return nil } func PaddingAddressIntoBuf(buf *bytes.Buffer, address string) (err error) { - if address == commonConstant.NilL1Address { + if address == types.NilL1Address { buf.Write(new(big.Int).FillBytes(make([]byte, 32))) return nil } addrBytes, err := DecodeAddress(address) if err != nil { - logx.Errorf("[PaddingAddressIntoBuf] invalid addr: %s, err: %s", address, err.Error()) return err } - buf.Write(new(big.Int).SetBytes(addrBytes).FillBytes(make([]byte, zero.PointSize))) + buf.Write(new(big.Int).SetBytes(addrBytes).FillBytes(make([]byte, curve.PointSize))) return nil } @@ -61,28 +57,26 @@ func DecodeAddress(addr string) ([]byte, error) { if err != nil { return nil, err } - if len(addrBytes) != AddressSize { - logx.Errorf("[DecodeAddress] invalid address: %s, err: %s", addr, err.Error()) + if len(addrBytes) != types.AddressSize { return nil, errors.New("[DecodeAddress] invalid address") } return addrBytes, nil } func PaddingInt64IntoBuf(buf *bytes.Buffer, a int64) { - buf.Write(new(big.Int).SetInt64(a).FillBytes(make([]byte, zero.PointSize))) + buf.Write(new(big.Int).SetInt64(a).FillBytes(make([]byte, curve.PointSize))) } func PaddingPkIntoBuf(buf *bytes.Buffer, pkStr string) (err error) { pk, err := ParsePubKey(pkStr) if err != nil { - logx.Errorf("[WriteEncIntoBuf] unable to parse pk: %s", err.Error()) return err } writePointIntoBuf(buf, &pk.A) return nil } -func writePointIntoBuf(buf *bytes.Buffer, p *zero.Point) { +func writePointIntoBuf(buf *bytes.Buffer, p *curve.Point) { buf.Write(p.X.Marshal()) buf.Write(p.Y.Marshal()) } diff --git a/common/util/bytesHelper.go b/common/bytes.go similarity index 61% rename from common/util/bytesHelper.go rename to common/bytes.go index 4595004f0..9200ac815 100644 --- a/common/util/bytesHelper.go +++ b/common/bytes.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,20 +15,53 @@ * */ -package util +package common import ( - "encoding/hex" + "encoding/binary" "math/big" "strings" + "github.com/bnb-chain/zkbas/types" "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" ) -const ( - AccountNameSuffix = ".legend" -) +func ReadUint8(buf []byte, offset int) (newOffset int, res uint8) { + return offset + 1, buf[offset] +} + +func ReadUint16(buf []byte, offset int) (newOffset int, res uint16) { + res = binary.BigEndian.Uint16(buf[offset : offset+2]) + return offset + 2, res +} + +func ReadUint32(buf []byte, offset int) (newOffset int, res uint32) { + res = binary.BigEndian.Uint32(buf[offset : offset+4]) + return offset + 4, res +} + +func ReadUint40(buf []byte, offset int) (newOffset int, res int64) { + return offset + 5, new(big.Int).SetBytes(buf[offset : offset+5]).Int64() +} + +func ReadUint128(buf []byte, offset int) (newOffset int, res *big.Int) { + return offset + 16, new(big.Int).SetBytes(buf[offset : offset+16]) +} + +func ReadUint256(buf []byte, offset int) (newOffset int, res *big.Int) { + return offset + 32, new(big.Int).SetBytes(buf[offset : offset+32]) +} + +func ReadBytes32(buf []byte, offset int) (newOffset int, res []byte) { + res = make([]byte, 32) + copy(res[:], buf[offset:offset+32]) + return offset + 32, res +} + +func ReadAddress(buf []byte, offset int) (newOffset int, res string) { + res = common.BytesToAddress(buf[offset : offset+20]).Hex() + return offset + 20, res +} func PrefixPaddingBufToChunkSize(buf []byte) []byte { return new(big.Int).SetBytes(buf).FillBytes(make([]byte, 32)) @@ -41,7 +74,7 @@ func SuffixPaddingBufToChunkSize(buf []byte) []byte { } func AccountNameToBytes32(accountName string) []byte { - realName := strings.Split(accountName, AccountNameSuffix)[0] + realName := strings.Split(accountName, types.AccountNameSuffix)[0] buf := make([]byte, 32) copy(buf[:], realName) return buf @@ -78,7 +111,6 @@ func Uint256ToBytes(a *big.Int) []byte { func AmountToPackedAmountBytes(a *big.Int) (res []byte, err error) { packedAmount, err := ToPackedAmount(a) if err != nil { - logx.Errorf("[AmountToPackedAmountBytes] invalid amount: %s", err.Error()) return nil, err } return Uint40ToBytes(packedAmount), nil @@ -87,19 +119,7 @@ func AmountToPackedAmountBytes(a *big.Int) (res []byte, err error) { func FeeToPackedFeeBytes(a *big.Int) (res []byte, err error) { packedFee, err := ToPackedFee(a) if err != nil { - logx.Errorf("[FeeToPackedFeeBytes] invalid fee amount: %s", err.Error()) return nil, err } return Uint16ToBytes(uint16(packedFee)), nil } - -func FromHex(s string) ([]byte, error) { - if len(s) >= 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X') { - s = s[2:] - } - - if len(s)%2 == 1 { - s = "0" + s - } - return hex.DecodeString(s) -} diff --git a/common/util/bytesHelper_test.go b/common/bytes_test.go similarity index 78% rename from common/util/bytesHelper_test.go rename to common/bytes_test.go index 64e7655e9..ccf893811 100644 --- a/common/util/bytesHelper_test.go +++ b/common/bytes_test.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,17 +15,17 @@ * */ -package util +package common import ( - "fmt" "testing" "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/assert" ) func TestAccountNameToBytes32(t *testing.T) { accountName := "sher" info := AccountNameToBytes32(accountName) - fmt.Println(common.Bytes2Hex(info[:])) + assert.Equal(t, common.Bytes2Hex(info[:]), "7368657200000000000000000000000000000000000000000000000000000000") } diff --git a/common/chain/account_helper.go b/common/chain/account_helper.go new file mode 100644 index 000000000..a18a8c759 --- /dev/null +++ b/common/chain/account_helper.go @@ -0,0 +1,55 @@ +package chain + +import ( + "encoding/json" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/types" +) + +func FromFormatAccountInfo(formatAccountInfo *types.AccountInfo) (accountInfo *account.Account, err error) { + assetInfoBytes, err := json.Marshal(formatAccountInfo.AssetInfo) + if err != nil { + return nil, types.JsonErrMarshal + } + accountInfo = &account.Account{ + Model: gorm.Model{ + ID: formatAccountInfo.AccountId, + }, + AccountIndex: formatAccountInfo.AccountIndex, + AccountName: formatAccountInfo.AccountName, + PublicKey: formatAccountInfo.PublicKey, + AccountNameHash: formatAccountInfo.AccountNameHash, + L1Address: formatAccountInfo.L1Address, + Nonce: formatAccountInfo.Nonce, + CollectionNonce: formatAccountInfo.CollectionNonce, + AssetInfo: string(assetInfoBytes), + AssetRoot: formatAccountInfo.AssetRoot, + Status: formatAccountInfo.Status, + } + return accountInfo, nil +} + +func ToFormatAccountInfo(accountInfo *account.Account) (formatAccountInfo *types.AccountInfo, err error) { + var assetInfo map[int64]*types.AccountAsset + err = json.Unmarshal([]byte(accountInfo.AssetInfo), &assetInfo) + if err != nil { + return nil, types.JsonErrUnmarshal + } + formatAccountInfo = &types.AccountInfo{ + AccountId: accountInfo.ID, + AccountIndex: accountInfo.AccountIndex, + AccountName: accountInfo.AccountName, + PublicKey: accountInfo.PublicKey, + AccountNameHash: accountInfo.AccountNameHash, + L1Address: accountInfo.L1Address, + Nonce: accountInfo.Nonce, + CollectionNonce: accountInfo.CollectionNonce, + AssetInfo: assetInfo, + AssetRoot: accountInfo.AssetRoot, + Status: accountInfo.Status, + } + return formatAccountInfo, nil +} diff --git a/common/commonAsset/balanceHelper.go b/common/chain/balance_helper.go similarity index 65% rename from common/commonAsset/balanceHelper.go rename to common/chain/balance_helper.go index edc9b55ac..bee5c929d 100644 --- a/common/commonAsset/balanceHelper.go +++ b/common/chain/balance_helper.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,53 +15,43 @@ * */ -package commonAsset +package chain import ( "errors" "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonConstant" + "github.com/bnb-chain/zkbas/types" ) -/* - ComputeNewBalance: helper function for computing new balance for different asset types -*/ func ComputeNewBalance(assetType int64, balance string, balanceDelta string) (newBalance string, err error) { switch assetType { - case GeneralAssetType: - assetInfo, err := ParseAccountAsset(balance) + case types.FungibleAssetType: + assetInfo, err := types.ParseAccountAsset(balance) if err != nil { - logx.Errorf("[ComputeNewBalance] unable to parse account asset: %s", err.Error()) return "", err } - assetDelta, err := ParseAccountAsset(balanceDelta) + assetDelta, err := types.ParseAccountAsset(balanceDelta) if err != nil { - logx.Errorf("[ComputeNewBalance] unable to parse account asset: %s", err.Error()) return "", err } assetInfo.Balance = ffmath.Add(assetInfo.Balance, assetDelta.Balance) assetInfo.LpAmount = ffmath.Add(assetInfo.LpAmount, assetDelta.LpAmount) if assetDelta.OfferCanceledOrFinalized == nil { - assetDelta.OfferCanceledOrFinalized = ZeroBigInt + assetDelta.OfferCanceledOrFinalized = types.ZeroBigInt } - if assetDelta.OfferCanceledOrFinalized.Cmp(commonConstant.NilOfferCanceledOrFinalized) != 0 { + if assetDelta.OfferCanceledOrFinalized.Cmp(types.NilOfferCanceledOrFinalized) != 0 { assetInfo.OfferCanceledOrFinalized = assetDelta.OfferCanceledOrFinalized } newBalance = assetInfo.String() - break - case LiquidityAssetType: + case types.LiquidityAssetType: // balance: LiquidityInfo - liquidityInfo, err := ParseLiquidityInfo(balance) + liquidityInfo, err := types.ParseLiquidityInfo(balance) if err != nil { - logx.Errorf("[ComputeNewBalance] unable to parse liquidity info: %s", err.Error()) return "", err } - deltaLiquidity, err := ParseLiquidityInfo(balanceDelta) + deltaLiquidity, err := types.ParseLiquidityInfo(balanceDelta) if err != nil { - logx.Errorf("[ComputeNewBalance] unable to parse liquidity info: %s", err.Error()) return "", err } liquidityInfo.AssetAId = deltaLiquidity.AssetAId @@ -69,18 +59,16 @@ func ComputeNewBalance(assetType int64, balance string, balanceDelta string) (ne liquidityInfo.AssetA = ffmath.Add(liquidityInfo.AssetA, deltaLiquidity.AssetA) liquidityInfo.AssetB = ffmath.Add(liquidityInfo.AssetB, deltaLiquidity.AssetB) liquidityInfo.LpAmount = ffmath.Add(liquidityInfo.LpAmount, deltaLiquidity.LpAmount) - if deltaLiquidity.KLast.Cmp(ZeroBigInt) != 0 { + if deltaLiquidity.KLast.Cmp(types.ZeroBigInt) != 0 { liquidityInfo.KLast = deltaLiquidity.KLast } liquidityInfo.FeeRate = deltaLiquidity.FeeRate liquidityInfo.TreasuryAccountIndex = deltaLiquidity.TreasuryAccountIndex liquidityInfo.TreasuryRate = deltaLiquidity.TreasuryRate newBalance = liquidityInfo.String() - break - case NftAssetType: + case types.NftAssetType: // just set the old one as the new one newBalance = balanceDelta - break default: return "", errors.New("[ComputeNewBalance] invalid asset type") } diff --git a/common/chain/block_helper.go b/common/chain/block_helper.go new file mode 100644 index 000000000..bcc09053b --- /dev/null +++ b/common/chain/block_helper.go @@ -0,0 +1,87 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package chain + +import ( + "bytes" + "math/big" + + "github.com/ethereum/go-ethereum/common" + + curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" + "github.com/bnb-chain/zkbas-crypto/ffmath" + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/dao/block" +) + +func CreateBlockCommitment( + currentBlockHeight int64, + createdAt int64, + oldStateRoot []byte, + newStateRoot []byte, + pubData []byte, + onChainOpsCount int64, +) string { + var buf bytes.Buffer + common2.PaddingInt64IntoBuf(&buf, currentBlockHeight) + common2.PaddingInt64IntoBuf(&buf, createdAt) + buf.Write(CleanAndPaddingByteByModulus(oldStateRoot)) + buf.Write(CleanAndPaddingByteByModulus(newStateRoot)) + buf.Write(CleanAndPaddingByteByModulus(pubData)) + common2.PaddingInt64IntoBuf(&buf, onChainOpsCount) + // TODO Keccak256 + //hFunc := mimc.NewMiMC() + //hFunc.Write(buf.Bytes()) + //commitment := hFunc.Sum(nil) + commitment := common2.KeccakHash(buf.Bytes()) + return common.Bytes2Hex(commitment) +} + +func ConstructStoredBlockInfo(oBlock *block.Block) zkbas.StorageStoredBlockInfo { + var ( + PendingOnchainOperationsHash [32]byte + StateRoot [32]byte + Commitment [32]byte + ) + copy(PendingOnchainOperationsHash[:], common.FromHex(oBlock.PendingOnChainOperationsHash)[:]) + copy(StateRoot[:], common.FromHex(oBlock.StateRoot)[:]) + copy(Commitment[:], common.FromHex(oBlock.BlockCommitment)[:]) + return zkbas.StorageStoredBlockInfo{ + BlockNumber: uint32(oBlock.BlockHeight), + PriorityOperations: uint64(oBlock.PriorityOperations), + PendingOnchainOperationsHash: PendingOnchainOperationsHash, + Timestamp: big.NewInt(oBlock.CreatedAt.UnixMilli()), + StateRoot: StateRoot, + Commitment: Commitment, + BlockSize: oBlock.BlockSize, + } +} + +func CleanAndPaddingByteByModulus(buf []byte) []byte { + if len(buf) <= 32 { + return ffmath.Mod(new(big.Int).SetBytes(buf), curve.Modulus).FillBytes(make([]byte, 32)) + } + offset := 32 + var pendingBuf bytes.Buffer + for offset <= len(buf) { + pendingBuf.Write(ffmath.Mod(new(big.Int).SetBytes(buf[offset-32:offset]), curve.Modulus).FillBytes(make([]byte, 32))) + offset += 32 + } + return pendingBuf.Bytes() +} diff --git a/common/util/pubdataHelper_test.go b/common/chain/block_helper_test.go similarity index 55% rename from common/util/pubdataHelper_test.go rename to common/chain/block_helper_test.go index 0f20c14aa..a8b3b03b8 100644 --- a/common/util/pubdataHelper_test.go +++ b/common/chain/block_helper_test.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,40 +15,22 @@ * */ -package util +package chain import ( "bytes" - "fmt" - "log" "math/big" "testing" - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/ffmath" "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/assert" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/mempool" -) - -var ( - mempoolModel = mempool.NewMempoolModel(basic.Connection, basic.CacheConf, basic.DB) + curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" + "github.com/bnb-chain/zkbas-crypto/ffmath" + common2 "github.com/bnb-chain/zkbas/common" ) -func TestConvertTxToRegisterZNSPubData(t *testing.T) { - txInfo, err := mempoolModel.GetMempoolTxByTxId(1) - if err != nil { - t.Fatal(err) - } - pubData, err := ConvertTxToRegisterZNSPubData(txInfo) - if err != nil { - t.Fatal(err) - } - log.Println(common.Bytes2Hex(pubData)) -} - func TestPubDataComputation(t *testing.T) { oldStateRoot, _ := new(big.Int).SetString("15043264495212376832665268192414242291394558777525090122806455607283976407362", 10) @@ -60,8 +42,8 @@ func TestPubDataComputation(t *testing.T) { pubData5, _ := new(big.Int).SetString("8734016109108763008334396672504977758060680100901855772709016788881531390238", 10) pubData6, _ := new(big.Int).SetString("0", 10) - fmt.Println(common.Bytes2Hex(oldStateRoot.FillBytes(make([]byte, 32)))) - fmt.Println(common.Bytes2Hex(newStateRoot.FillBytes(make([]byte, 32)))) + assert.Equal(t, common.Bytes2Hex(oldStateRoot.FillBytes(make([]byte, 32))), "21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942") + assert.Equal(t, common.Bytes2Hex(newStateRoot.FillBytes(make([]byte, 32))), "1b2ff4ae0d507a971fb267849af6a28000b1d483865c5a610cc47db6f196c672") var buf bytes.Buffer buf.Write(pubData1.FillBytes(make([]byte, 32))) @@ -70,63 +52,46 @@ func TestPubDataComputation(t *testing.T) { buf.Write(pubData4.FillBytes(make([]byte, 32))) buf.Write(pubData5.FillBytes(make([]byte, 32))) buf.Write(pubData6.FillBytes(make([]byte, 32))) - fmt.Println(common.Bytes2Hex(buf.Bytes())) + assert.Equal(t, common.Bytes2Hex(buf.Bytes()), "01000000010000000000000000000000000000000000000000000000000000000698d61a3d9cbfac8f5f7492fcfd4f45af982f6f0c8d1edd783c14d81ffffffe0a48e9892a45a04d0c5b0f235a3aeb07b92137ba71a59b9c457774bafde959832c24415b75651673b0d7bbf145ac8d7cb744ba6926963d1d014836336df1317a134f4726b89983a8e7babbf6973e7ee16311e24328edf987bb0fbe7a494ec91e0000000000000000000000000000000000000000000000000000000000000000") commitment, _ := new(big.Int).SetString("2001904096268940627870837110796902048094724981649621731904769518577628368633", 10) - fmt.Println(common.Bytes2Hex(commitment.FillBytes(make([]byte, 32)))) + assert.Equal(t, common.Bytes2Hex(commitment.FillBytes(make([]byte, 32))), "046d099ddea2c1ef130f85916df7e73d761454bd847cee12bb5919227c9a4ef9") } func TestPubData2(t *testing.T) { var buf bytes.Buffer - //bytesType, _ := abi.NewType("bytes", "", nil) - //uint32Type, _ := abi.NewType("uint32", "", nil) - //uint64Type, _ := abi.NewType("uint64", "", nil) - //bytes32Type, _ := abi.NewType("bytes32", "", nil) buf.Write(new(big.Int).SetInt64(1).FillBytes(make([]byte, 32))) buf.Write(new(big.Int).SetInt64(1654843322039).FillBytes(make([]byte, 32))) buf.Write(common.FromHex("14e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f")) buf.Write(common.FromHex("21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942")) 
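// The remaining writes append the pub data chunk and the on-chain operations count, completing the same
// pre-image layout that CreateBlockCommitment assembles: block height, timestamp, old and new state roots,
// pub data and the on-chain ops count, each padded to 32-byte chunks.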
buf.Write(common.FromHex("01000000000000000000000000000000000000000000000000000000000000007472656173757279000000000000000000000000000000000000000000000000167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a2005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc0000000000000000000000000000000000000000000000000000000000000000")) buf.Write(new(big.Int).SetInt64(1).FillBytes(make([]byte, 32))) - //hFunc.Write(buf.Bytes()) - //hashVal := hFunc.Sum(nil) hFunc := mimc.NewMiMC() - //arguments := abi.Arguments{{Type: uint64Type}, {Type: uint64Type}, {Type: bytes32Type}, {Type: bytes32Type}, {Type: bytesType}, {Type: uint32Type}} - //info, _ := arguments.Pack( - // uint64(1), - // uint64(1654843322039), - // common.FromHex("14e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f"), - // common.FromHex("21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942"), - // common.FromHex("01000000000000000000000000000000000000000000000000000000000000007472656173757279000000000000000000000000000000000000000000000000167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a2005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc0000000000000000000000000000000000000000000000000000000000000000"), - // uint32(1), - //) - //log.Println(common.Bytes2Hex(info)) - log.Println(common.Bytes2Hex(buf.Bytes())) - //hashVal := KeccakHash(info) + assert.Equal(t, common.Bytes2Hex(buf.Bytes()), "0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000001814c592eb714e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed96894201000000000000000000000000000000000000000000000000000000000000007472656173757279000000000000000000000000000000000000000000000000167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a2005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001") hFunc.Write(buf.Bytes()) hashVal := hFunc.Sum(nil) - fmt.Println(common.Bytes2Hex(hashVal)) + assert.Equal(t, common.Bytes2Hex(hashVal), "0e4df6d7053619400d712319012c47b2cb7dcb2d83c203391547148b4f17741a") } func TestMiMCHash(t *testing.T) { hFunc := mimc.NewMiMC() hFunc.Write(new(big.Int).SetInt64(123123123).FillBytes(make([]byte, 32))) a := hFunc.Sum(nil) - fmt.Println(new(big.Int).SetBytes(a).String()) - fmt.Println(common.Bytes2Hex(curve.Modulus.Bytes())) + assert.Equal(t, new(big.Int).SetBytes(a).String(), "6158863128777714998435927227085268531294199267913818508594792142833376806078") + assert.Equal(t, common.Bytes2Hex(curve.Modulus.Bytes()), "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001") } func TestParsePubKey(t *testing.T) { - pk, err := ParsePubKey("58130e24cd20d9de8a110a20751f0a9b36089400ac0f20ca1993c28ee663318a") + pk, err := common2.ParsePubKey("58130e24cd20d9de8a110a20751f0a9b36089400ac0f20ca1993c28ee663318a") if err != nil { t.Fatal(err) } a := curve.ScalarBaseMul(big.NewInt(2)) f, _ := new(big.Int).SetString("15527681003928902128179717624703512672403908117992798440346960750464748824729", 10) - log.Println(ffmath.DivMod(new(big.Int).SetBytes(a.X.Marshal()), f, curve.Modulus).String()) - log.Println(a.Y.String()) - 
log.Println(pk.A.IsOnCurve()) - log.Println(pk.A.X.String()) - log.Println(pk.A.Y.String()) - log.Println(pk.Bytes()) + assert.Equal(t, ffmath.DivMod(new(big.Int).SetBytes(a.X.Marshal()), f, curve.Modulus).Int64(), int64(0)) + assert.Equal(t, a.Y.String(), "633281375905621697187330766174974863687049529291089048651929454608812697683") + assert.True(t, pk.A.IsOnCurve()) + assert.Equal(t, pk.A.X.String(), "15824650925573404919778443019341920124666294571462377929750266189082392233365") + assert.Equal(t, pk.A.Y.String(), "4610393480717259196086276896776664313868698522523751289329834907930790335320") + assert.Equal(t, common.Bytes2Hex(pk.Bytes()), "58130e24cd20d9de8a110a20751f0a9b36089400ac0f20ca1993c28ee663318a") } diff --git a/common/util/liquidityHelper.go b/common/chain/liquidity_helper.go similarity index 64% rename from common/util/liquidityHelper.go rename to common/chain/liquidity_helper.go index bd5565ad7..a598d09c1 100644 --- a/common/util/liquidityHelper.go +++ b/common/chain/liquidity_helper.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ * */ -package util +package chain import ( "errors" @@ -23,9 +23,8 @@ import ( "github.com/bnb-chain/zkbas-crypto/ffmath" "github.com/bnb-chain/zkbas-crypto/util" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" ) func ComputeEmptyLpAmount( @@ -34,34 +33,33 @@ func ComputeEmptyLpAmount( ) (lpAmount *big.Int, err error) { lpSquare := ffmath.Multiply(assetAAmount, assetBAmount) lpFloat := ffmath.FloatSqrt(ffmath.IntToFloat(lpSquare)) - lpAmount, err = CleanPackedAmount(ffmath.FloatToInt(lpFloat)) + lpAmount, err = common.CleanPackedAmount(ffmath.FloatToInt(lpFloat)) if err != nil { - logx.Errorf("[ComputeEmptyLpAmount] unable to compute lp amount: %s", err.Error()) return nil, err } return lpAmount, nil } -func ComputeLpAmount(liquidityInfo *commonAsset.LiquidityInfo, assetAAmount *big.Int) (lpAmount *big.Int, err error) { +func ComputeLpAmount(liquidityInfo *types.LiquidityInfo, assetAAmount *big.Int) (lpAmount *big.Int, err error) { // lp = assetAAmount / poolA * LpAmount sLp, err := ComputeSLp(liquidityInfo.AssetA, liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) if err != nil { return nil, err } poolLpAmount := ffmath.Sub(liquidityInfo.LpAmount, sLp) - lpAmount, err = CleanPackedAmount(ffmath.Div(ffmath.Multiply(assetAAmount, poolLpAmount), liquidityInfo.AssetA)) + lpAmount, err = common.CleanPackedAmount(ffmath.Div(ffmath.Multiply(assetAAmount, poolLpAmount), liquidityInfo.AssetA)) if err != nil { return nil, err } return lpAmount, nil } -func ComputeRemoveLiquidityAmount(liquidityInfo *commonAsset.LiquidityInfo, lpAmount *big.Int) (assetAAmount, assetBAmount *big.Int, err error) { +func ComputeRemoveLiquidityAmount(liquidityInfo *types.LiquidityInfo, lpAmount *big.Int) (assetAAmount, assetBAmount *big.Int, err error) { sLp, err := ComputeSLp(liquidityInfo.AssetA, liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) if err != nil { return nil, nil, err } - lpAmount, err = CleanPackedAmount(lpAmount) + lpAmount, err = common.CleanPackedAmount(lpAmount) if err != nil { return nil, nil, err } @@ -95,8 +93,7 @@ func ComputeDelta( } return delta, 
assetAId, nil } else { - logx.Errorf("[ComputeDelta] invalid asset id") - return ZeroBigInt, 0, errors.New("[ComputeDelta]: invalid asset id") + return types.ZeroBigInt, 0, errors.New("[ComputeDelta]: invalid asset id") } } else { if assetAId == assetId { @@ -112,35 +109,25 @@ func ComputeDelta( } return delta, assetAId, nil } else { - logx.Errorf("[ComputeDelta] invalid asset id") - return ZeroBigInt, 0, errors.New("[ComputeDelta]: invalid asset id") + return types.ZeroBigInt, 0, errors.New("[ComputeDelta]: invalid asset id") } } } -/* - Implementation Reference: - https://github.com/runtimeverification/verified-smart-contracts/blob/master/uniswap/x-y-k.pdf -*/ - -/* - InputPrice = (9970 * deltaX * y) / (10000 * x + 9970 * deltaX) -*/ +// ComputeInputPrice InputPrice = (9970 * deltaX * y) / (10000 * x + 9970 * deltaX) func ComputeInputPrice(x *big.Int, y *big.Int, inputX *big.Int, feeRate int64) (*big.Int, error) { - rFeeR := big.NewInt(FeeRateBase - feeRate) - res, err := util.CleanPackedAmount(ffmath.Div(ffmath.Multiply(rFeeR, ffmath.Multiply(inputX, y)), ffmath.Add(ffmath.Multiply(big.NewInt(FeeRateBase), x), ffmath.Multiply(rFeeR, inputX)))) + rFeeR := big.NewInt(types.FeeRateBase - feeRate) + res, err := util.CleanPackedAmount(ffmath.Div(ffmath.Multiply(rFeeR, ffmath.Multiply(inputX, y)), ffmath.Add(ffmath.Multiply(big.NewInt(types.FeeRateBase), x), ffmath.Multiply(rFeeR, inputX)))) if err != nil { return nil, err } return res, nil } -/* - OutputPrice = (10000 * x * deltaY) / (9970 * (y - deltaY)) + 1 -*/ +// ComputeOutputPrice OutputPrice = (10000 * x * deltaY) / (9970 * (y - deltaY)) + 1 func ComputeOutputPrice(x *big.Int, y *big.Int, inputY *big.Int, feeRate int64) (*big.Int, error) { - rFeeR := big.NewInt(FeeRateBase - feeRate) - res, err := CleanPackedAmount(ffmath.Add(ffmath.Div(ffmath.Multiply(big.NewInt(FeeRateBase), ffmath.Multiply(x, inputY)), ffmath.Multiply(rFeeR, ffmath.Sub(y, inputY))), big.NewInt(1))) + rFeeR := big.NewInt(types.FeeRateBase - feeRate) + res, err := common.CleanPackedAmount(ffmath.Add(ffmath.Div(ffmath.Multiply(big.NewInt(types.FeeRateBase), ffmath.Multiply(x, inputY)), ffmath.Multiply(rFeeR, ffmath.Sub(y, inputY))), big.NewInt(1))) if err != nil { return nil, err } @@ -149,15 +136,15 @@ func ComputeOutputPrice(x *big.Int, y *big.Int, inputY *big.Int, feeRate int64) func ComputeSLp(poolA, poolB *big.Int, kLast *big.Int, feeRate, treasuryRate int64) (*big.Int, error) { kCurrent := ffmath.Multiply(poolA, poolB) - if kCurrent.Cmp(ZeroBigInt) == 0 { - return ZeroBigInt, nil + if kCurrent.Cmp(types.ZeroBigInt) == 0 { + return types.ZeroBigInt, nil } kCurrent.Sqrt(kCurrent) kLast.Sqrt(kLast) - l := ffmath.Multiply(ffmath.Sub(kCurrent, kLast), big.NewInt(FeeRateBase)) - r := ffmath.Multiply(ffmath.Sub(ffmath.Multiply(big.NewInt(FeeRateBase), ffmath.Div(big.NewInt(feeRate), big.NewInt(treasuryRate))), big.NewInt(FeeRateBase)), kCurrent) - r = ffmath.Add(r, ffmath.Multiply(big.NewInt(FeeRateBase), kLast)) - res, err := CleanPackedAmount(ffmath.Div(l, r)) + l := ffmath.Multiply(ffmath.Sub(kCurrent, kLast), big.NewInt(types.FeeRateBase)) + r := ffmath.Multiply(ffmath.Sub(ffmath.Multiply(big.NewInt(types.FeeRateBase), ffmath.Div(big.NewInt(feeRate), big.NewInt(treasuryRate))), big.NewInt(types.FeeRateBase)), kCurrent) + r = ffmath.Add(r, ffmath.Multiply(big.NewInt(types.FeeRateBase), kLast)) + res, err := common.CleanPackedAmount(ffmath.Div(l, r)) if err != nil { return nil, err } diff --git a/common/util/liquidityHelper_test.go 
b/common/chain/liquidity_helper_test.go similarity index 77% rename from common/util/liquidityHelper_test.go rename to common/chain/liquidity_helper_test.go index ac9402877..bb6eb9e33 100644 --- a/common/util/liquidityHelper_test.go +++ b/common/chain/liquidity_helper_test.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,15 +15,15 @@ * */ -package util +package chain import ( - "fmt" - "log" "math/big" "testing" - "github.com/bnb-chain/zkbas/common/commonAsset" + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/types" ) func TestComputeDeltaY(t *testing.T) { @@ -37,11 +37,12 @@ func TestComputeDeltaY(t *testing.T) { if err != nil { t.Fatal(err) } - fmt.Println(deltaY.String(), assetId) + assert.Equal(t, deltaY.String(), "1004") + assert.Equal(t, assetId, int64(2)) } func TestComputeRemoveLiquidityAmount(t *testing.T) { - liquidityInfo := &commonAsset.LiquidityInfo{ + liquidityInfo := &types.LiquidityInfo{ PairIndex: 0, AssetAId: 0, AssetA: big.NewInt(99901), @@ -57,8 +58,8 @@ func TestComputeRemoveLiquidityAmount(t *testing.T) { liquidityInfo, big.NewInt(100), ) - fmt.Println(aAmount.String()) - fmt.Println(bAmount.String()) + assert.Equal(t, aAmount.Int64(), int64(99)) + assert.Equal(t, bAmount.Int64(), int64(100)) } func TestComputeInputPrice(t *testing.T) { @@ -68,16 +69,7 @@ func TestComputeInputPrice(t *testing.T) { poolA, poolB, big.NewInt(500), 30, ) - fmt.Println(deltaY.String()) -} - -func TestComputeInputPriceS(t *testing.T) { - X := 1000 - Y := 1000 - inputX := 500 - - output := (9970 * inputX * Y) / (10000*X + 9970*inputX) - log.Println(output) + assert.Equal(t, deltaY.Int64(), int64(332)) } func TestComputeOutputPrice(t *testing.T) { @@ -87,5 +79,5 @@ func TestComputeOutputPrice(t *testing.T) { poolA, poolB, big.NewInt(500), 30, ) - fmt.Println(deltaY.String()) + assert.Equal(t, deltaY.Int64(), int64(1004)) } diff --git a/common/chain/pubdata_helper.go b/common/chain/pubdata_helper.go new file mode 100644 index 000000000..dda1ec44d --- /dev/null +++ b/common/chain/pubdata_helper.go @@ -0,0 +1,216 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package chain + +import ( + "errors" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + "github.com/ethereum/go-ethereum/common" +) + +func ParseRegisterZnsPubData(pubData []byte) (tx *types.RegisterZnsTxInfo, err error) { + /* + struct RegisterZNS { + uint8 txType; + bytes32 accountName; + bytes32 accountNameHash; + bytes32 pubKeyX; + bytes32 pubKeyY; + } + */ + if len(pubData) != types.RegisterZnsPubDataSize { + return nil, errors.New("[ParseRegisterZnsPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, accountIndex := common2.ReadUint32(pubData, offset) + offset, accountName := common2.ReadBytes32(pubData, offset) + offset, accountNameHash := common2.ReadBytes32(pubData, offset) + offset, pubKeyX := common2.ReadBytes32(pubData, offset) + _, pubKeyY := common2.ReadBytes32(pubData, offset) + pk := new(eddsa.PublicKey) + pk.A.X.SetBytes(pubKeyX) + pk.A.Y.SetBytes(pubKeyY) + tx = &types.RegisterZnsTxInfo{ + TxType: txType, + AccountIndex: int64(accountIndex), + AccountName: common2.CleanAccountName(common2.SerializeAccountName(accountName)), + AccountNameHash: accountNameHash, + PubKey: common.Bytes2Hex(pk.Bytes()), + } + return tx, nil +} + +func ParseCreatePairPubData(pubData []byte) (tx *types.CreatePairTxInfo, err error) { + if len(pubData) != types.CreatePairPubDataSize { + return nil, errors.New("[ParseCreatePairPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, pairIndex := common2.ReadUint16(pubData, offset) + offset, assetAId := common2.ReadUint16(pubData, offset) + offset, assetBId := common2.ReadUint16(pubData, offset) + offset, feeRate := common2.ReadUint16(pubData, offset) + offset, treasuryAccountIndex := common2.ReadUint32(pubData, offset) + _, treasuryRate := common2.ReadUint16(pubData, offset) + tx = &types.CreatePairTxInfo{ + TxType: txType, + PairIndex: int64(pairIndex), + AssetAId: int64(assetAId), + AssetBId: int64(assetBId), + FeeRate: int64(feeRate), + TreasuryAccountIndex: int64(treasuryAccountIndex), + TreasuryRate: int64(treasuryRate), + } + return tx, nil +} + +func ParseUpdatePairRatePubData(pubData []byte) (tx *types.UpdatePairRateTxInfo, err error) { + if len(pubData) != types.UpdatePairRatePubdataSize { + return nil, errors.New("[ParseUpdatePairRatePubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, pairIndex := common2.ReadUint16(pubData, offset) + offset, feeRate := common2.ReadUint16(pubData, offset) + offset, treasuryAccountIndex := common2.ReadUint32(pubData, offset) + _, treasuryRate := common2.ReadUint16(pubData, offset) + tx = &types.UpdatePairRateTxInfo{ + TxType: txType, + PairIndex: int64(pairIndex), + FeeRate: int64(feeRate), + TreasuryAccountIndex: int64(treasuryAccountIndex), + TreasuryRate: int64(treasuryRate), + } + return tx, nil +} + +func ParseDepositPubData(pubData []byte) (tx *types.DepositTxInfo, err error) { + /* + struct Deposit { + uint8 txType; + uint32 accountIndex; + bytes32 accountNameHash; + uint16 assetId; + uint128 amount; + } + */ + if len(pubData) != types.DepositPubDataSize { + return nil, errors.New("[ParseDepositPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, accountIndex := common2.ReadUint32(pubData, offset) + offset, 
accountNameHash := common2.ReadBytes32(pubData, offset) + offset, assetId := common2.ReadUint16(pubData, offset) + _, amount := common2.ReadUint128(pubData, offset) + tx = &types.DepositTxInfo{ + TxType: txType, + AccountIndex: int64(accountIndex), + AccountNameHash: accountNameHash, + AssetId: int64(assetId), + AssetAmount: amount, + } + return tx, nil +} + +func ParseDepositNftPubData(pubData []byte) (tx *types.DepositNftTxInfo, err error) { + if len(pubData) != types.DepositNftPubDataSize { + return nil, errors.New("[ParseDepositNftPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, accountIndex := common2.ReadUint32(pubData, offset) + offset, nftIndex := common2.ReadUint40(pubData, offset) + offset, nftL1Address := common2.ReadAddress(pubData, offset) + offset, creatorAccountIndex := common2.ReadUint32(pubData, offset) + offset, creatorTreasuryRate := common2.ReadUint16(pubData, offset) + offset, nftContentHash := common2.ReadBytes32(pubData, offset) + offset, nftL1TokenId := common2.ReadUint256(pubData, offset) + offset, accountNameHash := common2.ReadBytes32(pubData, offset) + _, collectionId := common2.ReadUint16(pubData, offset) + tx = &types.DepositNftTxInfo{ + TxType: txType, + AccountIndex: int64(accountIndex), + NftIndex: nftIndex, + NftL1Address: nftL1Address, + CreatorAccountIndex: int64(creatorAccountIndex), + CreatorTreasuryRate: int64(creatorTreasuryRate), + NftContentHash: nftContentHash, + NftL1TokenId: nftL1TokenId, + AccountNameHash: accountNameHash, + CollectionId: int64(collectionId), + } + return tx, nil +} + +func ParseFullExitPubData(pubData []byte) (tx *types.FullExitTxInfo, err error) { + if len(pubData) != types.FullExitPubDataSize { + return nil, errors.New("[ParseFullExitPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, accountIndex := common2.ReadUint32(pubData, offset) + offset, assetId := common2.ReadUint16(pubData, offset) + offset, assetAmount := common2.ReadUint128(pubData, offset) + _, accountNameHash := common2.ReadBytes32(pubData, offset) + tx = &types.FullExitTxInfo{ + TxType: txType, + AccountIndex: int64(accountIndex), + AccountNameHash: accountNameHash, + AssetId: int64(assetId), + AssetAmount: assetAmount, + } + return tx, nil +} + +func ParseFullExitNftPubData(pubData []byte) (tx *legendTxTypes.FullExitNftTxInfo, err error) { + if len(pubData) != types.FullExitNftPubDataSize { + return nil, errors.New("[ParseFullExitNftPubData] invalid size") + } + offset := 0 + offset, txType := common2.ReadUint8(pubData, offset) + offset, accountIndex := common2.ReadUint32(pubData, offset) + offset, creatorAccountIndex := common2.ReadUint32(pubData, offset) + offset, creatorTreasuryRate := common2.ReadUint16(pubData, offset) + offset, nftIndex := common2.ReadUint40(pubData, offset) + offset, collectionId := common2.ReadUint16(pubData, offset) + offset, nftL1Address := common2.ReadAddress(pubData, offset) + offset, accountNameHash := common2.ReadBytes32(pubData, offset) + offset, creatorAccountNameHash := common2.ReadBytes32(pubData, offset) + offset, nftContentHash := common2.ReadBytes32(pubData, offset) + _, nftL1TokenId := common2.ReadUint256(pubData, offset) + tx = &types.FullExitNftTxInfo{ + TxType: txType, + AccountIndex: int64(accountIndex), + CreatorAccountIndex: int64(creatorAccountIndex), + CreatorTreasuryRate: int64(creatorTreasuryRate), + NftIndex: nftIndex, + CollectionId: int64(collectionId), + NftL1Address: nftL1Address, + 
AccountNameHash: accountNameHash, + CreatorAccountNameHash: creatorAccountNameHash, + NftContentHash: nftContentHash, + NftL1TokenId: nftL1TokenId, + } + return tx, nil +} diff --git a/common/checker/checker.go b/common/checker/checker.go deleted file mode 100644 index 94a053f78..000000000 --- a/common/checker/checker.go +++ /dev/null @@ -1,46 +0,0 @@ -// check option don't need to return error, because error is determined by business logic -// sometimes, the true case represents error. sometimes the false case indicates error, ever if that two case is generated by same checkFunc -package checker - -import ( - "math" - "strings" -) - -const ( - maxAccountNameLength = 30 - maxAccountNameLengthOmitSpace = 20 - - minAssetId = 0 - maxAssetId = math.MaxUint32 - - minAccountIndex = 0 - maxAccountIndex = math.MaxUint32 -) - -func CheckAccountName(accountName string) bool { - return len(accountName) > maxAccountNameLength -} - -func CheckFormatAccountName(accountName string) bool { - return len(accountName) > maxAccountNameLengthOmitSpace -} - -func CheckAssetId(assetId uint32) bool { - return assetId > maxAssetId -} - -func CheckPairIndex(pairIndex uint32) bool { - return pairIndex > maxAccountIndex -} - -func CheckOffset(offset, total uint32) bool { - return offset < total -} - -// Format AccountName and -func FormatSting(name string) string { - name = strings.ToLower(name) - name = strings.Replace(name, "\n", "", -1) - return name -} diff --git a/common/commonAsset/constants.go b/common/commonAsset/constants.go deleted file mode 100644 index e39f69d82..000000000 --- a/common/commonAsset/constants.go +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package commonAsset - -import ( - "math/big" -) - -const ( - // asset type - GeneralAssetType = 1 - LiquidityAssetType = 2 - NftAssetType = 3 - CollectionNonceAssetType = 4 - - BuyOfferType = 0 - SellOfferType = 1 -) - -var ( - ZeroBigInt = big.NewInt(0) -) diff --git a/common/commonAsset/types.go b/common/commonAsset/types.go deleted file mode 100644 index 5a521209f..000000000 --- a/common/commonAsset/types.go +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package commonAsset - -import ( - "encoding/json" - "math/big" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/errorcode" -) - -type AccountAsset struct { - AssetId int64 - Balance *big.Int - LpAmount *big.Int - OfferCanceledOrFinalized *big.Int -} - -func ConstructAccountAsset(assetId int64, balance *big.Int, lpAmount *big.Int, offerCanceledOrFinalized *big.Int) *AccountAsset { - return &AccountAsset{ - assetId, - balance, - lpAmount, - offerCanceledOrFinalized, - } -} - -func ParseAccountAsset(balance string) (asset *AccountAsset, err error) { - err = json.Unmarshal([]byte(balance), &asset) - if err != nil { - logx.Errorf("[ParseAccountAsset] unable to parse account asset") - return nil, errorcode.JsonErrUnmarshal - } - return asset, nil -} - -func (asset *AccountAsset) String() (info string) { - infoBytes, _ := json.Marshal(asset) - return string(infoBytes) -} - -type AccountInfo struct { - AccountId uint - AccountIndex int64 - AccountName string - PublicKey string - AccountNameHash string - L1Address string - Nonce int64 - CollectionNonce int64 - // map[int64]*AccountAsset - AssetInfo map[int64]*AccountAsset // key: index, value: balance - AssetRoot string - Status int -} - -func FromFormatAccountInfo(formatAccountInfo *AccountInfo) (accountInfo *account.Account, err error) { - assetInfoBytes, err := json.Marshal(formatAccountInfo.AssetInfo) - if err != nil { - return nil, errorcode.JsonErrMarshal - } - accountInfo = &account.Account{ - Model: gorm.Model{ - ID: formatAccountInfo.AccountId, - }, - AccountIndex: formatAccountInfo.AccountIndex, - AccountName: formatAccountInfo.AccountName, - PublicKey: formatAccountInfo.PublicKey, - AccountNameHash: formatAccountInfo.AccountNameHash, - L1Address: formatAccountInfo.L1Address, - Nonce: formatAccountInfo.Nonce, - CollectionNonce: formatAccountInfo.CollectionNonce, - AssetInfo: string(assetInfoBytes), - AssetRoot: formatAccountInfo.AssetRoot, - Status: formatAccountInfo.Status, - } - return accountInfo, nil -} - -func ToFormatAccountInfo(accountInfo *account.Account) (formatAccountInfo *AccountInfo, err error) { - var assetInfo map[int64]*AccountAsset - err = json.Unmarshal([]byte(accountInfo.AssetInfo), &assetInfo) - if err != nil { - return nil, errorcode.JsonErrUnmarshal - } - formatAccountInfo = &AccountInfo{ - AccountId: accountInfo.ID, - AccountIndex: accountInfo.AccountIndex, - AccountName: accountInfo.AccountName, - PublicKey: accountInfo.PublicKey, - AccountNameHash: accountInfo.AccountNameHash, - L1Address: accountInfo.L1Address, - Nonce: accountInfo.Nonce, - CollectionNonce: accountInfo.CollectionNonce, - AssetInfo: assetInfo, - AssetRoot: accountInfo.AssetRoot, - Status: accountInfo.Status, - } - return formatAccountInfo, nil -} - -type FormatAccountHistoryInfo struct { - AccountId uint - AccountIndex int64 - Nonce int64 - CollectionNonce int64 - // map[int64]*AccountAsset - AssetInfo map[int64]*AccountAsset - AssetRoot string - // map[int64]*Liquidity - L2BlockHeight int64 - Status int -} diff --git a/common/commonTx/constant.go b/common/commonTx/constant.go deleted file mode 100644 index 353bc63b2..000000000 --- a/common/commonTx/constant.go +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package commonTx - -const ( - TxTypeEmpty = iota - TxTypeRegisterZns - TxTypeCreatePair - TxTypeUpdatePairRate - TxTypeDeposit - TxTypeDepositNft - TxTypeTransfer - TxTypeSwap - TxTypeAddLiquidity - TxTypeRemoveLiquidity - TxTypeWithdraw - TxTypeCreateCollection - TxTypeMintNft - TxTypeTransferNft - TxTypeAtomicMatch - TxTypeCancelOffer - TxTypeWithdrawNft - TxTypeFullExit - TxTypeFullExitNft - TxTypeOffer -) diff --git a/common/commonTx/tx.go b/common/commonTx/tx.go deleted file mode 100644 index cb8e919de..000000000 --- a/common/commonTx/tx.go +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package commonTx - -import ( - "encoding/json" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" -) - -type ( - TransferTxInfo = legendTxTypes.TransferTxInfo - SwapTxInfo = legendTxTypes.SwapTxInfo - AddLiquidityTxInfo = legendTxTypes.AddLiquidityTxInfo - RemoveLiquidityTxInfo = legendTxTypes.RemoveLiquidityTxInfo - WithdrawTxInfo = legendTxTypes.WithdrawTxInfo - CreateCollectionTxInfo = legendTxTypes.CreateCollectionTxInfo - MintNftTxInfo = legendTxTypes.MintNftTxInfo - TransferNftTxInfo = legendTxTypes.TransferNftTxInfo - OfferTxInfo = legendTxTypes.OfferTxInfo - AtomicMatchTxInfo = legendTxTypes.AtomicMatchTxInfo - CancelOfferTxInfo = legendTxTypes.CancelOfferTxInfo - WithdrawNftTxInfo = legendTxTypes.WithdrawNftTxInfo -) - -type RegisterZnsTxInfo struct { - TxType uint8 - AccountIndex int64 - AccountName string - AccountNameHash []byte - PubKey string -} - -func ParseRegisterZnsTxInfo(txInfoStr string) (txInfo *RegisterZnsTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseRegisterZnsTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type CreatePairTxInfo struct { - TxType uint8 - PairIndex int64 - AssetAId int64 - AssetBId int64 - FeeRate int64 - TreasuryAccountIndex int64 - TreasuryRate int64 -} - -func ParseCreatePairTxInfo(txInfoStr string) (txInfo *CreatePairTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseCreatePairTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type UpdatePairRateTxInfo struct { - TxType uint8 - PairIndex int64 - FeeRate int64 - TreasuryAccountIndex int64 - TreasuryRate int64 -} - -func ParseUpdatePairRateTxInfo(txInfoStr string) (txInfo *UpdatePairRateTxInfo, err 
error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseUpdatePairRateTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type DepositTxInfo struct { - TxType uint8 - AccountIndex int64 - AccountNameHash []byte - AssetId int64 - AssetAmount *big.Int -} - -func ParseDepositTxInfo(txInfoStr string) (txInfo *DepositTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseDepositTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type DepositNftTxInfo struct { - TxType uint8 - AccountIndex int64 - NftIndex int64 - NftL1Address string - CreatorAccountIndex int64 - CreatorTreasuryRate int64 - NftContentHash []byte - NftL1TokenId *big.Int - AccountNameHash []byte - CollectionId int64 -} - -func ParseDepositNftTxInfo(txInfoStr string) (txInfo *DepositNftTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseDepositNftTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type FullExitTxInfo struct { - TxType uint8 - AccountIndex int64 - AccountNameHash []byte - AssetId int64 - AssetAmount *big.Int -} - -func ParseFullExitTxInfo(txInfoStr string) (txInfo *FullExitTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseFullExitTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -type FullExitNftTxInfo struct { - TxType uint8 - AccountIndex int64 - CreatorAccountIndex int64 - CreatorTreasuryRate int64 - NftIndex int64 - CollectionId int64 - NftL1Address string - AccountNameHash []byte - CreatorAccountNameHash []byte - NftContentHash []byte - NftL1TokenId *big.Int -} - -func ParseFullExitNftTxInfo(txInfoStr string) (txInfo *FullExitNftTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseFullExitNftTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseCreateCollectionTxInfo(txInfoStr string) (txInfo *CreateCollectionTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseCreateCollectionTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -// layer-2 transactions -func ParseTransferTxInfo(txInfoStr string) (txInfo *TransferTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseTransferTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseSwapTxInfo(txInfoStr string) (txInfo *SwapTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseSwapTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseAddLiquidityTxInfo(txInfoStr string) (txInfo *AddLiquidityTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseAddLiquidityTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseRemoveLiquidityTxInfo(txInfoStr string) (txInfo *RemoveLiquidityTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseRemoveLiquidityTxInfo] unable to 
parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseMintNftTxInfo(txInfoStr string) (txInfo *MintNftTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseMintNftTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseTransferNftTxInfo(txInfoStr string) (txInfo *TransferNftTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseTransferNftTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseAtomicMatchTxInfo(txInfoStr string) (txInfo *AtomicMatchTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseAtomicMatchTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseCancelOfferTxInfo(txInfoStr string) (txInfo *CancelOfferTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseCancelOfferTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseWithdrawTxInfo(txInfoStr string) (txInfo *WithdrawTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseWithdrawTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} - -func ParseWithdrawNftTxInfo(txInfoStr string) (txInfo *WithdrawNftTxInfo, err error) { - err = json.Unmarshal([]byte(txInfoStr), &txInfo) - if err != nil { - logx.Errorf("[ParseWithdrawNftTxInfo] unable to parse tx info: %s", err.Error()) - return nil, err - } - return txInfo, nil -} diff --git a/common/util/hashHelper.go b/common/hash.go similarity index 94% rename from common/util/hashHelper.go rename to common/hash.go index 008864e1f..b78123a47 100644 --- a/common/util/hashHelper.go +++ b/common/hash.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,7 +15,7 @@ * */ -package util +package common import ( "github.com/ethereum/go-ethereum/crypto" diff --git a/common/util/mathHelper.go b/common/math.go similarity index 82% rename from common/util/mathHelper.go rename to common/math.go index dc27cb133..1881b34fd 100644 --- a/common/util/mathHelper.go +++ b/common/math.go @@ -1,4 +1,4 @@ -package util +package common func MinInt64(x, y int64) int64 { if x < y { diff --git a/common/model/account/account.go b/common/model/account/account.go deleted file mode 100644 index 032178393..000000000 --- a/common/model/account/account.go +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package account - -import ( - "fmt" - "strings" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - AccountModel interface { - CreateAccountTable() error - DropAccountTable() error - IfAccountNameExist(name string) (bool, error) - IfAccountExistsByAccountIndex(accountIndex int64) (bool, error) - GetAccountByAccountIndex(accountIndex int64) (account *Account, err error) - GetVerifiedAccountByAccountIndex(accountIndex int64) (account *Account, err error) - GetConfirmedAccountByAccountIndex(accountIndex int64) (account *Account, err error) - GetAccountByPk(pk string) (account *Account, err error) - GetAccountByAccountName(accountName string) (account *Account, err error) - GetAccountByAccountNameHash(accountNameHash string) (account *Account, err error) - GetAccountsList(limit int, offset int64) (accounts []*Account, err error) - GetAccountsTotalCount() (count int64, err error) - GetAllAccounts() (accounts []*Account, err error) - GetLatestAccountIndex() (accountIndex int64, err error) - GetConfirmedAccounts() (accounts []*Account, err error) - } - - defaultAccountModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - /* - always keep the latest data of committer - */ - Account struct { - gorm.Model - AccountIndex int64 `gorm:"uniqueIndex"` - AccountName string `gorm:"uniqueIndex"` - PublicKey string `gorm:"uniqueIndex"` - AccountNameHash string `gorm:"uniqueIndex"` - L1Address string - Nonce int64 - CollectionNonce int64 - // map[int64]*AccountAsset - AssetInfo string - AssetRoot string - // 0 - registered, not committer 1 - committer - Status int - } -) - -func NewAccountModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) AccountModel { - return &defaultAccountModel{ - CachedConn: sqlc.NewConn(conn, c), - table: AccountTableName, - DB: db, - } -} - -func (*Account) TableName() string { - return AccountTableName -} - -/* - Func: CreateAccountTable - Params: - Return: err error - Description: create account table -*/ -func (m *defaultAccountModel) CreateAccountTable() error { - return m.DB.AutoMigrate(Account{}) -} - -/* - Func: DropAccountTable - Params: - Return: err error - Description: drop account table -*/ -func (m *defaultAccountModel) DropAccountTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: IfAccountNameExist - Params: name string - Return: bool, error - Description: check account name existence -*/ -func (m *defaultAccountModel) IfAccountNameExist(name string) (bool, error) { - var res int64 - dbTx := m.DB.Table(m.table).Where("account_name = ? and deleted_at is NULL", strings.ToLower(name)).Count(&res) - - if dbTx.Error != nil { - err := fmt.Sprintf("[account.IfAccountNameExist] %s", dbTx.Error) - logx.Error(err) - return true, dbTx.Error - } else if res == 0 { - return false, nil - } else if res != 1 { - logx.Errorf("[account.IfAccountNameExist] %s", errorcode.DbErrDuplicatedAccountName) - return true, errorcode.DbErrDuplicatedAccountName - } else { - return true, nil - } -} - -/* - Func: IfAccountExistsByAccountIndex - Params: accountIndex int64 - Return: bool, error - Description: check account index existence -*/ -func (m *defaultAccountModel) IfAccountExistsByAccountIndex(accountIndex int64) (bool, error) { - var res int64 - dbTx := m.DB.Table(m.table).Where("account_index = ? 
and deleted_at is NULL", accountIndex).Count(&res) - - if dbTx.Error != nil { - err := fmt.Sprintf("[account.IfAccountExistsByAccountIndex] %s", dbTx.Error) - logx.Error(err) - // TODO : to be modified - return true, dbTx.Error - } else if res == 0 { - return false, nil - } else if res != 1 { - logx.Errorf("[account.IfAccountExistsByAccountIndex] %s", errorcode.DbErrDuplicatedAccountIndex) - return true, errorcode.DbErrDuplicatedAccountIndex - } else { - return true, nil - } -} - -/* - Func: GetAccountByAccountIndex - Params: accountIndex int64 - Return: account Account, err error - Description: get account info by index -*/ - -func (m *defaultAccountModel) GetAccountByAccountIndex(accountIndex int64) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -func (m *defaultAccountModel) GetVerifiedAccountByAccountIndex(accountIndex int64) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ? and status = ?", accountIndex, AccountStatusVerified).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByPk - Params: pk string - Return: account Account, err error - Description: get account info by public key -*/ - -func (m *defaultAccountModel) GetAccountByPk(pk string) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("public_key = ?", pk).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByPk] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByPk] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByAccountName - Params: accountName string - Return: account Account, err error - Description: get account info by account name -*/ - -func (m *defaultAccountModel) GetAccountByAccountName(accountName string) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByAccountName] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByAccountName] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountsList - Params: limit int, offset int64 - Return: err error - Description: For API /api/v1/info/getAccountsList - -*/ -func (m *defaultAccountModel) GetAccountsList(limit int, offset int64) (accounts []*Account, err error) { - dbTx := m.DB.Table(m.table).Limit(limit).Offset(int(offset)).Order("account_index desc").Find(&accounts) - if dbTx.Error != nil { - 
logx.Errorf("[account.GetAccountsList] error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[account.GetAccountsList] Get Accounts Error") - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} - -/* - Func: GetAccountsTotalCount - Params: - Return: count int64, err error - Description: used for counting total accounts for explorer dashboard -*/ -func (m *defaultAccountModel) GetAccountsTotalCount() (count int64, err error) { - dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - logx.Errorf("[account.GetAccountsTotalCount] error: %s", dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[account.GetAccountsTotalCount] No Accounts in Account Table") - return 0, nil - } - return count, nil -} - -/* - Func: GetAllAccounts - Params: - Return: count int64, err error - Description: used for construct MPT -*/ -func (m *defaultAccountModel) GetAllAccounts() (accounts []*Account, err error) { - dbTx := m.DB.Table(m.table).Order("account_index").Find(&accounts) - if dbTx.Error != nil { - logx.Errorf("[account.GetAllAccounts] %s", dbTx.Error.Error()) - return accounts, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[account.GetAllAccounts] No Account in Account Table") - return accounts, nil - } - return accounts, nil -} - -/* - Func: GetLatestAccountIndex - Params: - Return: accountIndex int64, err error - Description: get max accountIndex -*/ -func (m *defaultAccountModel) GetLatestAccountIndex() (accountIndex int64, err error) { - dbTx := m.DB.Table(m.table).Select("account_index").Order("account_index desc").Limit(1).Find(&accountIndex) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetLatestAccountIndex] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[account.GetLatestAccountIndex] No Account in Account Table") - return 0, errorcode.DbErrNotFound - } - logx.Info(accountIndex) - return accountIndex, nil -} - -func (m *defaultAccountModel) GetAccountByAccountNameHash(accountNameHash string) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("account_name_hash = ?", accountNameHash).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByAccountNameHash] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByAccountNameHash] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -func (m *defaultAccountModel) GetConfirmedAccounts() (accounts []*Account, err error) { - dbTx := m.DB.Table(m.table).Where("status = ?", AccountStatusConfirmed).Order("account_index").Find(&accounts) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetConfirmedAccounts] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetConfirmedAccounts] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} - -func (m *defaultAccountModel) GetConfirmedAccountByAccountIndex(accountIndex int64) (account *Account, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ? 
and status = ?", accountIndex, AccountStatusConfirmed).Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[account.GetAccountByAccountIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} diff --git a/common/model/account/accountHistory.go b/common/model/account/accountHistory.go deleted file mode 100644 index 6e5de11ee..000000000 --- a/common/model/account/accountHistory.go +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package account - -import ( - "errors" - "fmt" - "strings" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - AccountHistoryModel interface { - CreateAccountHistoryTable() error - DropAccountHistoryTable() error - IfAccountNameExist(name string) (bool, error) - IfAccountExistsByAccountIndex(accountIndex int64) (bool, error) - GetAccountsByBlockHeight(blockHeight int64) (accounts []*AccountHistory, err error) - GetAccountByAccountIndex(accountIndex int64) (account *AccountHistory, err error) - GetLatestAccountNonceByAccountIndex(accountIndex int64) (nonce int64, err error) - GetAccountByPk(pk string) (account *AccountHistory, err error) - GetAccountByAccountName(accountName string) (account *AccountHistory, err error) - GetAccountByAccountNameHash(accountNameHash string) (account *AccountHistory, err error) - GetAccountsList(limit int, offset int64) (accounts []*AccountHistory, err error) - GetAccountsTotalCount() (count int64, err error) - GetLatestAccountIndex() (accountIndex int64, err error) - GetValidAccounts(height int64) (rowsAffected int64, accounts []*AccountHistory, err error) - GetValidAccountNums(height int64) (accounts int64, err error) - GetLatestAccountInfoByAccountIndex(accountIndex int64) (account *AccountHistory, err error) - } - - defaultAccountHistoryModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - AccountHistory struct { - gorm.Model - AccountIndex int64 `gorm:"index"` - Nonce int64 - CollectionNonce int64 - AssetInfo string - AssetRoot string - L2BlockHeight int64 - } -) - -func NewAccountHistoryModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) AccountHistoryModel { - return &defaultAccountHistoryModel{ - CachedConn: sqlc.NewConn(conn, c), - table: AccountHistoryTableName, - DB: db, - } -} - -func (*AccountHistory) TableName() string { - return AccountHistoryTableName -} - -/* - Func: CreateAccountHistoryTable - Params: - Return: err error - Description: create account history table -*/ -func (m *defaultAccountHistoryModel) CreateAccountHistoryTable() error { - return 
m.DB.AutoMigrate(AccountHistory{}) -} - -/* - Func: DropAccountHistoryTable - Params: - Return: err error - Description: drop account history table -*/ -func (m *defaultAccountHistoryModel) DropAccountHistoryTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: IfAccountNameExist - Params: name string - Return: bool, error - Description: check account name existence -*/ -func (m *defaultAccountHistoryModel) IfAccountNameExist(name string) (bool, error) { - var res int64 - dbTx := m.DB.Table(m.table).Where("account_name = ? and deleted_at is NULL", strings.ToLower(name)).Count(&res) - - if dbTx.Error != nil { - err := fmt.Sprintf("[accountHistory.IfAccountNameExist] %s", dbTx.Error) - logx.Error(err) - return true, errors.New(err) - } else if res == 0 { - return false, nil - } else if res != 1 { - logx.Errorf("[accountHistory.IfAccountNameExist] %s", errorcode.DbErrDuplicatedAccountName) - return true, errorcode.DbErrDuplicatedAccountName - } else { - return true, nil - } -} - -/* - Func: IfAccountExistsByAccountIndex - Params: accountIndex int64 - Return: bool, error - Description: check account index existence -*/ -func (m *defaultAccountHistoryModel) IfAccountExistsByAccountIndex(accountIndex int64) (bool, error) { - var res int64 - dbTx := m.DB.Table(m.table).Where("account_index = ? and deleted_at is NULL", accountIndex).Count(&res) - - if dbTx.Error != nil { - err := fmt.Sprintf("[accountHistory.IfAccountExistsByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return true, errors.New(err) - } else if res == 0 { - return false, nil - } else if res != 1 { - logx.Errorf("[accountHistory.IfAccountExistsByAccountIndex] %s", errorcode.DbErrDuplicatedAccountIndex) - return true, errorcode.DbErrDuplicatedAccountIndex - } else { - return true, nil - } -} - -/* - Func: GetAccountByAccountIndex - Params: accountIndex int64 - Return: account Account, err error - Description: get account info by index -*/ - -func (m *defaultAccountHistoryModel) GetAccountByAccountIndex(accountIndex int64) (account *AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Order("l2_block_height desc").Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[accountHistory.GetAccountByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[accountHistory.GetAccountByAccountIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -func (m *defaultAccountHistoryModel) GetLatestAccountNonceByAccountIndex(accountIndex int64) (nonce int64, err error) { - var account *AccountHistory - dbTx := m.DB.Table(m.table).Where("account_index = ? 
and nonce != -1", accountIndex).Order("l2_block_height desc").Find(&account) - if dbTx.Error != nil { - err := fmt.Sprintf("[accountHistory.GetAccountByAccountIndex] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[accountHistory.GetAccountByAccountIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return 0, errorcode.DbErrNotFound - } - return account.Nonce, nil -} - -/* - Func: GetAccountByPk - Params: pk string - Return: account Account, err error - Description: get account info by public key -*/ - -func (m *defaultAccountHistoryModel) GetAccountByPk(pk string) (account *AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("public_key = ?", pk).Find(&account) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByPk] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[accountHistory.GetAccountByPk] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByAccountName - Params: accountName string - Return: account Account, err error - Description: get account info by account name -*/ - -func (m *defaultAccountHistoryModel) GetAccountByAccountName(accountName string) (account *AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", errorcode.DbErrNotFound) - logx.Info(errInfo) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountsList - Params: limit int, offset int64 - Return: err error - Description: For API /api/v1/info/getAccountsList - -*/ -func (m *defaultAccountHistoryModel) GetAccountsList(limit int, offset int64) (accounts []*AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Limit(limit).Offset(int(offset)).Order("account_index desc").Find(&accounts) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountsList] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - logx.Error("[accountHistory.GetAccountsList] Get Accounts Error") - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} - -/* - Func: GetAccountsTotalCount - Params: - Return: count int64, err error - Description: used for counting total accounts for explorer dashboard -*/ -func (m *defaultAccountHistoryModel) GetAccountsTotalCount() (count int64, err error) { - dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountsTotalCount] %s", dbTx.Error) - logx.Error(errInfo) - return 0, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - logx.Error("[accountHistory.GetAccountsTotalCount] No Accounts in Account Table") - return 0, nil - } - return count, nil -} - -/* - Func: GetLatestAccountIndex - Params: - Return: accountIndex int64, err error - Description: get max accountIndex -*/ -func (m *defaultAccountHistoryModel) GetLatestAccountIndex() (accountIndex int64, err error) { - dbTx := m.DB.Table(m.table).Select("account_index").Order("account_index desc").Limit(1).Find(&accountIndex) - if 
dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetLatestAccountIndex] %s", dbTx.Error) - logx.Error(errInfo) - return 0, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - logx.Info("[accountHistory.GetLatestAccountIndex] No Account in Account Table") - return 0, errorcode.DbErrNotFound - } - logx.Info(accountIndex) - return accountIndex, nil -} - -/* - Func: CreateNewAccount - Params: nAccount *AccountHistory - Return: err error - Description: -*/ -func (m *defaultAccountHistoryModel) CreateNewAccount(nAccount *AccountHistory) (err error) { - dbTx := m.DB.Table(m.table).Create(&nAccount) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.CreateNewAccount] %s", dbTx.Error) - logx.Error(errInfo) - return errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - logx.Info("[accountHistory.CreateNewAccount] Create nAccount no rows affected") - return errors.New("[accountHistory.CreateNewAccount] Create nAccount no rows affected") - } - - return nil -} - -func (m *defaultAccountHistoryModel) GetValidAccounts(height int64) (rowsAffected int64, accounts []*AccountHistory, err error) { - - dbTx := m.DB.Table(m.table). - Raw("SELECT a.* FROM account_history a WHERE NOT EXISTS"+ - "(SELECT * FROM account_history WHERE account_index = a.account_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height AND l2_block_height != -1) "+ - "AND l2_block_height <= ? AND l2_block_height != -1 ORDER BY account_index", height, height). - Find(&accounts) - if dbTx.Error != nil { - logx.Errorf("[GetValidAccounts] unable to get related accounts: %s", dbTx.Error.Error()) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, accounts, nil - -} - -type countResult struct { - Count int `json:"count"` -} - -func (m *defaultAccountHistoryModel) GetValidAccountNums(height int64) (accounts int64, err error) { - var countResult countResult - dbTx := m.DB.Table(m.table). - Raw("SELECT count(a.*) FROM account_history a WHERE NOT EXISTS"+ - "(SELECT * FROM account_history WHERE account_index = a.account_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height AND l2_block_height != -1) "+ - "AND l2_block_height <= ? AND l2_block_height != -1", height, height). 
- Scan(&countResult) - if dbTx.Error != nil { - logx.Errorf("[GetValidAccountNums] unable to get related accounts: %s", dbTx.Error.Error()) - return 0, dbTx.Error - } - return int64(countResult.Count), nil -} - -func (m *defaultAccountHistoryModel) GetLatestAccountInfoByAccountIndex(accountIndex int64) (account *AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Order("l2_block_height desc").Find(&account) - if dbTx.Error != nil { - logx.Errorf("[GetLatestAccountInfoByAccountIndex] unable to get related account: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -func (m *defaultAccountHistoryModel) GetAccountByAccountNameHash(accountNameHash string) (account *AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("account_name_hash = ?", accountNameHash).Find(&account) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", errorcode.DbErrNotFound) - logx.Info(errInfo) - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -func (m *defaultAccountHistoryModel) GetAccountsByBlockHeight(blockHeight int64) (accounts []*AccountHistory, err error) { - dbTx := m.DB.Table(m.table).Where("l2_block_height = ?", blockHeight).Find(&accounts) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errors.New(errInfo) - } else if dbTx.RowsAffected == 0 { - errInfo := fmt.Sprintf("[accountHistory.GetAccountByAccountName] %s", errorcode.DbErrNotFound) - logx.Info(errInfo) - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} diff --git a/common/model/account/constant.go b/common/model/account/constant.go deleted file mode 100644 index e40e5ffa2..000000000 --- a/common/model/account/constant.go +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package account - -const ( - AccountTableName = `account` - AccountHistoryTableName = `account_history` -) - -const ( - AccountStatusPending = iota - AccountStatusConfirmed - AccountStatusVerified -) diff --git a/common/model/assetInfo/assetInfo.go b/common/model/assetInfo/assetInfo.go deleted file mode 100644 index adbf98e54..000000000 --- a/common/model/assetInfo/assetInfo.go +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package assetInfo - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - AssetInfoModel interface { - CreateAssetInfoTable() error - DropAssetInfoTable() error - CreateAssetInfo(l2AssetInfo *AssetInfo) (bool, error) - CreateAssetsInfoInBatches(l2AssetsInfo []*AssetInfo) (rowsAffected int64, err error) - GetAssetsCount() (assetCount uint32, err error) - GetAssetsList() (res []*AssetInfo, err error) - GetAssetsListWithoutL1AssetsInfo() (res []*AssetInfo, err error) - GetSimpleAssetInfoByAssetId(assetId int64) (res *AssetInfo, err error) - GetAssetIdCount() (res int64, err error) - GetAssetInfoBySymbol(symbol string) (res *AssetInfo, err error) - GetAssetByAddress(address string) (info *AssetInfo, err error) - } - - defaultAssetInfoModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - AssetInfo struct { - gorm.Model - AssetId uint32 `gorm:"uniqueIndex"` - AssetName string - AssetSymbol string - L1Address string - Decimals uint32 - Status uint32 - IsGasAsset uint32 - } -) - -func (*AssetInfo) TableName() string { - return AssetInfoTableName -} - -func NewAssetInfoModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) AssetInfoModel { - return &defaultAssetInfoModel{ - CachedConn: sqlc.NewConn(conn, c), - table: AssetInfoTableName, - DB: db, - } -} - -/* - Func: CreateL2AssetInfoTable - Params: - Return: err error - Description: create l2 asset info table -*/ -func (m *defaultAssetInfoModel) CreateAssetInfoTable() error { - return m.DB.AutoMigrate(AssetInfo{}) -} - -/* - Func: DropL2AssetInfoTable - Params: - Return: err error - Description: drop l2 asset info table -*/ -func (m *defaultAssetInfoModel) DropAssetInfoTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetL2AssetsList - Params: - Return: err error - Description: create account table -*/ -func (m *defaultAssetInfoModel) GetAssetsList() (res []*AssetInfo, err error) { - dbTx := m.DB.Table(m.table).Find(&res) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2asset.GetL2AssetsList] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2asset.GetL2AssetsList] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return res, nil -} - -/* - Func: GetL2AssetsListWithoutL1AssetsInfo - Params: - Return: err error - Description: GetL2AssetsListWithoutL1AssetsInfo -*/ -func (m *defaultAssetInfoModel) GetAssetsListWithoutL1AssetsInfo() (res []*AssetInfo, err error) { - dbTx := m.DB.Table(m.table).Find(&res) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2asset.GetL2AssetsList] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2asset.GetL2AssetsList] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - 
return res, nil -} - -/* - Func: CreateL2AssetInfo - Params: l2AssetInfo *L2AssetInfo - Return: bool, error - Description: create L2AssetsInfo batches -*/ -func (m *defaultAssetInfoModel) CreateAssetInfo(l2AssetInfo *AssetInfo) (bool, error) { - dbTx := m.DB.Table(m.table).Create(l2AssetInfo) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2asset.CreateL2AssetInfo] %s", dbTx.Error) - logx.Error(err) - return false, dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2asset.CreateL2AssetInfo] %s", errorcode.DbErrFailToCreateAssetInfo) - logx.Error(err) - return false, errorcode.DbErrFailToCreateAssetInfo - } - return true, nil -} - -/* - Func: CreateL2AssetsInfoInBatches - Params: []*L2AssetInfo - Return: rowsAffected int64, err error - Description: create L2AssetsInfo batches -*/ -func (m *defaultAssetInfoModel) CreateAssetsInfoInBatches(l2AssetsInfo []*AssetInfo) (rowsAffected int64, err error) { - dbTx := m.DB.Table(m.table).CreateInBatches(l2AssetsInfo, len(l2AssetsInfo)) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2asset.CreateL2AssetsInfoInBatches] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return 0, nil - } - return dbTx.RowsAffected, nil -} - -/* - Func: GetL2AssetsCount - Params: - Return: latestHeight int64, err error - Description: get latest l1asset id to active accounts -*/ -func (m *defaultAssetInfoModel) GetAssetsCount() (assetCount uint32, err error) { - var asset *AssetInfo - dbTx := m.DB.Table(m.table).Order("l2_asset_id desc").First(&asset) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2asset.GetL2AssetsCount] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2asset.GetL2AssetsCount] %s", errorcode.DbErrNotFound) - logx.Error(err) - return 0, errorcode.DbErrNotFound - } - return asset.AssetId + 1, nil -} - -/* - Func: GetSimpleL2AssetInfoByAssetId - Params: assetId int64 - Return: L2AssetInfo, error - Description: get layer-2 asset info by assetId -*/ -func (m *defaultAssetInfoModel) GetSimpleAssetInfoByAssetId(assetId int64) (res *AssetInfo, err error) { - dbTx := m.DB.Table(m.table).Where("asset_id = ?", assetId).Find(&res) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[l2asset.GetL2AssetInfoByAssetId] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - errInfo := fmt.Sprintf("[l2asset.GetL2AssetInfoByAssetId] %s", errorcode.DbErrNotFound) - logx.Error(errInfo) - return nil, errorcode.DbErrNotFound - } - return res, nil -} - -/* - Func: GetAssetIdCount - Params: - Return: res int64, err error - Description: get l2 asset id count -*/ -func (m *defaultAssetInfoModel) GetAssetIdCount() (res int64, err error) { - dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&res) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[l2asset.GetAssetIdCount] %s", dbTx.Error) - logx.Error(errInfo) - // TODO : to be modified - return 0, dbTx.Error - } else { - return res, nil - } -} - -/* - Func: GetL2AssetInfoBySymbol - Params: symbol string - Return: res *L2AssetInfo, err error - Description: get l2 asset info by l2 symbol -*/ -func (m *defaultAssetInfoModel) GetAssetInfoBySymbol(symbol string) (res *AssetInfo, err error) { - dbTx := m.DB.Table(m.table).Where("l2_symbol = ?", symbol).Find(&res) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[l2asset.GetL2AssetInfoBySymbol] %s", dbTx.Error) - logx.Error(errInfo) - return nil, errorcode.DbErrSqlOperation - } - if 
dbTx.RowsAffected == 0 { - errInfo := fmt.Sprintf("[l2asset.GetL2AssetInfoBySymbol] %s", errorcode.DbErrNotFound) - logx.Error(errInfo) - return nil, errorcode.DbErrNotFound - } - return res, nil -} - -func (m *defaultAssetInfoModel) GetAssetByAddress(address string) (info *AssetInfo, err error) { - dbTx := m.DB.Table(m.table).Where("asset_address = ?", address).Find(&info) - if dbTx.Error != nil { - logx.Errorf("fail to get asset by address: %s, error: %s", address, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return info, nil -} diff --git a/common/model/assetInfo/constant.go b/common/model/assetInfo/constant.go deleted file mode 100644 index 17e288262..000000000 --- a/common/model/assetInfo/constant.go +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package assetInfo - -const ( - AssetInfoTableName = `asset_info` - - StatusActive uint32 = 0 - StatusInactive uint32 = 1 -) - -// flag: asset could be used as gasfee or not -const ( - NotGasAsset = 0 - IsGasAsset = 1 -) diff --git a/common/model/basic/connection.go b/common/model/basic/connection.go deleted file mode 100644 index 57cad3425..000000000 --- a/common/model/basic/connection.go +++ /dev/null @@ -1,24 +0,0 @@ -package basic - -import ( - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" -) - -var ( - dsn = "host=localhost user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable" - DB, _ = gorm.Open(postgres.Open(dsn), &gorm.Config{}) - DbInfo, _ = DB.DB() - Connection = sqlx.NewSqlConnFromDB(DbInfo) - CacheConf = []cache.NodeConf{{ - RedisConf: redis.RedisConf{ - Host: "127.0.0.1:6379", - Type: "node", - Pass: "myredis", - }, - Weight: 10, - }} -) diff --git a/common/model/block/block.go b/common/model/block/block.go deleted file mode 100644 index 97cb03e8e..000000000 --- a/common/model/block/block.go +++ /dev/null @@ -1,937 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package block - -import ( - "encoding/json" - "errors" - "fmt" - "sort" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/errorcode" -) - -var ( - cacheBlockIdPrefix = "cache::block:id:" - - CacheBlockStatusPrefix = "cache::block:blockStatus:" - cacheBlockListLimitPrefix = "cache::block:blockList:" - cacheBlockCommittedCountPrefix = "cache::block:committed_count" - cacheBlockVerifiedCountPrefix = "cache::block:verified_count" -) - -type ( - BlockModel interface { - CreateBlockTable() error - DropBlockTable() error - GetBlocksList(limit int64, offset int64) (blocks []*Block, err error) - GetBlocksBetween(start int64, end int64) (blocks []*Block, err error) - GetBlocksForSender(status int, limit int) (blocks []*Block, err error) - GetBlocksForSenderBetween(start int64, end int64, status int, maxBlocksCount int) (blocks []*Block, err error) - GetBlocksForSenderHigherThanBlockHeight(blockHeight int64, status int, limit int) (blocks []*Block, err error) - GetBlocksLowerThanHeight(end int64, status int) (rowsAffected int64, blocks []*Block, err error) - GetBlocksHigherThanBlockHeight(blockHeight int64) (blocks []*Block, err error) - GetBlockByCommitment(blockCommitment string) (block *Block, err error) - GetBlockByBlockHeight(blockHeight int64) (block *Block, err error) - GetBlockByBlockHeightWithoutTx(blockHeight int64) (block *Block, err error) - GetNotVerifiedOrExecutedBlocks() (blocks []*Block, err error) - GetCommittedBlocksCount() (count int64, err error) - GetVerifiedBlocksCount() (count int64, err error) - GetLatestVerifiedBlockHeight() (height int64, err error) - GetBlocksForProverBetween(start, end int64) (blocks []*Block, err error) - CreateBlock(block *Block) error - CreateGenesisBlock(block *Block) error - UpdateBlock(block *Block) error - GetCurrentBlockHeight() (blockHeight int64, err error) - GetBlocksTotalCount() (count int64, err error) - UpdateBlockStatusCacheByBlockHeight(blockHeight int64, blockStatusInfo *BlockStatusInfo) error - GetBlockStatusCacheByBlockHeight(blockHeight int64) (blockStatusInfo *BlockStatusInfo, err error) - CreateBlockForCommitter( - oBlock *Block, - oBlockForCommit *blockForCommit.BlockForCommit, - pendingMempoolTxs []*mempool.MempoolTx, - pendingDeleteMempoolTxs []*mempool.MempoolTx, - pendingUpdateAccounts []*account.Account, - pendingNewAccountHistories []*account.AccountHistory, - pendingUpdateLiquidity []*liquidity.Liquidity, - pendingNewLiquidityHistories []*liquidity.LiquidityHistory, - pendingUpdateNft []*nft.L2Nft, - pendingNewNftHistories []*nft.L2NftHistory, - pendingNewNftWithdrawHistories []*nft.L2NftWithdrawHistory, - ) (err error) - } - - defaultBlockModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - RedisConn *redis.Redis - } - - Block struct { - gorm.Model - BlockSize uint16 - // pubdata - BlockCommitment string - BlockHeight int64 `gorm:"uniqueIndex"` - StateRoot string - PriorityOperations int64 - PendingOnChainOperationsHash string - 
PendingOnChainOperationsPubData string - CommittedTxHash string - CommittedAt int64 - VerifiedTxHash string - VerifiedAt int64 - Txs []*tx.Tx `gorm:"foreignKey:BlockId"` - BlockStatus int64 - } -) - -func NewBlockModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB, redisConn *redis.Redis) BlockModel { - return &defaultBlockModel{ - CachedConn: sqlc.NewConn(conn, c), - table: BlockTableName, - DB: db, - RedisConn: redisConn, - } -} - -func (*Block) TableName() string { - return BlockTableName -} - -/* - Func: CreateBlockTable - Params: - Return: err error - Description: create Block table -*/ - -func (m *defaultBlockModel) CreateBlockTable() error { - return m.DB.AutoMigrate(Block{}) -} - -/* - Func: DropBlockTable - Params: - Return: err error - Description: drop block table -*/ - -func (m *defaultBlockModel) DropBlockTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetBlocksList - Params: limit int64, offset int64 - Return: err error - Description: For API /api/v1/block/getBlocksList - -*/ -func (m *defaultBlockModel) GetBlocksList(limit int64, offset int64) (blocks []*Block, err error) { - var ( - //blockForeignKeyColumn = `BlockDetails` - txForeignKeyColumn = `Txs` - ) - key := fmt.Sprintf("%s%v:%v", cacheBlockListLimitPrefix, limit, offset) - cacheBlockListLimitVal, err := m.RedisConn.Get(key) - - if err != nil { - errInfo := fmt.Sprintf("[block.GetBlocksList] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - } else if cacheBlockListLimitVal == "" { - dbTx := m.DB.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("block_height desc").Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksList] Get Blocks Error") - return nil, errorcode.DbErrNotFound - } - - for _, block := range blocks { - cacheBlockIdKey := fmt.Sprintf("%s%v", cacheBlockIdPrefix, block.ID) - cacheBlockIdVal, err := m.RedisConn.Get(cacheBlockIdKey) - if err != nil { - errInfo := fmt.Sprintf("[block.GetBlocksList] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - } else if cacheBlockIdVal == "" { - /* - err = m.DB.Model(&block).Association(blockForeignKeyColumn).Find(&block.BlockDetails) - if err != nil { - logx.Error("[block.GetBlocksList] Get Associate BlockDetails Error") - return nil, err - } - */ - txLength := m.DB.Model(&block).Association(txForeignKeyColumn).Count() - block.Txs = make([]*tx.Tx, txLength) - - // json string - jsonString, err := json.Marshal(block) - if err != nil { - logx.Errorf("[block.GetBlocksList] json.Marshal Error: %s, value: %v", err.Error(), block) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 60) - if err != nil { - logx.Errorf("[block.GetBlocksList] redis set error: %s", err.Error()) - return nil, err - } - } else { - // json string unmarshal - var ( - nBlock *Block - ) - err = json.Unmarshal([]byte(cacheBlockIdVal), &nBlock) - if err != nil { - logx.Errorf("[tblock.GetBlocksList] json.Unmarshal error: %s, value : %s", err.Error(), cacheBlockIdVal) - return nil, err - } - block = nBlock - } - } - // json string - jsonString, err := json.Marshal(blocks) - if err != nil { - logx.Errorf("[block.GetBlocksList] json.Marshal Error: %s, value: %v", err.Error(), blocks) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 30) - if err != nil { - 
logx.Errorf("[block.GetBlocksList] redis set error: %s", err.Error()) - return nil, err - } - - } else { - // json string unmarshal - var ( - nBlocks []*Block - ) - err = json.Unmarshal([]byte(cacheBlockListLimitVal), &nBlocks) - if err != nil { - logx.Errorf("[block.GetBlocksList] json.Unmarshal error: %s, value : %s", err.Error(), cacheBlockListLimitVal) - return nil, err - } - blocks = nBlocks - } - - return blocks, nil -} - -/* - Func: GetBlocksForSender - Params: limit int64 - Return: err error - Description: For API /api/v1/block/getBlocksList - -*/ -func (m *defaultBlockModel) GetBlocksForSender(status int, limit int) (blocks []*Block, err error) { - dbTx := m.DB.Table(m.table).Where("block_status = ?", status).Limit(limit).Order("block_height").Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksList] Get Blocks Error") - return nil, errorcode.DbErrNotFound - } - return blocks, nil -} - -func (m *defaultBlockModel) GetBlocksForSenderBetween(start int64, end int64, status int, maxBlocksCount int) (blocks []*Block, err error) { - dbTx := m.DB.Table(m.table).Where("block_status = ? AND block_height > ? AND block_height <= ?", status, start, end). - Order("block_height"). - Limit(maxBlocksCount). - Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksList] Get Blocks Error") - return nil, errorcode.DbErrNotFound - } - return blocks, nil -} - -func (m *defaultBlockModel) GetBlocksBetween(start int64, end int64) (blocks []*Block, err error) { - var ( - txForeignKeyColumn = `Txs` - txDetailsForeignKeyColumn = `TxDetails` - ) - dbTx := m.DB.Table(m.table).Where("block_height >= ? AND block_height <= ?", start, end). - Order("block_height"). - Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksList] Blocks not found") - return nil, errorcode.DbErrNotFound - } - - for _, block := range blocks { - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - if err != nil { - logx.Error("[block.GetBlocksList] Get Associate Txs Error") - return nil, err - } - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - - for _, txInfo := range block.Txs { - err = m.DB.Model(&txInfo).Association(txDetailsForeignKeyColumn).Find(&txInfo.TxDetails) - if err != nil { - logx.Error("[block.GetBlocksList] Get Associate Tx details Error") - return nil, err - } - sort.Slice(txInfo.TxDetails, func(i, j int) bool { - return txInfo.TxDetails[i].Order < txInfo.TxDetails[j].Order - }) - } - } - return blocks, nil -} - -func (m *defaultBlockModel) GetBlocksLowerThanHeight(end int64, status int) (rowsAffected int64, blocks []*Block, err error) { - dbTx := m.DB.Table(m.table).Where("block_status = ? 
AND block_height <= ?", status, end).Order("block_height").Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksLowerThanHeight] %s", dbTx.Error.Error()) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, blocks, nil -} - -func (m *defaultBlockModel) GetBlocksForSenderHigherThanBlockHeight(blockHeight int64, status int, limit int) (blocks []*Block, err error) { - var ( - txForeignKeyColumn = `Txs` - ) - dbTx := m.DB.Table(m.table).Limit(limit).Where("block_height > ? AND block_status = ?", blockHeight, status).Order("block_height").Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksList] Get Blocks Error") - return nil, errorcode.DbErrNotFound - } - for _, block := range blocks { - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - if err != nil { - logx.Error("[block.GetBlocksList] Get Associate Txs Error") - return nil, err - } - } - return blocks, nil -} - -/* - Func: GetBlocksList - Params: limit int64, offset int64 - Return: err error - Description: For API /api/v1/block/getBlocksList - -*/ -func (m *defaultBlockModel) GetBlocksHigherThanBlockHeight(blockHeight int64) (blocks []*Block, err error) { - var ( - txForeignKeyColumn = `Txs` - ) - dbTx := m.DB.Table(m.table).Where("block_height > ?", blockHeight).Order("block_height desc").Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksHigherThanBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlocksHigherThanBlockHeight] Get Blocks Error") - return nil, errorcode.DbErrNotFound - } - for _, block := range blocks { - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - if err != nil { - logx.Error("[block.GetBlocksHigherThanBlockHeight] Get Associate Txs Error") - return nil, err - } - } - return blocks, nil -} - -/* - Func: GetBlockByCommitment - Params: blockCommitment string - Return: err error - Description: For API /api/v1/block/getBlockByCommitment -*/ -func (m *defaultBlockModel) GetBlockByCommitment(blockCommitment string) (block *Block, err error) { - var ( - txForeignKeyColumn = `Txs` - ) - dbTx := m.DB.Table(m.table).Where("block_commitment = ?", blockCommitment).Find(&block) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlockByCommitment] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlockByCommitment] Get Block Error") - return nil, errorcode.DbErrNotFound - } - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - if err != nil { - logx.Error("[block.GetBlockByCommitment] Get Associate Txs Error") - return nil, err - } - return block, nil -} - -/* - Func: GetBlockByBlockStatus - Params: blockStatus int64 - Return: err error -*/ -func (m *defaultBlockModel) GetNotVerifiedOrExecutedBlocks() (blocks []*Block, err error) { - var ( - txForeignKeyColumn = `Txs` - ) - dbTx := m.DB.Table(m.table).Where("block_status < ?", StatusVerifiedAndExecuted).Find(&blocks) - if dbTx.Error != 
nil { - logx.Errorf("[block.GetBlockByBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlockByBlockHeight] Get Block Error") - return nil, errorcode.DbErrNotFound - } - for _, block := range blocks { - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - if err != nil { - logx.Error("[block.GetBlockByBlockHeight] Get Associate Txs Error") - return nil, err - } - } - return blocks, nil -} - -/* - Func: GetBlockByBlockHeight - Params: blockHeight int64 - Return: err error - Description: For API /api/v1/block/getBlockByBlockHeight -*/ -func (m *defaultBlockModel) GetBlockByBlockHeight(blockHeight int64) (block *Block, err error) { - var ( - txForeignKeyColumn = `Txs` - ) - dbTx := m.DB.Table(m.table).Where("block_height = ?", blockHeight).Find(&block) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlockByBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlockByBlockHeight] Get Block Error") - return nil, errorcode.DbErrNotFound - } - err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) - sort.Slice(block.Txs, func(i, j int) bool { - return block.Txs[i].TxIndex < block.Txs[j].TxIndex - }) - if err != nil { - logx.Error("[block.GetBlockByBlockHeight] Get Associate Txs Error") - return nil, err - } - - return block, nil -} - -/* - Func: GetBlockByBlockHeightWithoutTx - Params: blockHeight int64 - Return: err error - Description: For API /api/v1/block/getBlockByBlockHeight -*/ -func (m *defaultBlockModel) GetBlockByBlockHeightWithoutTx(blockHeight int64) (block *Block, err error) { - dbTx := m.DB.Table(m.table).Where("block_height = ?", blockHeight).Find(&block) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlockByBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[block.GetBlockByBlockHeight] Get Block Error") - return nil, errorcode.DbErrNotFound - } - return block, nil -} - -/* - Func: GetCommitedBlocksCount - Params: - Return: count int64, err error - Description: For API /api/v1/info/getLayer2BasicInfo -*/ -func (m *defaultBlockModel) GetCommittedBlocksCount() (count int64, err error) { - key := fmt.Sprintf("%s", cacheBlockCommittedCountPrefix) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[block.GetCommittedBlocksCount] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return 0, err - - } else if val == "" { - dbTx := m.DB.Table(m.table).Where("block_status >= ? 
and deleted_at is NULL", StatusCommitted).Count(&count) - - if dbTx.Error != nil { - if dbTx.Error == errorcode.DbErrNotFound { - return 0, nil - } - logx.Error("[block.GetCommittedBlocksCount] Get block Count Error") - return 0, err - } - - err = m.RedisConn.Setex(key, strconv.FormatInt(count, 10), 120) - if err != nil { - logx.Errorf("[block.GetCommittedBlocksCount] redis set error: %s", err.Error()) - return 0, err - } - } else { - count, err = strconv.ParseInt(val, 10, 64) - if err != nil { - logx.Errorf("[block.GetCommittedBlocksCount] strconv.ParseInt error: %s, value : %s", err.Error(), val) - return 0, err - } - } - - return count, nil - -} - -/* - Func: GetVerifiedBlocksCount - Params: - Return: count int64, err error - Description: For API /api/v1/info/getLayer2BasicInfo -*/ -func (m *defaultBlockModel) GetVerifiedBlocksCount() (count int64, err error) { - key := fmt.Sprintf("%s", cacheBlockVerifiedCountPrefix) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[block.GetVerifiedBlocksCount] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return 0, err - - } else if val == "" { - dbTx := m.DB.Table(m.table).Where("block_status = ? and deleted_at is NULL", StatusVerifiedAndExecuted).Count(&count) - - if dbTx.Error != nil { - if dbTx.Error == errorcode.DbErrNotFound { - return 0, nil - } - logx.Error("[block.GetVerifiedBlocksCount] Get block Count Error") - return 0, err - } - - err = m.RedisConn.Setex(key, strconv.FormatInt(count, 10), 120) - if err != nil { - logx.Errorf("[block.GetVerifiedBlocksCount] redis set error: %s", err.Error()) - return 0, err - } - } else { - count, err = strconv.ParseInt(val, 10, 64) - if err != nil { - logx.Errorf("[block.GetVerifiedBlocksCount] strconv.ParseInt error: %s, value : %s", err.Error(), val) - return 0, err - } - } - - return count, nil -} - -/* - Func: CreateBlock - Params: *Block - Return: error - Description: Insert Block when committerProto completing packing new L2Block. -*/ -func (m *defaultBlockModel) CreateBlock(block *Block) error { - dbTx := m.DB.Table(m.table).Create(block) - - if dbTx.Error != nil { - logx.Errorf("[block.CreateBlock] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[block.CreateBlock] Create Invalid Block") - return errorcode.DbErrFailToCreateBlock - } - return nil -} - -func (m *defaultBlockModel) CreateGenesisBlock(block *Block) error { - dbTx := m.DB.Table(m.table).Omit("BlockDetails").Omit("Txs").Create(block) - - if dbTx.Error != nil { - logx.Errorf("[block.CreateBlock] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[block.CreateBlock] Create Invalid Block") - return errorcode.DbErrFailToCreateBlock - } - return nil -} - -/* - Func: UpdateBlock - Params: *Block - Return: error - Description: Update Block when committer completing packing new L2Block. 
And insert txVerification -*/ -func (m *defaultBlockModel) UpdateBlock(block *Block) error { - dbTx := m.DB.Save(block) - - if dbTx.Error != nil { - logx.Errorf("[block.UpdateBlock] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[block.UpdateBlock] Update Invalid Block") - return errorcode.DbErrFailToCreateBlock - } - return nil -} - -/* - Func: GetCurrentBlockHeight - Params: - Return: blockHeight int64, err error - Description: get latest block height -*/ -func (m *defaultBlockModel) GetCurrentBlockHeight() (blockHeight int64, err error) { - dbTx := m.DB.Table(m.table).Select("block_height").Order("block_height desc").Limit(1).Find(&blockHeight) - if dbTx.Error != nil { - logx.Errorf("[block.GetCurrentBlockHeight] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[block.GetCurrentBlockHeight] No block yet") - return 0, errorcode.DbErrNotFound - } - return blockHeight, nil -} - -/* - Func: GetBlocksTotalCount - Params: - Return: count int64, err error - Description: used for counting total blocks for explorer dashboard -*/ -func (m *defaultBlockModel) GetBlocksTotalCount() (count int64, err error) { - dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - logx.Errorf("[block.GetBlocksTotalCount] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[block.GetBlocksTotalCount] No Blocks in Block Table") - return 0, nil - } - return count, nil -} - -/* - Func: UpdateBlockStatusCacheByBlockHeight - Params: blockHeight int64, blockStatus int64 - Return: err - Description: update blockStatus cache by blockHeight -*/ -func (m *defaultBlockModel) UpdateBlockStatusCacheByBlockHeight(blockHeight int64, blockStatusInfo *BlockStatusInfo) error { - key := fmt.Sprintf("%s%v", CacheBlockStatusPrefix, blockHeight) - - jsonBytes, err := json.Marshal(blockStatusInfo) - if err != nil { - logx.Errorf("[blockModel.UpdateBlockStatusCacheByBlockHeight] json.Marshal Error: %s, value: %v", err.Error(), blockStatusInfo) - return err - } - err = m.RedisConn.Setex(key, string(jsonBytes), 60) - if err != nil { - logx.Errorf("[blockModel.UpdateBlockStatusCacheByBlockHeight] error: %s", err.Error()) - return err - } - - logx.Infof("[blockModel.UpdateBlockStatusCacheByBlockHeight] Set Block Status Cache, BlockHeight: %d, BlockStatus: %s", blockHeight, string(jsonBytes)) - - return nil -} - -/* - Func: GetBlockStatusCacheByBlockHeight - Params: blockHeight int64 - Return: blockStatus int64, err - Description: get blockStatus cache by blockHeight -*/ - -type BlockStatusInfo struct { - BlockStatus int64 - CommittedAt int64 - VerifiedAt int64 -} - -func (m *defaultBlockModel) GetBlockStatusCacheByBlockHeight(blockHeight int64) (blockStatusInfo *BlockStatusInfo, err error) { - - key := fmt.Sprintf("%s%v", CacheBlockStatusPrefix, blockHeight) - blockStatusInfoFromCache, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[blockModel.GetBlockStatusCacheByBlockHeight] %s %s", key, err) - logx.Error(errInfo) - return blockStatusInfo, err - } else if blockStatusInfoFromCache == "" { - errInfo := fmt.Sprintf("[blockModel.GetBlockStatusCacheByBlockHeight] %s not found", key) - logx.Info(errInfo) - return blockStatusInfo, errorcode.DbErrNotFound - } else { - err = json.Unmarshal([]byte(blockStatusInfoFromCache), &blockStatusInfo) - if err != nil { - logx.Errorf("[txVerification.GetBlockStatusCacheByBlockHeight] json.Unmarshal error: 
%s, value : %s", err.Error(), blockStatusInfoFromCache) - return nil, err - } - } - - return blockStatusInfo, nil -} - -func (m *defaultBlockModel) CreateBlockForCommitter( - oBlock *Block, - oBlockForCommit *blockForCommit.BlockForCommit, - pendingMempoolTxs []*mempool.MempoolTx, - pendingDeleteMempoolTxs []*mempool.MempoolTx, - pendingUpdateAccounts []*account.Account, - pendingNewAccountHistories []*account.AccountHistory, - pendingUpdateLiquiditys []*liquidity.Liquidity, - pendingNewLiquidityHistories []*liquidity.LiquidityHistory, - pendingUpdateNfts []*nft.L2Nft, - pendingNewNftHistories []*nft.L2NftHistory, - pendingNewNftWithdrawHistory []*nft.L2NftWithdrawHistory, -) (err error) { - err = m.DB.Transaction(func(tx *gorm.DB) error { // transact - // create block - if oBlock != nil { - dbTx := tx.Table(m.table).Create(oBlock) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to create block: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - blockInfo, err := json.Marshal(oBlock) - if err != nil { - logx.Errorf("[CreateBlockForCommitter] unable to marshal block") - return err - } - logx.Errorf("[CreateBlockForCommitter] invalid block info: %s", string(blockInfo)) - return errors.New("[CreateBlockForCommitter] invalid block info") - } - } - if oBlockForCommit != nil { - // create block for commit - dbTx := tx.Table(blockForCommit.BlockForCommitTableName).Create(oBlockForCommit) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to create block for commit: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - blockInfo, err := json.Marshal(oBlockForCommit) - if err != nil { - logx.Errorf("[CreateBlockForCommitter] unable to marshal block for commit") - return err - } - logx.Errorf("[CreateBlockForCommitter] invalid block for commit info: %s", string(blockInfo)) - return errors.New("[CreateBlockForCommitter] invalid block for commit info") - } - } - // update mempool - for _, mempoolTx := range pendingMempoolTxs { - dbTx := tx.Table(mempool.MempoolTableName).Where("id = ?", mempoolTx.ID). - Select("*"). 
- Updates(&mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to update mempool tx: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateBlockForCommitter] no new mempoolTx") - return errors.New("[CreateBlockForCommitter] no new mempoolTx") - } - } - for _, pendingDeleteMempoolTx := range pendingDeleteMempoolTxs { - for _, detail := range pendingDeleteMempoolTx.MempoolDetails { - dbTx := tx.Table(mempool.DetailTableName).Where("id = ?", detail.ID).Delete(&detail) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateBlockForCommitter] Delete Invalid Mempool Tx") - return errors.New("[CreateBlockForCommitter] Delete Invalid Mempool Tx") - } - } - dbTx := tx.Table(mempool.MempoolTableName).Where("id = ?", pendingDeleteMempoolTx.ID).Delete(&pendingDeleteMempoolTx) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[CreateBlockForCommitter] Delete Invalid Mempool Tx") - return errors.New("[CreateBlockForCommitter] Delete Invalid Mempool Tx") - } - } - // update account - for _, pendignUpdateAccount := range pendingUpdateAccounts { - dbTx := tx.Table(account.AccountTableName).Where("id = ?", pendignUpdateAccount.ID). - Select("*"). - Updates(&pendignUpdateAccount) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to update account: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateBlockForCommitter] no new account") - return errors.New("[CreateBlockForCommitter] no new account") - } - } - // create new account history - if len(pendingNewAccountHistories) != 0 { - dbTx := tx.Table(account.AccountHistoryTableName).CreateInBatches(pendingNewAccountHistories, len(pendingNewAccountHistories)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewAccountHistories)) { - logx.Errorf("[CreateBlockForCommitter] unable to create new account history") - return errors.New("[CreateBlockForCommitter] unable to create new account history") - } - } - // update liquidity - for _, entity := range pendingUpdateLiquiditys { - dbTx := tx.Table(liquidity.LiquidityTable).Where("id = ?", entity.ID). - Select("*"). 
- Updates(&entity) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to update liquidity: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateBlockForCommitter] no new liquidity") - return errors.New("[CreateBlockForCommitter] no new liquidity") - } - } - // create new liquidity history - if len(pendingNewLiquidityHistories) != 0 { - dbTx := tx.Table(liquidity.LiquidityHistoryTable).CreateInBatches(pendingNewLiquidityHistories, len(pendingNewLiquidityHistories)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewLiquidityHistories)) { - logx.Errorf("[CreateBlockForCommitter] unable to create new liquidity history") - return errors.New("[CreateBlockForCommitter] unable to create new liquidity history") - } - } - // new nft - if len(pendingNewNftWithdrawHistory) != 0 { - dbTx := tx.Table(nft.L2NftWithdrawHistoryTableName).CreateInBatches(pendingNewNftWithdrawHistory, len(pendingNewNftWithdrawHistory)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewNftWithdrawHistory)) { - logx.Errorf("[CreateBlockForCommitter] unable to create new nft withdraw ") - return errors.New("[CreateBlockForCommitter] unable to create new nft withdraw") - } - } - // update nft - for _, entity := range pendingUpdateNfts { - dbTx := tx.Table(nft.L2NftTableName).Where("id = ?", entity.ID). - Select("*"). - Updates(&entity) - if dbTx.Error != nil { - logx.Errorf("[CreateBlockForCommitter] unable to update nft: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateBlockForCommitter] no new nft") - return errors.New("[CreateBlockForCommitter] no new nft") - } - } - // new nft history - if len(pendingNewNftHistories) != 0 { - dbTx := tx.Table(nft.L2NftHistoryTableName).CreateInBatches(pendingNewNftHistories, len(pendingNewNftHistories)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewNftHistories)) { - logx.Errorf("[CreateBlockForCommitter] unable to create new nft history") - return errors.New("[CreateBlockForCommitter] unable to create new nft history") - } - } - return nil - }) - return err -} - -func (m *defaultBlockModel) GetBlocksForProverBetween(start, end int64) (blocks []*Block, err error) { - dbTx := m.DB.Table(m.table).Where("block_status = ? AND block_height >= ? AND block_height <= ?", StatusCommitted, start, end). - Order("block_height"). - Find(&blocks) - if dbTx.Error != nil { - logx.Errorf("[GetBlocksForProverBetween] unable to get block between: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return blocks, nil -} - -func (m *defaultBlockModel) GetLatestVerifiedBlockHeight() (height int64, err error) { - block := &Block{} - dbTx := m.DB.Table(m.table).Where("block_status = ?", StatusVerifiedAndExecuted). - Order("block_height DESC"). - Limit(1). 
- First(&block) - if dbTx.Error != nil { - logx.Errorf("[GetLatestVerifiedBlockHeight] unable to get block: %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, errorcode.DbErrNotFound - } - return block.BlockHeight, nil -} diff --git a/common/model/block/constant.go b/common/model/block/constant.go deleted file mode 100644 index 1918672e5..000000000 --- a/common/model/block/constant.go +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package block - -const ( - _ = iota - StatusPending - StatusCommitted - StatusVerifiedAndExecuted -) - -const ( - BlockTableName = `block` -) diff --git a/common/model/blockForCommit/blockForCommit.go b/common/model/blockForCommit/blockForCommit.go deleted file mode 100644 index 8acf81073..000000000 --- a/common/model/blockForCommit/blockForCommit.go +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package blockForCommit - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - BlockForCommitModel interface { - CreateBlockForCommitTable() error - DropBlockForCommitTable() error - GetBlockForCommitByHeight(height int64) (blockForCommit *BlockForCommit, err error) - GetBlockForCommitBetween(start, end int64) (blocksForCommit []*BlockForCommit, err error) - } - - defaultBlockForCommitModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - BlockForCommit struct { - gorm.Model - BlockSize uint16 - BlockHeight int64 - StateRoot string - PublicData string - Timestamp int64 - PublicDataOffsets string - } -) - -func NewBlockForCommitModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) BlockForCommitModel { - return &defaultBlockForCommitModel{ - CachedConn: sqlc.NewConn(conn, c), - table: BlockForCommitTableName, - DB: db, - } -} - -func (*BlockForCommit) TableName() string { - return BlockForCommitTableName -} - -/* - Func: CreateBlockForCommitTable - Params: - Return: err error - Description: create Block table -*/ - -func (m *defaultBlockForCommitModel) CreateBlockForCommitTable() error { - return m.DB.AutoMigrate(BlockForCommit{}) -} - -/* - Func: DropBlockForCommitTable - Params: - Return: err error - Description: drop block table -*/ - -func (m *defaultBlockForCommitModel) DropBlockForCommitTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -func (m *defaultBlockForCommitModel) GetBlockForCommitByHeight(height int64) (blockForCommit *BlockForCommit, err error) { - dbTx := m.DB.Table(m.table).Where("block_height = ?", height).Find(&blockForCommit) - if dbTx.Error != nil { - logx.Errorf("[GetBlockForCommitBetween] unable to get block for commit by height: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return blockForCommit, nil -} - -func (m *defaultBlockForCommitModel) GetBlockForCommitBetween(start, end int64) (blocksForCommit []*BlockForCommit, err error) { - dbTx := m.DB.Table(m.table).Where("block_height >= ? AND block_height <= ?", start, end).Find(&blocksForCommit) - if dbTx.Error != nil { - logx.Errorf("[GetBlockForCommitBetween] unable to get block for commit between: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return blocksForCommit, nil -} diff --git a/common/model/blockForCommit/constant.go b/common/model/blockForCommit/constant.go deleted file mode 100644 index f3f769a4d..000000000 --- a/common/model/blockForCommit/constant.go +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package blockForCommit - -const ( - BlockForCommitTableName = `block_for_commit` -) diff --git a/common/model/blockForProof/blockForProof.go b/common/model/blockForProof/blockForProof.go deleted file mode 100644 index e9e3fcda1..000000000 --- a/common/model/blockForProof/blockForProof.go +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package blockForProof - -import ( - "fmt" - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - BlockForProofModel interface { - CreateBlockForProofTable() error - DropBlockForProofTable() error - GetLatestUnprovedBlockHeight() (blockNumber int64, err error) - GetUnprovedCryptoBlockByBlockNumber(height int64) (block *BlockForProof, err error) - UpdateUnprovedCryptoBlockStatus(block *BlockForProof, status int64) error - GetUnprovedCryptoBlockByMode(mode int64) (block *BlockForProof, err error) - CreateConsecutiveUnprovedCryptoBlock(block *BlockForProof) error - } - - defaultBlockForProofModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - BlockForProof struct { - gorm.Model - BlockHeight int64 `gorm:"index:idx_height,unique"` - BlockData string - Status int64 - } -) - -func NewBlockForProofModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) BlockForProofModel { - return &defaultBlockForProofModel{ - CachedConn: sqlc.NewConn(conn, c), - table: BlockForProofTableName, - DB: db, - } -} - -func (*BlockForProof) TableName() string { - return BlockForProofTableName -} - -/* - Func: CreateBlockForProofTable - Params: - Return: err error - Description: create Block table -*/ - -func (m *defaultBlockForProofModel) CreateBlockForProofTable() error { - return m.DB.AutoMigrate(BlockForProof{}) -} - -/* - Func: DropBlockForProofTable - Params: - Return: err error - Description: drop block table -*/ - -func (m *defaultBlockForProofModel) DropBlockForProofTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -func (m *defaultBlockForProofModel) GetLatestUnprovedBlockHeight() (blockNumber int64, err error) { - var row *BlockForProof - dbTx := m.DB.Table(m.table).Order("block_height desc").Limit(1).Find(&row) - if dbTx.Error != nil { - logx.Errorf("[GetLatestUnprovedBlockHeight] unable to get latest unproved block: %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, errorcode.DbErrNotFound - } - return row.BlockHeight, nil -} - -func (m *defaultBlockForProofModel) GetUnprovedCryptoBlockByMode(mode int64) (block *BlockForProof, err error) { - switch mode { - case util.COO_MODE: - dbTx := m.DB.Table(m.table).Where("status = ?", StatusPublished).Order("block_height asc").Limit(1).Find(&block) - if dbTx.Error != nil { - 
logx.Errorf("[GetUnprovedCryptoBlockByMode] unable to get unproved block: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return block, nil - case util.COM_MODE: - dbTx := m.DB.Table(m.table).Where("status <= ?", StatusReceived).Order("block_height asc").Limit(1).Find(&block) - if dbTx.Error != nil { - logx.Errorf("[GetUnprovedCryptoBlockByMode] unable to get unproved block: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return block, nil - default: - return nil, nil - } -} - -func (m *defaultBlockForProofModel) GetUnprovedCryptoBlockByBlockNumber(height int64) (block *BlockForProof, err error) { - dbTx := m.DB.Table(m.table).Where("block_height = ?", height).Limit(1).Find(&block) - if dbTx.Error != nil { - logx.Errorf("[GetUnprovedCryptoBlockByBlockNumber] unable to get unproved block: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return block, nil -} - -func (m *defaultBlockForProofModel) CreateConsecutiveUnprovedCryptoBlock(block *BlockForProof) error { - if block.BlockHeight > 1 { - _, err := m.GetUnprovedCryptoBlockByBlockNumber(block.BlockHeight - 1) - if err != nil { - logx.Infof("[CreateConsecutiveUnprovedCryptoBlock] block exist: %s", err.Error()) - return fmt.Errorf("previous block does not exist") - } - } - - dbTx := m.DB.Table(m.table).Create(block) - if dbTx.Error != nil { - logx.Errorf("[CreateConsecutiveUnprovedCryptoBlock] create block error: %s", dbTx.Error.Error()) - return dbTx.Error - } - return nil -} - -func (m *defaultBlockForProofModel) UpdateUnprovedCryptoBlockStatus(block *BlockForProof, status int64) error { - block.Status = status - block.UpdatedAt = time.Now() - dbTx := m.DB.Table(m.table).Save(block) - if dbTx.Error != nil { - logx.Errorf("[UpdateUnprovedCryptoBlockStatus] update block status error: %s", dbTx.Error.Error()) - return dbTx.Error - } - return nil -} diff --git a/common/model/info/tvl.go b/common/model/info/tvl.go deleted file mode 100644 index 5ffd6b1db..000000000 --- a/common/model/info/tvl.go +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package info - -import ( - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - TVLModel interface { - CreateTVLTable() error - DropTVLTable() error - CreateTVL(tvl *TVL) error - CreateTVLsInBatch(tvls []*TVL) error - GetLockAmountSum(date time.Time) (result []*ResultTvlSum, err error) - GetLockAmountSumGroupByDays() (result []*ResultTvlDaySum, err error) - } - - defaultTVLModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - TVL struct { - gorm.Model - AssetId int64 `gorm:"index"` - LockAmountDelta int64 - BlockHeight int64 - Date time.Time `gorm:"index"` //days:hour - } -) - -func NewTVLModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) TVLModel { - return &defaultTVLModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `tvl`, - DB: db, - } -} - -func (*TVL) TableName() string { - return `tvl` -} - -/* - Func: CreateTVLTable - Params: - Return: err error - Description: create TVL table -*/ -func (m *defaultTVLModel) CreateTVLTable() error { - return m.DB.AutoMigrate(TVL{}) -} - -/* - Func: DropTVLTable - Params: - Return: err error - Description: drop TVL table -*/ -func (m *defaultTVLModel) DropTVLTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateTVL - Params: tvl *TVL - Return: error - Description: Insert New TVL -*/ - -func (m *defaultTVLModel) CreateTVL(tvl *TVL) error { - dbTx := m.DB.Table(m.table).Create(tvl) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateTVL] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvl.CreateTVL] Delete Invalid Mempool Tx") - return errorcode.DbErrFailToCreateTVL - } - return nil -} - -/* - Func: CreateTVLsInBatch - Params: tvls []*TVL - Return: error - Description: Insert New TVLs in Batch -*/ -func (m *defaultTVLModel) CreateTVLsInBatch(tvls []*TVL) error { - dbTx := m.DB.Table(m.table).CreateInBatches(tvls, len(tvls)) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateTVLsInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvl.CreateTVLsInBatch] Create TVL Error") - return errorcode.DbErrFailToCreateTVL - } - return nil -} - -/* - Func: GetLockAmountSum - Params: tvls []*TVL - Return: error - Description: Insert New TVLs in Batch -*/ -type ResultTvlSum struct { - AssetId int64 - Total int64 -} - -func (m *defaultTVLModel) GetLockAmountSum(date time.Time) (result []*ResultTvlSum, err error) { - dbTx := m.DB.Table(m.table).Select("asset_id, sum(lock_amount_delta) as total").Where("date <= ?", date).Group("asset_id").Order("asset_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateTVLsInBatch] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.GetLockAmountSum] no result in tvl table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} - -type ResultTvlDaySum struct { - Total int64 - AssetId int64 - Date time.Time -} - -func (m *defaultTVLModel) GetLockAmountSumGroupByDays() (result []*ResultTvlDaySum, err error) { - // SELECT SUM( lock_amount_delta ), asset_id, date_trunc( 'day', DATE ) FROM tvl GROUP BY date_trunc( 'day', DATE ), asset_id ORDER BY date_trunc( 'day', DATE ), asset_id - dbTx := m.DB.Table(m.table).Debug().Select("sum(lock_amount_delta) as total, asset_id, 
date_trunc( 'day', DATE )::date as date").Group("date_trunc( 'day', DATE ), asset_id").Order("date_trunc( 'day', DATE ), asset_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.GetLockAmountSumGroupByDays] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.GetLockAmountSumGroupByDays] no result in tvl table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} diff --git a/common/model/info/tvlPool.go b/common/model/info/tvlPool.go deleted file mode 100644 index aa837a1f5..000000000 --- a/common/model/info/tvlPool.go +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package info - -import ( - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - TVLPoolModel interface { - CreateTVLPoolTable() error - DropTVLPoolTable() error - CreateTVLPool(tvlpool *TVLPool) error - CreateTVLPoolsInBatch(tvlPools []*TVLPool) error - GetPoolAmountSum(date time.Time) (result []*ResultTvlPoolSum, err error) - } - - defaultTVLPoolModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - TVLPool struct { - gorm.Model - PoolId int64 `gorm:"index"` - AmountDeltaA int64 - AmountDeltaB int64 - Date time.Time `gorm:"index"` //days:hour - } -) - -func NewTVLPoolModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) TVLPoolModel { - return &defaultTVLPoolModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `tvl_pool`, - DB: db, - } -} - -func (*TVLPool) TableName() string { - return `tvl_pool` -} - -/* - Func: CreateTVLPoolTable - Params: - Return: err error - Description: create TVLPool table -*/ -func (m *defaultTVLPoolModel) CreateTVLPoolTable() error { - return m.DB.AutoMigrate(TVLPool{}) -} - -/* - Func: DropTVLPoolTable - Params: - Return: err error - Description: drop TVLPool table -*/ -func (m *defaultTVLPoolModel) DropTVLPoolTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateTVLPool - Params: tvlpool *TVLPool - Return: error - Description: Insert New TVLPool -*/ - -func (m *defaultTVLPoolModel) CreateTVLPool(tvlpool *TVLPool) error { - dbTx := m.DB.Table(m.table).Create(tvlpool) - if dbTx.Error != nil { - logx.Errorf("[tvlpool.CreateTVLPool] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvlpool.CreateTVLPool] Delete Invalid Mempool Tx") - return errorcode.DbErrFailToCreateTVL - } - return nil -} - -/* - Func: CreateTVLPoolsInBatch - Params: tvlPools []*TVLPool - Return: error - Description: Insert New TVLPools in Batch -*/ -func (m *defaultTVLPoolModel) CreateTVLPoolsInBatch(tvlPools []*TVLPool) error { - dbTx := m.DB.Table(m.table).CreateInBatches(tvlPools, len(tvlPools)) - if dbTx.Error != nil { - 
logx.Errorf("[tvlpool.CreateTVLPoolsInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvlpool.CreateTVLPoolsInBatch] Create TVLPool Error") - return errorcode.DbErrFailToCreateTVL - } - return nil -} - -/* - Func: GetLockAmountSum - Params: tvls []*TVL - Return: error - Description: Insert New TVLs in Batch -*/ -type ResultTvlPoolSum struct { - PoolId int64 - TotalA int64 - TotalB int64 -} - -func (m *defaultTVLPoolModel) GetPoolAmountSum(date time.Time) (result []*ResultTvlPoolSum, err error) { - dbTx := m.DB.Table(m.table).Select("pool_id, sum(amount_delta_a) as total_a, sum(amount_delta_b) as total_b").Where("date <= ?", date).Group("pool_id").Order("pool_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.GetPoolAmountSum] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.GetPoolAmountSum] no result in tvl pool table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} diff --git a/common/model/info/volume.go b/common/model/info/volume.go deleted file mode 100644 index 932d0a313..000000000 --- a/common/model/info/volume.go +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package info - -import ( - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - VolumeModel interface { - CreateVolumeTable() error - DropVolumeTable() error - CreateVolume(volume *Volume) error - CreateVolumesInBatch(volumes []*Volume) error - CreateVolumesAndTVLsInBatch(volumes []*Volume, tvls []*TVL, volumesPool []*VolumePool, tvlsPool []*TVLPool) error - GetLatestBlockHeight() (blockHeight int64, err error) - GetVolumeSumBetweenDate(date1 time.Time, date2 time.Time) (result []*ResultVolumeSum, err error) - GetVolumeSumGroupByDays() (result []*ResultVolumeDaySum, err error) - } - - defaultVolumeModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - Volume struct { - gorm.Model - AssetId int64 `gorm:"index"` - VolumeDelta int64 - BlockHeight int64 - Date time.Time `gorm:"index"` //days:hour - } -) - -func NewVolumeModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) VolumeModel { - return &defaultVolumeModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `volume`, - DB: db, - } -} - -func (*Volume) TableName() string { - return `volume` -} - -/* - Func: CreateVolumeTable - Params: - Return: err error - Description: create Volume table -*/ -func (m *defaultVolumeModel) CreateVolumeTable() error { - return m.DB.AutoMigrate(Volume{}) -} - -/* - Func: DropVolumeTable - Params: - Return: err error - Description: drop Volume table -*/ -func (m *defaultVolumeModel) DropVolumeTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateVolume - Params: volume *Volume - Return: error - Description: Insert New Volume -*/ - -func (m *defaultVolumeModel) CreateVolume(volume *Volume) error { - dbTx := m.DB.Table(m.table).Create(volume) - if dbTx.Error != nil { - logx.Errorf("[volume.CreateVolume] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[volume.CreateVolume] Create Volume Error") - return errorcode.DbErrFailToCreateVolume - } - return nil -} - -/* - Func: CreateVolumesInBatch - Params: volumes []*Volume - Return: error - Description: Insert New Volumes in Batch -*/ - -func (m *defaultVolumeModel) CreateVolumesInBatch(volumes []*Volume) error { - dbTx := m.DB.Table(m.table).CreateInBatches(volumes, len(volumes)) - if dbTx.Error != nil { - logx.Errorf("[volume.CreateVolumesInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[volume.CreateVolumesInBatch] Create Volume Error") - return errorcode.DbErrFailToCreateVolume - } - return nil -} - -/* - Func: CreateVolumesAndTVLInBatch - Params: volumes []*Volume - Return: error - Description: Insert New Volumes in Batch -*/ - -func (m *defaultVolumeModel) CreateVolumesAndTVLsInBatch(volumes []*Volume, tvls []*TVL, volumesPool []*VolumePool, tvlsPool []*TVLPool) error { - var ( - tvlTableName = "tvl" - volumePoolName = "volume_pool" - tvlPoolName = "tvl_pool" - ) - err := m.DB.Transaction(func(tx *gorm.DB) error { - dbTx := tx.Table(m.table).CreateInBatches(volumes, len(volumes)) - if dbTx.Error != nil { - logx.Errorf("[volume.CreateVolumesAndTVLInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[volume.CreateVolumesAndTVLInBatch] Create Volume Error") - return errorcode.DbErrFailToCreateVolume - } - - if len(tvls) != 0 { - dbTx = 
tx.Table(tvlTableName).CreateInBatches(tvls, len(tvls)) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] Create TVL Error") - return errorcode.DbErrFailToCreateTVL - } - } - - if len(volumesPool) != 0 { - dbTx = tx.Table(volumePoolName).CreateInBatches(volumesPool, len(volumesPool)) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] Create TVL Error") - return errorcode.DbErrFailToCreateTVL - } - } - - if len(tvlsPool) != 0 { - dbTx = tx.Table(tvlPoolName).CreateInBatches(tvlsPool, len(tvlsPool)) - if dbTx.Error != nil { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[tvl.CreateVolumesAndTVLInBatch] Create TVL Error") - return errorcode.DbErrFailToCreateTVL - } - } - - return nil - }) - - return err -} - -/* - Func: CreateVolume - Params: volume *Volume - Return: error - Description: Insert New Volume -*/ - -func (m *defaultVolumeModel) GetLatestBlockHeight() (blockHeight int64, err error) { - dbTx := m.DB.Table(m.table).Select("block_height").Order("block_height desc").Limit(1).Find(&blockHeight) - if dbTx.Error != nil { - logx.Errorf("[volume.CreateVolume] %s", dbTx.Error) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Info("[volume.CreateVolume] no result in volume table") - return 0, errorcode.DbErrNotFound - } - return blockHeight, nil -} - -/* - Func: GetVolumeSum - Params: tvls []*TVL - Return: error - Description: Insert New TVLs in Batch -*/ -type ResultVolumeSum struct { - AssetId int64 - Total int64 -} - -func (m *defaultVolumeModel) GetVolumeSumBetweenDate(date1 time.Time, date2 time.Time) (result []*ResultVolumeSum, err error) { - dbTx := m.DB.Table(m.table).Select("asset_id, sum(volume_delta) as total").Where("date <= ? 
and date > ?", date1, date2).Group("asset_id").Order("asset_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.GetVolumeSum] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.CreateVolume] no result in volume table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} - -type ResultVolumeDaySum struct { - Total int64 - AssetId int64 - Date time.Time -} - -func (m *defaultVolumeModel) GetVolumeSumGroupByDays() (result []*ResultVolumeDaySum, err error) { - // SELECT SUM( lock_amount_delta ), asset_id, date_trunc( 'day', DATE ) FROM tvl GROUP BY date_trunc( 'day', DATE ), asset_id ORDER BY date_trunc( 'day', DATE ), asset_id - dbTx := m.DB.Table(m.table).Debug().Select("sum(volume_delta) as total, asset_id, date_trunc( 'day', DATE )::date as date").Group("date_trunc( 'day', DATE ), asset_id").Order("date_trunc( 'day', DATE ), asset_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.GetVolumeSumGroupByDays] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.GetVolumeSumGroupByDays] no result in tvl table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} diff --git a/common/model/info/volumePool.go b/common/model/info/volumePool.go deleted file mode 100644 index 5d0249f93..000000000 --- a/common/model/info/volumePool.go +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package info - -import ( - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - VolumePoolModel interface { - CreateVolumePoolTable() error - DropVolumePoolTable() error - CreateVolumePool(volume *VolumePool) error - GetPoolVolumeSumBetweenDate(date1 time.Time, date2 time.Time) (result []*ResultVolumePoolSum, err error) - } - - defaultVolumePoolModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - VolumePool struct { - gorm.Model - PoolId int64 `gorm:"index"` - VolumeDeltaA int64 - VolumeDeltaB int64 - Date time.Time `gorm:"index"` //days:hour - } -) - -func NewVolumePoolModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) VolumePoolModel { - return &defaultVolumePoolModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `volume_pool`, - DB: db, - } -} - -func (*VolumePool) TableName() string { - return `volume_pool` -} - -/* - Func: CreateVolumePoolTable - Params: - Return: err error - Description: create VolumePool table -*/ -func (m *defaultVolumePoolModel) CreateVolumePoolTable() error { - return m.DB.AutoMigrate(VolumePool{}) -} - -/* - Func: DropVolumePoolTable - Params: - Return: err error - Description: drop VolumePool table -*/ -func (m *defaultVolumePoolModel) DropVolumePoolTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateVolumePool - Params: volumePool *VolumePool - Return: error - Description: Insert New VolumePool -*/ - -func (m *defaultVolumePoolModel) CreateVolumePool(volumePool *VolumePool) error { - dbTx := m.DB.Table(m.table).Create(volumePool) - if dbTx.Error != nil { - logx.Errorf("[volumePool.CreateVolumePool] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[volumePool.CreateVolumePool] Create VolumePool Error") - return errorcode.DbErrFailToCreateVolume - } - return nil -} - -/* - Func: GetLockVolumeSum - Params: tvls []*TVL - Return: error - Description: Insert New TVLs in Batch -*/ -type ResultVolumePoolSum struct { - PoolId int64 - TotalA int64 - TotalB int64 -} - -func (m *defaultVolumePoolModel) GetPoolVolumeSumBetweenDate(date1 time.Time, date2 time.Time) (result []*ResultVolumePoolSum, err error) { - dbTx := m.DB.Table(m.table).Select("pool_id, sum(volume_delta_a) as total_a, sum(volume_delta_b) as total_b").Where("date <= ? and date > ?", date1, date2).Group("pool_id").Order("pool_id").Find(&result) - if dbTx.Error != nil { - logx.Errorf("[tvl.GetPoolVolumeSum] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[volume.GetPoolVolumeSum] no result in tvl pool table") - return nil, errorcode.DbErrNotFound - } - - return result, nil -} diff --git a/common/model/init/init.go b/common/model/init/init.go deleted file mode 100644 index 119459f56..000000000 --- a/common/model/init/init.go +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package main - -import ( - "encoding/json" - "flag" - - "github.com/ethereum/go-ethereum/common" - "github.com/stretchr/testify/assert" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/blockForProof" - "github.com/bnb-chain/zkbas/common/model/l1BlockMonitor" - "github.com/bnb-chain/zkbas/common/model/l1TxSender" - "github.com/bnb-chain/zkbas/common/model/l2BlockEventMonitor" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/common/tree" -) - -var configFile = flag.String("f", "./contractaddr.yaml", "the config file") -var svrConf config - -const ( - BSC_Test_Network_RPC = "http://tf-dex-preview-validator-nlb-6fd109ac8b9d390a.elb.ap-northeast-1.amazonaws.com:8545" - Local_Test_Network_RPC = "http://127.0.0.1:8545/" -) - -func main() { - conf.MustLoad(*configFile, &svrConf) - - unmarshal, _ := json.Marshal(svrConf) - logx.Infof("init configs: %s", string(unmarshal)) - - dropTables() - initTable() -} - -func initSysConfig() []*sysconfig.Sysconfig { - return []*sysconfig.Sysconfig{ - { - Name: sysconfigName.SysGasFee, - Value: "100000000000000", - ValueType: "string", - Comment: "based on BNB", - }, - { - Name: sysconfigName.TreasuryAccountIndex, - Value: "0", - ValueType: "int", - Comment: "treasury index", - }, - { - Name: sysconfigName.GasAccountIndex, - Value: "1", - ValueType: "int", - Comment: "gas index", - }, - { - Name: sysconfigName.ZkbasContract, - Value: svrConf.ZkbasProxy, - ValueType: "string", - Comment: "Zkbas contract on BSC", - }, - // Governance Contract - { - Name: sysconfigName.GovernanceContract, - Value: svrConf.Governance, - ValueType: "string", - Comment: "Governance contract on BSC", - }, - - // Asset_Governance Contract - //{ - // Name: sysconfigName.AssetGovernanceContract, - // Value: AssetGovernanceContractAddr, - // ValueType: "string", - // Comment: "Asset_Governance contract on BSC", - //}, - - // Verifier Contract - //{ - // Name: sysconfigName.VerifierContract, - // Value: VerifierContractAddr, - // ValueType: "string", - // Comment: "Verifier contract on BSC", - //}, - // network rpc - { - Name: sysconfigName.BscTestNetworkRpc, - Value: BSC_Test_Network_RPC, - ValueType: "string", - Comment: "BSC network rpc", - }, - // TODO - { - Name: sysconfigName.LocalTestNetworkRpc, - Value: Local_Test_Network_RPC, - ValueType: "string", - Comment: 
"Local network rpc", - }, - { - Name: sysconfigName.ZnsPriceOracle, - Value: svrConf.ZnsPriceOracle, - ValueType: "string", - Comment: "Zns Price Oracle", - }, - } -} - -func initAssetsInfo() []*asset.AssetInfo { - return []*asset.AssetInfo{ - { - AssetId: 0, - L1Address: "0x00", - AssetName: "BNB", - AssetSymbol: "BNB", - Decimals: 18, - Status: 0, - IsGasAsset: asset.IsGasAsset, - }, - //{ - // AssetId: 1, - // AssetName: "LEG", - // AssetSymbol: "LEG", - // Decimals: 18, - // Status: 0, - //}, - //{ - // AssetId: 2, - // AssetName: "REY", - // AssetSymbol: "REY", - // Decimals: 18, - // Status: 0, - //}, - } -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -var ( - redisConn = redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - sysconfigModel = sysconfig.NewSysconfigModel(basic.Connection, basic.CacheConf, basic.DB) - //priceModel = price.NewPriceModel(basic.Connection, basic.CacheConf, basic.DB) - accountModel = account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel = account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - assetInfoModel = asset.NewAssetInfoModel(basic.Connection, basic.CacheConf, basic.DB) - mempoolDetailModel = mempool.NewMempoolDetailModel(basic.Connection, basic.CacheConf, basic.DB) - mempoolModel = mempool.NewMempoolModel(basic.Connection, basic.CacheConf, basic.DB) - failTxModel = tx.NewFailTxModel(basic.Connection, basic.CacheConf, basic.DB) - txDetailModel = tx.NewTxDetailModel(basic.Connection, basic.CacheConf, basic.DB) - txModel = tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - blockModel = block.NewBlockModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - blockForCommitModel = blockForCommit.NewBlockForCommitModel(basic.Connection, basic.CacheConf, basic.DB) - blockForProofModel = blockForProof.NewBlockForProofModel(basic.Connection, basic.CacheConf, basic.DB) - proofSenderModel = proofSender.NewProofSenderModel(basic.DB) - l1BlockMonitorModel = l1BlockMonitor.NewL1BlockMonitorModel(basic.Connection, basic.CacheConf, basic.DB) - l2TxEventMonitorModel = l2TxEventMonitor.NewL2TxEventMonitorModel(basic.Connection, basic.CacheConf, basic.DB) - l2BlockEventMonitorModel = l2BlockEventMonitor.NewL2BlockEventMonitorModel(basic.Connection, basic.CacheConf, basic.DB) - l1TxSenderModel = l1TxSender.NewL1TxSenderModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityModel = liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel = liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - nftModel = nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - offerModel = nft.NewOfferModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel = nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - nftExchangeModel = nft.NewL2NftExchangeModel(basic.Connection, basic.CacheConf, basic.DB) - nftCollectionModel = nft.NewL2NftCollectionModel(basic.Connection, basic.CacheConf, basic.DB) - nftWithdrawHistoryModel = nft.NewL2NftWithdrawHistoryModel(basic.Connection, basic.CacheConf, basic.DB) -) - -func dropTables() { - assert.Nil(nil, sysconfigModel.DropSysconfigTable()) - assert.Nil(nil, accountModel.DropAccountTable()) - assert.Nil(nil, accountHistoryModel.DropAccountHistoryTable()) - assert.Nil(nil, 
assetInfoModel.DropAssetInfoTable()) - assert.Nil(nil, mempoolDetailModel.DropMempoolDetailTable()) - assert.Nil(nil, mempoolModel.DropMempoolTxTable()) - assert.Nil(nil, failTxModel.DropFailTxTable()) - assert.Nil(nil, txDetailModel.DropTxDetailTable()) - assert.Nil(nil, txModel.DropTxTable()) - assert.Nil(nil, blockModel.DropBlockTable()) - assert.Nil(nil, blockForCommitModel.DropBlockForCommitTable()) - assert.Nil(nil, blockForProofModel.DropBlockForProofTable()) - assert.Nil(nil, proofSenderModel.DropProofSenderTable()) - assert.Nil(nil, l1BlockMonitorModel.DropL1BlockMonitorTable()) - assert.Nil(nil, l2TxEventMonitorModel.DropL2TxEventMonitorTable()) - assert.Nil(nil, l2BlockEventMonitorModel.DropL2BlockEventMonitorTable()) - assert.Nil(nil, l1TxSenderModel.DropL1TxSenderTable()) - assert.Nil(nil, liquidityModel.DropLiquidityTable()) - assert.Nil(nil, liquidityHistoryModel.DropLiquidityHistoryTable()) - assert.Nil(nil, nftModel.DropL2NftTable()) - assert.Nil(nil, offerModel.DropOfferTable()) - assert.Nil(nil, nftHistoryModel.DropL2NftHistoryTable()) - assert.Nil(nil, nftExchangeModel.DropL2NftExchangeTable()) - assert.Nil(nil, nftCollectionModel.DropL2NftCollectionTable()) - assert.Nil(nil, nftWithdrawHistoryModel.DropL2NftWithdrawHistoryTable()) -} - -func initTable() { - assert.Nil(nil, sysconfigModel.CreateSysconfigTable()) - assert.Nil(nil, accountModel.CreateAccountTable()) - assert.Nil(nil, accountHistoryModel.CreateAccountHistoryTable()) - assert.Nil(nil, assetInfoModel.CreateAssetInfoTable()) - assert.Nil(nil, mempoolModel.CreateMempoolTxTable()) - assert.Nil(nil, mempoolDetailModel.CreateMempoolDetailTable()) - assert.Nil(nil, failTxModel.CreateFailTxTable()) - assert.Nil(nil, blockModel.CreateBlockTable()) - assert.Nil(nil, txModel.CreateTxTable()) - assert.Nil(nil, txDetailModel.CreateTxDetailTable()) - assert.Nil(nil, blockForCommitModel.CreateBlockForCommitTable()) - assert.Nil(nil, blockForProofModel.CreateBlockForProofTable()) - assert.Nil(nil, proofSenderModel.CreateProofSenderTable()) - assert.Nil(nil, l1BlockMonitorModel.CreateL1BlockMonitorTable()) - assert.Nil(nil, l2TxEventMonitorModel.CreateL2TxEventMonitorTable()) - assert.Nil(nil, l2BlockEventMonitorModel.CreateL2BlockEventMonitorTable()) - assert.Nil(nil, l1TxSenderModel.CreateL1TxSenderTable()) - assert.Nil(nil, liquidityModel.CreateLiquidityTable()) - assert.Nil(nil, liquidityHistoryModel.CreateLiquidityHistoryTable()) - assert.Nil(nil, nftModel.CreateL2NftTable()) - assert.Nil(nil, offerModel.CreateOfferTable()) - assert.Nil(nil, nftHistoryModel.CreateL2NftHistoryTable()) - assert.Nil(nil, nftExchangeModel.CreateL2NftExchangeTable()) - assert.Nil(nil, nftCollectionModel.CreateL2NftCollectionTable()) - assert.Nil(nil, nftWithdrawHistoryModel.CreateL2NftWithdrawHistoryTable()) - rowsAffected, err := assetInfoModel.CreateAssetsInfoInBatches(initAssetsInfo()) - if err != nil { - panic(err) - } - logx.Infof("l2 assets info rows affected: %d", rowsAffected) - rowsAffected, err = sysconfigModel.CreateSysconfigInBatches(initSysConfig()) - if err != nil { - panic(err) - } - logx.Infof("sys config rows affected: %d", rowsAffected) - err = blockModel.CreateGenesisBlock(&block.Block{ - BlockCommitment: "0000000000000000000000000000000000000000000000000000000000000000", - BlockHeight: 0, - StateRoot: common.Bytes2Hex(tree.NilStateRoot), - PriorityOperations: 0, - PendingOnChainOperationsHash: "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", - CommittedTxHash: "", - CommittedAt: 0, - VerifiedTxHash: 
"", - VerifiedAt: 0, - BlockStatus: block.StatusVerifiedAndExecuted, - }) - if err != nil { - panic(err) - } -} - -type config struct { - Governance string - AssetGovernance string - VerifierProxy string - ZnsControllerProxy string - ZnsResolverProxy string - ZkbasProxy string - UpgradeGateKeeper string - LEGToken string - REYToken string - ERC721 string - ZnsPriceOracle string -} diff --git a/common/model/l1BlockMonitor/constant.go b/common/model/l1BlockMonitor/constant.go deleted file mode 100644 index 655d5ead0..000000000 --- a/common/model/l1BlockMonitor/constant.go +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package l1BlockMonitor - -const ( - TableName = "l1_block_monitor" - - MonitorTypeBlock = 0 - MonitorTypeGovernance = 1 -) diff --git a/common/model/l1BlockMonitor/l1BlockMonitor.go b/common/model/l1BlockMonitor/l1BlockMonitor.go deleted file mode 100644 index 50ffe4424..000000000 --- a/common/model/l1BlockMonitor/l1BlockMonitor.go +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package l1BlockMonitor - -import ( - "errors" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/l2BlockEventMonitor" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L1BlockMonitorModel interface { - CreateL1BlockMonitorTable() error - DropL1BlockMonitorTable() error - CreateL1BlockMonitor(tx *L1BlockMonitor) (bool, error) - CreateL1BlockMonitorsInBatches(blockInfos []*L1BlockMonitor) (rowsAffected int64, err error) - CreateMonitorsInfo(blockInfo *L1BlockMonitor, txEventMonitors []*l2TxEventMonitor.L2TxEventMonitor, blockEventMonitors []*l2BlockEventMonitor.L2BlockEventMonitor) (err error) - CreateGovernanceMonitorInfo( - blockInfo *L1BlockMonitor, - l2AssetInfos []*asset.AssetInfo, - pendingUpdateL2AssetInfos []*asset.AssetInfo, - pendingNewSysconfigInfos []*sysconfig.Sysconfig, - pendingUpdateSysconfigInfos []*sysconfig.Sysconfig, - ) (err error) - GetL1BlockMonitors() (blockInfos []*L1BlockMonitor, err error) - GetLatestL1BlockMonitorByBlock() (blockInfo *L1BlockMonitor, err error) - GetLatestL1BlockMonitorByGovernance() (blockInfo *L1BlockMonitor, err error) - } - - defaultL1BlockMonitorModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L1BlockMonitor struct { - gorm.Model - // l1 block height - L1BlockHeight int64 - // block info, array of hashes - BlockInfo string - MonitorType int - } -) - -func (*L1BlockMonitor) TableName() string { - return TableName -} - -func NewL1BlockMonitorModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L1BlockMonitorModel { - return &defaultL1BlockMonitorModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TableName, - DB: db, - } -} - -/* - Func: CreateL1BlockMonitorTable - Params: - Return: err error - Description: create l2 txVerification event monitor table -*/ -func (m *defaultL1BlockMonitorModel) CreateL1BlockMonitorTable() error { - return m.DB.AutoMigrate(L1BlockMonitor{}) -} - -/* - Func: DropL1BlockMonitorTable - Params: - Return: err error - Description: drop l2 txVerification event monitor table -*/ -func (m *defaultL1BlockMonitorModel) DropL1BlockMonitorTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateL1BlockMonitor - Params: asset *L1BlockMonitor - Return: bool, error - Description: create L1BlockMonitor txVerification -*/ -func (m *defaultL1BlockMonitorModel) CreateL1BlockMonitor(tx *L1BlockMonitor) (bool, error) { - dbTx := m.DB.Table(m.table).Create(tx) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1BlockMonitor.CreateL1BlockMonitor] %s", dbTx.Error) - logx.Error(err) - return false, dbTx.Error - } else if dbTx.RowsAffected == 0 { - ErrInvalidL1BlockMonitor := errors.New("invalid l1BlockMonitor") - err := fmt.Sprintf("[l1BlockMonitor.CreateL1BlockMonitor] %s", ErrInvalidL1BlockMonitor) - logx.Error(err) - return false, ErrInvalidL1BlockMonitor - } - return true, nil -} - -/* - Func: CreateL1BlockMonitorsInBatches - Params: []*L1BlockMonitor - Return: rowsAffected int64, err error - Description: create L1BlockMonitor batches -*/ -func (m *defaultL1BlockMonitorModel) CreateL1BlockMonitorsInBatches(blockInfos []*L1BlockMonitor) (rowsAffected int64, err error) { - dbTx := 
m.DB.Table(m.table).CreateInBatches(blockInfos, len(blockInfos)) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1BlockMonitor.CreateL1AssetsMonitorInBatches] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return 0, nil - } - return dbTx.RowsAffected, nil -} - -func (m *defaultL1BlockMonitorModel) CreateMonitorsInfo( - blockInfo *L1BlockMonitor, - txEventMonitors []*l2TxEventMonitor.L2TxEventMonitor, - blockEventMonitors []*l2BlockEventMonitor.L2BlockEventMonitor, -) (err error) { - err = m.DB.Transaction( - func(tx *gorm.DB) error { // transact - // create data for l1 block info - dbTx := tx.Table(m.table).Create(blockInfo) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - return errors.New("[CreateMonitorsInfo] unable to create l1 block info") - } - // create data in batches for l2 txVerification event monitor - dbTx = tx.Table(l2TxEventMonitor.TableName).CreateInBatches(txEventMonitors, len(txEventMonitors)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(txEventMonitors)) { - return errors.New("[CreateMonitorsInfo] unable to create l2 txVerification event monitors") - } - // create data in batches for l2 block event monitor - dbTx = tx.Table(l2BlockEventMonitor.TableName).CreateInBatches(blockEventMonitors, len(blockEventMonitors)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(blockEventMonitors)) { - return errors.New("[CreateMonitorsInfo] unable to create l2 block event monitors") - } - return nil - }, - ) - return err -} - -func (m *defaultL1BlockMonitorModel) CreateGovernanceMonitorInfo( - blockInfo *L1BlockMonitor, - pendingNewL2AssetInfos []*asset.AssetInfo, - pendingUpdateL2AssetInfos []*asset.AssetInfo, - pendingNewSysconfigInfos []*sysconfig.Sysconfig, - pendingUpdateSysconfigInfos []*sysconfig.Sysconfig, -) (err error) { - err = m.DB.Transaction( - func(tx *gorm.DB) error { // transact - // create data for l1 block info - dbTx := tx.Table(m.table).Create(blockInfo) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - return errors.New("[CreateGovernanceMonitorInfo] unable to create l1 block info") - } - // create l2 asset info - if len(pendingNewL2AssetInfos) != 0 { - dbTx = tx.Table(asset.AssetInfoTableName).CreateInBatches(pendingNewL2AssetInfos, len(pendingNewL2AssetInfos)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewL2AssetInfos)) { - logx.Errorf("[CreateGovernanceMonitorInfo] invalid l2 asset info") - return errors.New("[CreateGovernanceMonitorInfo] invalid l2 asset info") - } - } - // update l2 asset info - for _, pendingUpdateL2AssetInfo := range pendingUpdateL2AssetInfos { - dbTx = tx.Table(asset.AssetInfoTableName).Where("id = ?", pendingUpdateL2AssetInfo.ID).Select("*").Updates(&pendingUpdateL2AssetInfo) - if dbTx.Error != nil { - return dbTx.Error - } - } - // create new sys config - if len(pendingNewSysconfigInfos) != 0 { - dbTx = tx.Table(sysconfig.TableName).CreateInBatches(pendingNewSysconfigInfos, len(pendingNewSysconfigInfos)) - if dbTx.Error != nil { - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewSysconfigInfos)) { - logx.Errorf("[CreateGovernanceMonitorInfo] invalid sys config info") - return errors.New("[CreateGovernanceMonitorInfo] invalid sys config info") - } - } - // update sys config - for _, pendingUpdateSysconfigInfo := range pendingUpdateSysconfigInfos { - dbTx = 
tx.Table(sysconfig.TableName).Where("id = ?", pendingUpdateSysconfigInfo.ID).Select("*").Updates(&pendingUpdateSysconfigInfo) - if dbTx.Error != nil { - return dbTx.Error - } - } - return nil - }, - ) - return err -} - -/* - GetL1BlockMonitors: get all L1BlockMonitors -*/ -func (m *defaultL1BlockMonitorModel) GetL1BlockMonitors() (blockInfos []*L1BlockMonitor, err error) { - dbTx := m.DB.Table(m.table).Find(&blockInfos).Order("l1_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l1BlockMonitor.GetL1BlockMonitors] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1BlockMonitor.GetL1BlockMonitors] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return blockInfos, dbTx.Error -} - -/* - Func: GetLatestL1BlockMonitor - Return: blockInfos []*L1BlockMonitor, err error - Description: get latest l1 block monitor info -*/ -func (m *defaultL1BlockMonitorModel) GetLatestL1BlockMonitorByBlock() (blockInfo *L1BlockMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("monitor_type = ?", MonitorTypeBlock).Order("l1_block_height desc").Find(&blockInfo) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1BlockMonitor.GetLatestL1BlockMonitorByBlock] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1BlockMonitor.GetLatestL1BlockMonitorByBlock] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return blockInfo, nil -} - -func (m *defaultL1BlockMonitorModel) GetLatestL1BlockMonitorByGovernance() (blockInfo *L1BlockMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("monitor_type = ?", MonitorTypeGovernance).Order("l1_block_height desc").Find(&blockInfo) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1BlockMonitor.GetLatestL1BlockMonitorByGovernance] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1BlockMonitor.GetLatestL1BlockMonitorByGovernance] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return blockInfo, nil -} diff --git a/common/model/l1TxSender/constant.go b/common/model/l1TxSender/constant.go deleted file mode 100644 index c5cffb3b7..000000000 --- a/common/model/l1TxSender/constant.go +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package l1TxSender - -const ( - TableName = "l1_tx_sender" - - // status - PendingStatus = 1 - HandledStatus = 2 - - // txVerification type - CommitTxType = 1 - VerifyAndExecuteTxType = 2 - RevertTxType = 3 -) diff --git a/common/model/l1TxSender/l1TxSender.go b/common/model/l1TxSender/l1TxSender.go deleted file mode 100644 index faa137200..000000000 --- a/common/model/l1TxSender/l1TxSender.go +++ /dev/null @@ -1,370 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package l1TxSender - -import ( - "encoding/json" - "errors" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L1TxSenderModel interface { - CreateL1TxSenderTable() error - DropL1TxSenderTable() error - CreateL1TxSender(tx *L1TxSender) (bool, error) - CreateL1TxSendersInBatches(txs []*L1TxSender) (rowsAffected int64, err error) - GetL1TxSenders() (txs []*L1TxSender, err error) - GetLatestHandledBlock(txType int64) (txSender *L1TxSender, err error) - GetLatestL1TxSender() (blockInfo *L1TxSender, err error) - GetLatestPendingBlock(txType int64) (txSender *L1TxSender, err error) - GetL1TxSendersByTxStatus(txStatus int) (txs []*L1TxSender, err error) - GetL1TxSendersByTxTypeAndStatus(txType uint8, txStatus int) (rowsAffected int64, txs []*L1TxSender, err error) - GetL1TxSendersByTxHashAndTxType(txHash string, txType uint8) (rowsAffected int64, txs []*L1TxSender, err error) - DeleteL1TxSender(sender *L1TxSender) error - UpdateRelatedEventsAndResetRelatedAssetsAndTxs( - pendingUpdateBlocks []*block.Block, - pendingUpdateSenders []*L1TxSender, - pendingUpdateMempoolTxs []*mempool.MempoolTx, - pendingUpdateProofSenderStatus map[int64]int, - ) (err error) - } - - defaultL1TxSenderModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L1TxSender struct { - gorm.Model - // txVerification hash - L1TxHash string - // txVerification status, 1 - pending, 2 - handled - TxStatus int - // txVerification type: commit / verify - TxType uint8 - // layer-2 block height - L2BlockHeight int64 - } -) - -func (*L1TxSender) TableName() string { - return TableName -} - -func NewL1TxSenderModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L1TxSenderModel { - return &defaultL1TxSenderModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TableName, - DB: db, - } -} - -/* - Func: CreateL1TxSenderTable - Params: - Return: err error - Description: create l2 txVerification event monitor table -*/ -func (m *defaultL1TxSenderModel) CreateL1TxSenderTable() error { - return m.DB.AutoMigrate(L1TxSender{}) -} - -/* - Func: DropL1TxSenderTable - Params: - Return: err error - Description: drop 
l2 txVerification event monitor table -*/ -func (m *defaultL1TxSenderModel) DropL1TxSenderTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateL1TxSender - Params: asset *L1TxSender - Return: bool, error - Description: create L1TxSender txVerification -*/ -func (m *defaultL1TxSenderModel) CreateL1TxSender(tx *L1TxSender) (bool, error) { - dbTx := m.DB.Table(m.table).Create(tx) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.CreateL1TxSender] %s", dbTx.Error) - logx.Error(err) - return false, dbTx.Error - } else if dbTx.RowsAffected == 0 { - ErrInvalidL1TxSender := errors.New("invalid l1TxSender") - err := fmt.Sprintf("[l1TxSender.CreateL1TxSender] %s", ErrInvalidL1TxSender) - logx.Error(err) - return false, ErrInvalidL1TxSender - } - return true, nil -} - -/* - Func: CreateL1TxSendersInBatches - Params: []*L1TxSender - Return: rowsAffected int64, err error - Description: create L1TxSender batches -*/ -func (m *defaultL1TxSenderModel) CreateL1TxSendersInBatches(txs []*L1TxSender) (rowsAffected int64, err error) { - dbTx := m.DB.Table(m.table).CreateInBatches(txs, len(txs)) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.CreateL1TxSendersInBatches] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return 0, nil - } - return dbTx.RowsAffected, nil -} - -/* - GetL1TxSenders: get all L1TxSenders -*/ -func (m *defaultL1TxSenderModel) GetL1TxSenders() (txs []*L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Find(&txs) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.GetL1TxSenders] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1TxSender.GetL1TxSenders] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return txs, dbTx.Error -} - -/* - Func: GetLatestL1TxSender - Return: txVerification []*L1TxSender, err error - Description: get latest l1 block monitor info -*/ -func (m *defaultL1TxSenderModel) GetLatestL1TxSender() (blockInfo *L1TxSender, err error) { - dbTx := m.DB.Table(m.table).First(&blockInfo) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.GetLatestL1TxSender] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1TxSender.GetLatestL1TxSender] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return blockInfo, nil -} - -/* - Func: GetL1TxSendersByTxStatus - Return: txVerification []*L1TxSender, err error - Description: get L1TxSender by txVerification status -*/ -func (m *defaultL1TxSenderModel) GetL1TxSendersByTxStatus(txStatus int) (txs []*L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Where("tx_status = ?", txStatus).Order("l2_block_height, tx_type").Find(&txs) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.GetL1TxSendersByTxStatus] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l1TxSender.GetL1TxSendersByTxStatus] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return txs, nil -} - -/* - Func: GetL1TxSendersByTxStatus - Return: txVerification []*L1TxSender, err error - Description: get L1TxSender by txVerification type and status -*/ -func (m *defaultL1TxSenderModel) GetL1TxSendersByTxTypeAndStatus(txType uint8, txStatus int) (rowsAffected int64, txs 
[]*L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Where("tx_type = ? AND tx_status = ?", txType, txStatus).Find(&txs) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.GetL1TxSendersByTxTypeAndStatus] %s", dbTx.Error) - logx.Error(err) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, txs, nil -} - -func (m *defaultL1TxSenderModel) GetL1TxSendersByTxHashAndTxType(txHash string, txType uint8) (rowsAffected int64, txs []*L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Where("l1_tx_hash = ? AND tx_type = ?", txHash, txType).Find(&txs) - if dbTx.Error != nil { - err := fmt.Sprintf("[l1TxSender.GetL1TxSendersByTxHashAndTxType] %s", dbTx.Error) - logx.Error(err) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, txs, nil -} - -func (m *defaultL1TxSenderModel) DeleteL1TxSender(sender *L1TxSender) error { - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - dbTx := tx.Table(m.table).Where("id = ?", sender.ID).Delete(&sender) - if dbTx.Error != nil { - logx.Errorf("[l1TxSender.DeleteL1TxSender] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[l1TxSender.DeleteL1TxSender] Delete invalid sender") - return errors.New("[l1TxSender.DeleteL1TxSender] delete invalid sender") - } - return nil - }) -} - -func (m *defaultL1TxSenderModel) UpdateRelatedEventsAndResetRelatedAssetsAndTxs( - pendingUpdateBlocks []*block.Block, - pendingUpdateSenders []*L1TxSender, - pendingUpdateMempoolTxs []*mempool.MempoolTx, - pendingUpdateProofSenderStatus map[int64]int, -) (err error) { - const ( - Txs = "Txs" - ) - err = m.DB.Transaction(func(tx *gorm.DB) error { - // update blocks - for _, pendingUpdateBlock := range pendingUpdateBlocks { - dbTx := tx.Table(block.BlockTableName).Where("id = ?", pendingUpdateBlock.ID). - Omit(Txs). - Select("*"). - Updates(&pendingUpdateBlock) - if dbTx.Error != nil { - err := fmt.Sprintf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - blocksInfo, err := json.Marshal(pendingUpdateBlocks) - if err != nil { - res := fmt.Sprintf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", err) - logx.Error(res) - return err - } - logx.Error("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Invalid block: " + string(blocksInfo)) - return errors.New("Invalid blocks: " + string(blocksInfo)) - } - } - // update sender - for _, pendingUpdateSender := range pendingUpdateSenders { - dbTx := tx.Table(TableName).Where("id = ?", pendingUpdateSender.ID). - Select("*"). 
- Updates(&pendingUpdateSender) - if dbTx.Error != nil { - err := fmt.Sprintf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - senderInfo, err := json.Marshal(pendingUpdateSender) - if err != nil { - res := fmt.Sprintf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", err) - logx.Error(res) - return err - } - logx.Error("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s" + "Invalid sender: " + string(senderInfo)) - return errors.New("Invalid sender: " + string(senderInfo)) - } - } - // delete mempool txs - for _, pendingDeleteMempoolTx := range pendingUpdateMempoolTxs { - for _, detail := range pendingDeleteMempoolTx.MempoolDetails { - dbTx := tx.Table(mempool.DetailTableName).Where("id = ?", detail.ID).Delete(&detail) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - return errors.New("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - } - } - dbTx := tx.Table(mempool.MempoolTableName).Where("id = ?", pendingDeleteMempoolTx.ID).Delete(&pendingDeleteMempoolTx) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - return errors.New("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - } - } - // modify proofSender Status - for blockHeight, newStatus := range pendingUpdateProofSenderStatus { - var row *proofSender.ProofSender - dbTx := tx.Table(proofSender.TableName).Where("block_number = ?", blockHeight).Find(&row) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] No such proof. Height: %d", blockHeight) - return fmt.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] No such proof. Height: %d", blockHeight) - } - dbTx = tx.Model(&row). - Select("status"). - Updates(&proofSender.ProofSender{Status: int64(newStatus)}) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Update No Proof: %d", row.BlockNumber) - return fmt.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Update No Proof: %d", row.BlockNumber) - } - } - return nil - }) - return err -} - -func (m *defaultL1TxSenderModel) GetLatestHandledBlock(txType int64) (txSender *L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Where("tx_type = ? AND tx_status = ?", txType, HandledStatus).Order("l2_block_height desc").Find(&txSender) - if dbTx.Error != nil { - logx.Errorf("[GetLatestHandledBlock] unable to get latest handled block: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return txSender, nil -} - -func (m *defaultL1TxSenderModel) GetLatestPendingBlock(txType int64) (txSender *L1TxSender, err error) { - dbTx := m.DB.Table(m.table).Where("tx_type = ? 
AND tx_status = ?", txType, PendingStatus).Find(&txSender) - if dbTx.Error != nil { - logx.Errorf("[GetLatestHandledBlock] unable to get latest pending block: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return txSender, nil -} diff --git a/common/model/l2BlockEventMonitor/constant.go b/common/model/l2BlockEventMonitor/constant.go deleted file mode 100644 index 9beb4ebc7..000000000 --- a/common/model/l2BlockEventMonitor/constant.go +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package l2BlockEventMonitor - -const ( - TableName = "l2_block_event_monitor" - - // status - PendingStatus = 1 - HandledStatus = 2 - - // block event type - CommittedBlockEventType = 1 - VerifiedBlockEventType = 2 - RevertedBlockEventType = 3 -) diff --git a/common/model/l2BlockEventMonitor/l2BlockEventMonitor.go b/common/model/l2BlockEventMonitor/l2BlockEventMonitor.go deleted file mode 100644 index a206b9c81..000000000 --- a/common/model/l2BlockEventMonitor/l2BlockEventMonitor.go +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package l2BlockEventMonitor - -import ( - "errors" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L2BlockEventMonitorModel interface { - CreateL2BlockEventMonitorTable() error - DropL2BlockEventMonitorTable() error - CreateL2BlockEventMonitor(tx *L2BlockEventMonitor) (bool, error) - CreateL2BlockEventMonitorsInBatches(l2TxEventMonitors []*L2BlockEventMonitor) (rowsAffected int64, err error) - GetL2BlockEventMonitors() (events []*L2BlockEventMonitor, err error) - GetL2BlockEventMonitorsByEventType(blockEventType uint8) (events []*L2BlockEventMonitor, err error) - GetL2BlockEventMonitorsByEventTypeAndStatus(eventType uint8, status int) (events []*L2BlockEventMonitor, err error) - GetL2BlockEventMonitorsByStatus(status int) (rowsAffected int64, events []*L2BlockEventMonitor, err error) - GetL2BlockEventMonitorsByTxType(txType uint8) (events []*L2BlockEventMonitor, err error) - GetL2BlockEventMonitorsByL2BlockHeight(l2BlockHeight int64) (events []*L2BlockEventMonitor, err error) - } - - defaultL2BlockEventMonitorModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2BlockEventMonitor struct { - gorm.Model - // event type, 1 - Committed, 2 - Verified, 3 - Reverted - BlockEventType uint8 `gorm:"index"` - // layer-1 block height - L1BlockHeight int64 - // layer-1 txVerification hash - L1TxHash string - // layer-2 block height - L2BlockHeight int64 `gorm:"index"` - // status - Status int - } -) - -func (*L2BlockEventMonitor) TableName() string { - return TableName -} - -func NewL2BlockEventMonitorModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2BlockEventMonitorModel { - return &defaultL2BlockEventMonitorModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TableName, - DB: db, - } -} - -/* - Func: CreateL2BlockEventMonitorTable - Params: - Return: err error - Description: create l2 txVerification event monitor table -*/ -func (m *defaultL2BlockEventMonitorModel) CreateL2BlockEventMonitorTable() error { - return m.DB.AutoMigrate(L2BlockEventMonitor{}) -} - -/* - Func: DropL2BlockEventMonitorTable - Params: - Return: err error - Description: drop l2 txVerification event monitor table -*/ -func (m *defaultL2BlockEventMonitorModel) DropL2BlockEventMonitorTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateL2BlockEventMonitor - Params: asset *L2BlockEventMonitor - Return: bool, error - Description: create L2BlockEventMonitor txVerification -*/ -func (m *defaultL2BlockEventMonitorModel) CreateL2BlockEventMonitor(tx *L2BlockEventMonitor) (bool, error) { - dbTx := m.DB.Table(m.table).Create(tx) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.CreateL2BlockEventMonitor] %s", dbTx.Error) - logx.Error(err) - return false, dbTx.Error - } else if dbTx.RowsAffected == 0 { - ErrInvalidL2BlockEventMonitor := errors.New("invalid l2BlockEventMonitor.go") - err := fmt.Sprintf("[l2BlockEventMonitor.go.CreateL2BlockEventMonitor] %s", ErrInvalidL2BlockEventMonitor) - logx.Error(err) - return false, ErrInvalidL2BlockEventMonitor - } - return true, nil -} - -/* - Func: CreateL2BlockEventMonitorsInBatches - Params: []*L2BlockEventMonitor - Return: rowsAffected int64, err error - Description: create L2BlockEventMonitor batches -*/ -func (m *defaultL2BlockEventMonitorModel) 
CreateL2BlockEventMonitorsInBatches(l2TxEventMonitors []*L2BlockEventMonitor) (rowsAffected int64, err error) { - dbTx := m.DB.Table(m.table).CreateInBatches(l2TxEventMonitors, len(l2TxEventMonitors)) - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.CreateL1AssetsMonitorInBatches] %s", dbTx.Error) - logx.Error(err) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return 0, nil - } - return dbTx.RowsAffected, nil -} - -/* - GetL2BlockEventMonitors: get all L2BlockEventMonitors -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitors() (events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Find(&events).Order("l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitors] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitors] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return events, dbTx.Error -} - -/* - Func: GetL2BlockEventMonitorsByEventType - Return: events []*L2BlockEventMonitor, err error - Description: get l2TxEventMonitors by event type -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitorsByEventType(blockEventType uint8) (events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("block_event_type = ?", blockEventType).Find(&events).Order("l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByEventType] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByEventType] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return events, nil -} - -/* - Func: GetL2BlockEventMonitorsByEventTypeAndStatus - Return: events []*L2BlockEventMonitor, err error - Description: get l2TxEventMonitors by event type and status -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitorsByEventTypeAndStatus(eventType uint8, status int) (events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("block_event_type = ? 
AND status = ?", eventType, status).Find(&events).Order("l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByEventTypeAndStatus] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByEventTypeAndStatus] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return events, nil -} - -/* - Func: GetL2BlockEventMonitorsByEventTypeAndStatus - Return: events []*L2BlockEventMonitor, err error - Description: get l2TxEventMonitors by event type and status -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitorsByStatus(status int) (rowsAffected int64, events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("status = ?", status).Find(&events).Order("block_event_type AND l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByEventTypeAndStatus] %s", dbTx.Error) - logx.Error(err) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, events, nil -} - -/* - Func: GetL2BlockEventMonitorsByTxType - Return: events []*L2BlockEventMonitor, err error - Description: get l2TxEventMonitors by txVerification type -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitorsByTxType(txType uint8) (events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("tx_type = ?", txType).Find(&events).Order("l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByTxType] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByTxType] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return events, nil -} - -/* - Func: GetL2BlockEventMonitorsByL2BlockHeight - Return: events []*L2BlockEventMonitor, err error - Description: get l2TxEventMonitors by l2 block height -*/ -func (m *defaultL2BlockEventMonitorModel) GetL2BlockEventMonitorsByL2BlockHeight(l2BlockHeight int64) (events []*L2BlockEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Where("l2_block_height = ?", l2BlockHeight).Find(&events).Order("l2_block_height") - if dbTx.Error != nil { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByL2BlockHeight] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[l2BlockEventMonitor.go.GetL2BlockEventMonitorsByL2BlockHeight] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return events, nil -} diff --git a/common/model/l2TxEventMonitor/constant.go b/common/model/l2TxEventMonitor/constant.go deleted file mode 100644 index c16dc86ed..000000000 --- a/common/model/l2TxEventMonitor/constant.go +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package l2TxEventMonitor - -const ( - TableName = "l2_tx_event_monitor" - - PendingStatus = 1 - HandledStatus = 2 -) diff --git a/common/model/l2TxEventMonitor/l2TxEventMonitor.go b/common/model/l2TxEventMonitor/l2TxEventMonitor.go deleted file mode 100644 index 13bfd8e4b..000000000 --- a/common/model/l2TxEventMonitor/l2TxEventMonitor.go +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package l2TxEventMonitor - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L2TxEventMonitorModel interface { - CreateL2TxEventMonitorTable() error - DropL2TxEventMonitorTable() error - CreateL2TxEventMonitor(tx *L2TxEventMonitor) (bool, error) - CreateL2TxEventMonitorsInBatches(l2TxEventMonitors []*L2TxEventMonitor) (rowsAffected int64, err error) - GetL2TxEventMonitorsByStatus(status int) (txs []*L2TxEventMonitor, err error) - GetL2TxEventMonitorsBySenderAddress(senderAddr string) (txs []*L2TxEventMonitor, err error) - GetL2TxEventMonitorsByTxType(txType uint8) (txs []*L2TxEventMonitor, err error) - CreateMempoolAndActiveAccount( - pendingNewAccount []*account.Account, - pendingNewMempoolTxs []*mempool.MempoolTx, - pendingNewLiquidityInfos []*liquidity.Liquidity, - pendingNewNfts []*nft.L2Nft, - pendingUpdateL2Events []*L2TxEventMonitor) (err error) - GetLastHandledRequestId() (requestId int64, err error) - } - - defaultL2TxEventMonitorModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2TxEventMonitor struct { - gorm.Model - // related txVerification hash - L1TxHash string - // related block height - L1BlockHeight int64 - // sender - SenderAddress string - // request id - RequestId int64 - // tx type - TxType int64 - // pub data - Pubdata string - // expirationBlock - ExpirationBlock int64 - // status - Status int - } -) - -func (*L2TxEventMonitor) TableName() string { - return TableName -} - -func NewL2TxEventMonitorModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2TxEventMonitorModel { - return &defaultL2TxEventMonitorModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TableName, - DB: db, - } -} - -/* - Func: 
CreateL2TxEventMonitorTable - Params: - Return: err error - Description: create l2 txVerification event monitor table -*/ -func (m *defaultL2TxEventMonitorModel) CreateL2TxEventMonitorTable() error { - return m.DB.AutoMigrate(L2TxEventMonitor{}) -} - -/* - Func: DropL2TxEventMonitorTable - Params: - Return: err error - Description: drop l2 txVerification event monitor table -*/ -func (m *defaultL2TxEventMonitorModel) DropL2TxEventMonitorTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateL2TxEventMonitor - Params: asset *L2TxEventMonitor - Return: bool, error - Description: create L2TxEventMonitor txVerification -*/ -func (m *defaultL2TxEventMonitorModel) CreateL2TxEventMonitor(tx *L2TxEventMonitor) (bool, error) { - dbTx := m.DB.Table(m.table).Create(tx) - if dbTx.Error != nil { - logx.Errorf("[l2TxEventMonitor.CreateL2TxEventMonitor] %s", dbTx.Error.Error()) - return false, dbTx.Error - } else if dbTx.RowsAffected == 0 { - ErrInvalidL2TxEventMonitor := errors.New("invalid l2TxEventMonitor") - logx.Errorf("[l2TxEventMonitor.CreateL2TxEventMonitor] %s", ErrInvalidL2TxEventMonitor.Error()) - return false, ErrInvalidL2TxEventMonitor - } - return true, nil -} - -/* - Func: CreateL2TxEventMonitorsInBatches - Params: []*L2TxEventMonitor - Return: rowsAffected int64, err error - Description: create L2TxEventMonitor batches -*/ -func (m *defaultL2TxEventMonitorModel) CreateL2TxEventMonitorsInBatches(l2TxEventMonitors []*L2TxEventMonitor) (rowsAffected int64, err error) { - dbTx := m.DB.Table(m.table).CreateInBatches(l2TxEventMonitors, len(l2TxEventMonitors)) - if dbTx.Error != nil { - logx.Errorf("[l2TxEventMonitor.CreateL1AssetsMonitorInBatches] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return 0, nil - } - return dbTx.RowsAffected, nil -} - -/* - GetL2TxEventMonitors: get all L2TxEventMonitors -*/ -func (m *defaultL2TxEventMonitorModel) GetL2TxEventMonitors() (txs []*L2TxEventMonitor, err error) { - dbTx := m.DB.Table(m.table).Find(&txs).Order("l1_block_height") - if dbTx.Error != nil { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitors] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[l2TxEventMonitor.GetL2TxEventMonitors] not found") - return nil, errorcode.DbErrNotFound - } - return txs, dbTx.Error -} - -/* - Func: GetPendingL2TxEventMonitors - Return: txVerification []*L2TxEventMonitor, err error - Description: get pending l2TxEventMonitors -*/ -func (m *defaultL2TxEventMonitorModel) GetL2TxEventMonitorsByStatus(status int) (txs []*L2TxEventMonitor, err error) { - // todo order id - dbTx := m.DB.Table(m.table).Where("status = ?", status).Order("request_id").Find(&txs) - if dbTx.Error != nil { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitorsByStatus] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Infof("[l2TxEventMonitor.GetL2TxEventMonitorsByStatus] %s", errorcode.DbErrNotFound.Error()) - return nil, errorcode.DbErrNotFound - } - return txs, nil -} - -/* - Func: GetL2TxEventMonitorsByAccountName - Return: txVerification []*L2TxEventMonitor, err error - Description: get l2TxEventMonitors by account name -*/ -func (m *defaultL2TxEventMonitorModel) GetL2TxEventMonitorsBySenderAddress(senderAddr string) (txs []*L2TxEventMonitor, err error) { - // todo order id - dbTx := m.DB.Table(m.table).Where("sender_address = ?", senderAddr).Find(&txs).Order("l1_block_height") - if dbTx.Error != 
nil { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitorsBySenderAddress] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitorsBySenderAddress] %s", errorcode.DbErrNotFound.Error()) - return nil, errorcode.DbErrNotFound - } - return txs, nil -} - -/* - Func: GetL2TxEventMonitorsByTxType - Return: txVerification []*L2TxEventMonitor, err error - Description: get l2TxEventMonitors by txVerification type -*/ -func (m *defaultL2TxEventMonitorModel) GetL2TxEventMonitorsByTxType(txType uint8) (txs []*L2TxEventMonitor, err error) { - // todo order id - dbTx := m.DB.Table(m.table).Where("tx_type = ?", txType).Find(&txs).Order("l1_block_height") - if dbTx.Error != nil { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitorsByTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[l2TxEventMonitor.GetL2TxEventMonitorsByTxType] %s", errorcode.DbErrNotFound.Error()) - return nil, errorcode.DbErrNotFound - } - return txs, nil -} - -func (m *defaultL2TxEventMonitorModel) CreateMempoolAndActiveAccount( - pendingNewAccount []*account.Account, - pendingNewMempoolTxs []*mempool.MempoolTx, - pendingNewLiquidityInfos []*liquidity.Liquidity, - pendingNewNfts []*nft.L2Nft, - pendingUpdateL2Events []*L2TxEventMonitor, -) (err error) { - err = m.DB.Transaction( - func(tx *gorm.DB) error { //transact - dbTx := tx.Table(account.AccountTableName).CreateInBatches(pendingNewAccount, len(pendingNewAccount)) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create pending new account: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewAccount)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new account") - return errors.New("[CreateMempoolAndActiveAccount] invalid new account") - } - dbTx = tx.Table(mempool.MempoolTableName).CreateInBatches(pendingNewMempoolTxs, len(pendingNewMempoolTxs)) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create pending new mempool txs: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewMempoolTxs)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new mempool txs") - return errors.New("[CreateMempoolAndActiveAccount] invalid new mempool txs") - } - if len(pendingNewLiquidityInfos) != 0 { - dbTx = tx.Table(liquidity.LiquidityTable).CreateInBatches(pendingNewLiquidityInfos, len(pendingNewLiquidityInfos)) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create pending new liquidity infos: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewLiquidityInfos)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new liquidity infos") - return errors.New("[CreateMempoolAndActiveAccount] invalid new liquidity infos") - } - } - if len(pendingNewNfts) != 0 { - dbTx = tx.Table(nft.L2NftTableName).CreateInBatches(pendingNewNfts, len(pendingNewNfts)) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create pending new nft infos: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewNfts)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new nft infos") - return errors.New("[CreateMempoolAndActiveAccount] invalid new nft infos") - } - } - for _, pendingUpdateL2Event := range pendingUpdateL2Events { - dbTx 
= tx.Table(m.table).Where("id = ?", pendingUpdateL2Event.ID).Select("*").Updates(&pendingUpdateL2Event) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to update l2 tx event: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid l2 tx event") - return errors.New("[CreateMempoolAndActiveAccount] invalid l2 tx event") - } - } - return nil - }) - return err -} - -func (m *defaultL2TxEventMonitorModel) GetLastHandledRequestId() (requestId int64, err error) { - var event *L2TxEventMonitor - dbTx := m.DB.Table(m.table).Where("status = ?", HandledStatus).Order("request_id desc").Find(&event) - if dbTx.Error != nil { - logx.Errorf("[GetLastHandledRequestId] unable to get last handled request id: %s", dbTx.Error.Error()) - return -1, dbTx.Error - } - if dbTx.RowsAffected == 0 { - return -1, nil - } - return event.RequestId, nil -} diff --git a/common/model/liquidity/constant.go b/common/model/liquidity/constant.go deleted file mode 100644 index 6cba9006c..000000000 --- a/common/model/liquidity/constant.go +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package liquidity - -const ( - LiquidityTable = `liquidity` - LiquidityHistoryTable = `liquidity_history` -) diff --git a/common/model/liquidity/liquidity.go b/common/model/liquidity/liquidity.go deleted file mode 100644 index 117c403a3..000000000 --- a/common/model/liquidity/liquidity.go +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package liquidity - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - LiquidityModel interface { - CreateLiquidityTable() error - DropLiquidityTable() error - CreateLiquidity(liquidity *Liquidity) error - CreateLiquidityInBatches(entities []*Liquidity) error - GetLiquidityByPairIndex(pairIndex int64) (entity *Liquidity, err error) - } - - defaultLiquidityModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - Liquidity struct { - gorm.Model - PairIndex int64 - AssetAId int64 - AssetA string - AssetBId int64 - AssetB string - LpAmount string - KLast string - FeeRate int64 - TreasuryAccountIndex int64 - TreasuryRate int64 - } -) - -func NewLiquidityModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) LiquidityModel { - return &defaultLiquidityModel{ - CachedConn: sqlc.NewConn(conn, c), - table: LiquidityTable, - DB: db, - } -} - -func (*Liquidity) TableName() string { - return LiquidityTable -} - -/* - Func: CreateAccountLiquidityTable - Params: - Return: err error - Description: create account liquidity table -*/ -func (m *defaultLiquidityModel) CreateLiquidityTable() error { - return m.DB.AutoMigrate(Liquidity{}) -} - -/* - Func: DropAccountLiquidityTable - Params: - Return: err error - Description: drop account liquidity table -*/ -func (m *defaultLiquidityModel) DropLiquidityTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateAccountLiquidity - Params: liquidity *Liquidity - Return: err error - Description: create account liquidity entity -*/ -func (m *defaultLiquidityModel) CreateLiquidity(liquidity *Liquidity) error { - dbTx := m.DB.Table(m.table).Create(liquidity) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.CreateLiquidity] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.CreateLiquidity] %s", errorcode.DbErrFailToCreateLiquidity) - logx.Error(err) - return errorcode.DbErrFailToCreateLiquidity - } - return nil -} - -/* - Func: CreateAccountLiquidityInBatches - Params: entities []*Liquidity - Return: err error - Description: create account liquidity entities -*/ -func (m *defaultLiquidityModel) CreateLiquidityInBatches(entities []*Liquidity) error { - dbTx := m.DB.Table(m.table).CreateInBatches(entities, len(entities)) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.CreateLiquidityInBatches] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.CreateLiquidityInBatches] %s", errorcode.DbErrFailToCreateLiquidity) - logx.Error(err) - return errorcode.DbErrFailToCreateLiquidity - } - return nil -} - -/* - Func: GetAccountLiquidityByPairIndex - Params: pairIndex int64 - Return: entities []*Liquidity, err error - Description: get account liquidity entities by account index -*/ -func (m *defaultLiquidityModel) GetLiquidityByPairIndex(pairIndex int64) (entity *Liquidity, err error) { - dbTx := m.DB.Table(m.table).Where("pair_index = ?", pairIndex).Find(&entity) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.GetLiquidityByPairIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.GetLiquidityByPairIndex] %s", 
errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return entity, nil -} diff --git a/common/model/liquidity/liquidityHistory.go b/common/model/liquidity/liquidityHistory.go deleted file mode 100644 index e201261c8..000000000 --- a/common/model/liquidity/liquidityHistory.go +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package liquidity - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - LiquidityHistoryModel interface { - CreateLiquidityHistoryTable() error - DropLiquidityHistoryTable() error - CreateLiquidityHistory(liquidity *LiquidityHistory) error - CreateLiquidityHistoryInBatches(entities []*LiquidityHistory) error - GetAccountLiquidityHistoryByPairIndex(pairIndex int64) (entities []*LiquidityHistory, err error) - GetLatestLiquidityByBlockHeight(blockHeight int64) (entities []*LiquidityHistory, err error) - GetLatestLiquidityByPairIndex(pairIndex int64) (entity *LiquidityHistory, err error) - } - - defaultLiquidityHistoryModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - LiquidityHistory struct { - gorm.Model - PairIndex int64 - AssetAId int64 - AssetA string - AssetBId int64 - AssetB string - LpAmount string - KLast string - FeeRate int64 - TreasuryAccountIndex int64 - TreasuryRate int64 - L2BlockHeight int64 - } -) - -func NewLiquidityHistoryModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) LiquidityHistoryModel { - return &defaultLiquidityHistoryModel{ - CachedConn: sqlc.NewConn(conn, c), - table: LiquidityHistoryTable, - DB: db, - } -} - -func (*LiquidityHistory) TableName() string { - return LiquidityHistoryTable -} - -/* - Func: CreateAccountLiquidityHistoryTable - Params: - Return: err error - Description: create account liquidity table -*/ -func (m *defaultLiquidityHistoryModel) CreateLiquidityHistoryTable() error { - return m.DB.AutoMigrate(LiquidityHistory{}) -} - -/* - Func: DropAccountLiquidityHistoryTable - Params: - Return: err error - Description: drop account liquidity table -*/ -func (m *defaultLiquidityHistoryModel) DropLiquidityHistoryTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: CreateAccountLiquidityHistory - Params: liquidity *LiquidityHistory - Return: err error - Description: create account liquidity entity -*/ -func (m *defaultLiquidityHistoryModel) CreateLiquidityHistory(liquidity *LiquidityHistory) error { - dbTx := m.DB.Table(m.table).Create(liquidity) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.CreateLiquidityHistory] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.CreateLiquidityHistory] %s", errorcode.DbErrFailToCreateLiquidity) - logx.Error(err) - 
return errorcode.DbErrFailToCreateLiquidity - } - return nil -} - -/* - Func: CreateAccountLiquidityHistoryInBatches - Params: entities []*LiquidityHistory - Return: err error - Description: create account liquidity entities -*/ -func (m *defaultLiquidityHistoryModel) CreateLiquidityHistoryInBatches(entities []*LiquidityHistory) error { - dbTx := m.DB.Table(m.table).CreateInBatches(entities, len(entities)) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.CreateLiquidityHistoryInBatches] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.CreateLiquidityHistoryInBatches] %s", errorcode.DbErrFailToCreateLiquidity) - logx.Error(err) - return errorcode.DbErrFailToCreateLiquidity - } - return nil -} - -/* - Func: GetAccountLiquidityHistoryByPairIndex - Params: pairIndex int64 - Return: entities []*LiquidityHistory, err error - Description: get account liquidity entities by account index -*/ -func (m *defaultLiquidityHistoryModel) GetAccountLiquidityHistoryByPairIndex(pairIndex int64) (entities []*LiquidityHistory, err error) { - dbTx := m.DB.Table(m.table).Where("pair_index = ?", pairIndex).Find(&entities) - if dbTx.Error != nil { - err := fmt.Sprintf("[liquidity.GetAccountLiquidityHistoryByPairIndex] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[liquidity.GetAccountLiquidityHistoryByPairIndex] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return entities, nil -} - -func (m *defaultLiquidityHistoryModel) GetLatestLiquidityByBlockHeight(blockHeight int64) (entities []*LiquidityHistory, err error) { - dbTx := m.DB.Table(m.table). - Raw("SELECT a.* FROM liquidity_history a WHERE NOT EXISTS"+ - "(SELECT * FROM liquidity_history WHERE pair_index = a.pair_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height) "+ - "AND l2_block_height <= ? ORDER BY pair_index", blockHeight, blockHeight). - Find(&entities) - if dbTx.Error != nil { - logx.Errorf("[GetValidAccounts] unable to get related accounts: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return entities, nil -} - -func (m *defaultLiquidityHistoryModel) GetLatestLiquidityByPairIndex(pairIndex int64) (entity *LiquidityHistory, err error) { - dbTx := m.DB.Table(m.table).Where("pair_index = ?", pairIndex).Order("l2_block_height desc").Find(&entity) - if dbTx.Error != nil { - logx.Errorf("[GetLatestLiquidityByPairIndex] unable to get related liquidity: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return entity, nil -} diff --git a/common/model/mempool/constant.go b/common/model/mempool/constant.go deleted file mode 100644 index f67557195..000000000 --- a/common/model/mempool/constant.go +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package mempool - -const ( - MempoolTableName = `mempool_tx` - DetailTableName = `mempool_tx_detail` -) - -const ( - PendingTxStatus = iota - SuccessTxStatus - FailTxStatus -) diff --git a/common/model/mempool/mempool.go b/common/model/mempool/mempool.go deleted file mode 100644 index 5dba02614..000000000 --- a/common/model/mempool/mempool.go +++ /dev/null @@ -1,805 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package mempool - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - MempoolModel interface { - CreateMempoolTxTable() error - DropMempoolTxTable() error - GetMempoolTxByTxId(id uint) (mempoolTx *MempoolTx, err error) - GetAllMempoolTxsList() (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsListForCommitter() (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsList(limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsListByAccountIndex(accountIndex int64, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsListByAccountIndexAndTxType(accountIndex int64, txType uint8, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsListByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) - GetMempoolTxsTotalCount() (count int64, err error) - GetMempoolTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) - GetMempoolTxsTotalCountByAccountIndexAndTxType(accountIndex int64, txType uint8) (count int64, err error) - GetMempoolTxsTotalCountByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8) (count int64, err error) - GetMempoolTxsTotalCountByPublicKey(pk string) (count int64, err error) - GetMempoolTxByTxHash(hash string) (mempoolTxs *MempoolTx, err error) - GetMempoolTxsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, mempoolTxs []*MempoolTx, err error) - GetPendingLiquidityTxs() (mempoolTxs []*MempoolTx, err error) - GetPendingNftTxs() (mempoolTxs []*MempoolTx, err error) - CreateBatchedMempoolTxs(mempoolTxs []*MempoolTx) error - CreateMempoolTxAndL2CollectionAndNonce(mempoolTx *MempoolTx, nftInfo *nft.L2NftCollection) error - CreateMempoolTxAndL2Nft(mempoolTx *MempoolTx, nftInfo *nft.L2Nft) error - CreateMempoolTxAndL2NftExchange(mempoolTx *MempoolTx, offers []*nft.Offer, nftExchange *nft.L2NftExchange) error - CreateMempoolTxAndUpdateOffer(mempoolTx *MempoolTx, offer *nft.Offer, isUpdate bool) error - DeleteMempoolTxs(txIds []*int64) error - - 
GetPendingMempoolTxsByAccountIndex(accountIndex int64) (mempoolTxs []*MempoolTx, err error) - GetLatestL2MempoolTxByAccountIndex(accountIndex int64) (mempoolTx *MempoolTx, err error) - } - - defaultMempoolModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - MempoolTx struct { - gorm.Model - TxHash string `gorm:"uniqueIndex"` - TxType int64 - GasFeeAssetId int64 - GasFee string - NftIndex int64 - PairIndex int64 - AssetId int64 - TxAmount string - NativeAddress string - TxInfo string - ExtraInfo string - Memo string - AccountIndex int64 - Nonce int64 - ExpiredAt int64 - L2BlockHeight int64 - Status int `gorm:"index"` // 0: pending tx; 1: committed tx; 2: verified tx; - - MempoolDetails []*MempoolTxDetail `json:"mempool_details" gorm:"foreignKey:TxId"` - } -) - -func NewMempoolModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) MempoolModel { - return &defaultMempoolModel{ - CachedConn: sqlc.NewConn(conn, c), - table: MempoolTableName, - DB: db, - } -} - -func (*MempoolTx) TableName() string { - return MempoolTableName -} - -/* - Func: CreateMempoolTxTable - Params: - Return: err error - Description: create MempoolTx table -*/ -func (m *defaultMempoolModel) CreateMempoolTxTable() error { - return m.DB.AutoMigrate(MempoolTx{}) -} - -/* - Func: DropMempoolTxTable - Params: - Return: err error - Description: drop MempoolTx table -*/ -func (m *defaultMempoolModel) DropMempoolTxTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetAllMempoolTxsList - Params: - Return: []*MempoolTx, err error - Description: used for Init globalMap -*/ - -func (m *defaultMempoolModel) OrderMempoolTxDetails(tx *MempoolTx) (err error) { - var mempoolForeignKeyColumn = `MempoolDetails` - var tmpMempoolTxDetails []*MempoolTxDetail - err = m.DB.Model(&tx).Association(mempoolForeignKeyColumn).Find(&tmpMempoolTxDetails) - tx.MempoolDetails = make([]*MempoolTxDetail, len(tmpMempoolTxDetails)) - for i := 0; i < len(tmpMempoolTxDetails); i++ { - tx.MempoolDetails[tmpMempoolTxDetails[i].Order] = tmpMempoolTxDetails[i] - } - return err -} - -func (m *defaultMempoolModel) GetAllMempoolTxsList() (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Order("created_at, id").Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsList] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -/* - Func: GetMempoolTxsList - Params: limit int, offset int - Return: []*MempoolTx, err error - Description: used for /api/v1/txVerification/getMempoolTxsList -*/ -func (m *defaultMempoolModel) GetMempoolTxsList(limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ?", PendingTxStatus).Limit(int(limit)).Offset(int(offset)).Order("created_at desc, id desc").Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsList] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) 
GetMempoolTxsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("l2_block_height = ?", l2BlockHeight).Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsByBlockHeight] %s", dbTx.Error) - return 0, nil, dbTx.Error - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsByBlockHeight] Get Associate MempoolDetails Error") - return 0, nil, err - } - } - return dbTx.RowsAffected, mempoolTxs, nil -} - -/* - Func: GetMempoolTxsListForCommitter - Return: []*MempoolTx, err error - Description: query unhandled mempool txVerification -*/ - -func (m *defaultMempoolModel) GetMempoolTxsListForCommitter() (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ?", PendingTxStatus).Order("created_at, id").Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsList] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -/* - Func: GetMempoolTxsListByAccountIndex - Params: accountIndex int64, limit int, offset int - Return: []*MempoolTx, err error - Description: used for /api/v1/txVerification/getMempoolTxsListByAccountIndex -*/ - -func (m *defaultMempoolModel) GetMempoolTxsListByAccountIndex(accountIndex int64, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[mempool.GetMempoolTxsListByAccountIndex] No rows in mempool list") - return nil, errorcode.DbErrNotFound - } - - dbTx = m.DB.Table(m.table).Where("status = ?", PendingTxStatus).Order("created_at desc").Offset(int(offset)).Limit(int(limit)).Find(&mempoolTxs, mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[mempool.GetMempoolTxsListByAccountIndex] No rows in mempool with Pending Status") - return nil, errorcode.DbErrNotFound - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndex] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -/* - Func: GetMempoolTxsListByAccountIndexAndTxType - Params: accountIndex int64, txType uint8, limit int64, offset int64 - Return: []*MempoolTx, err error - Description: -*/ - -func (m *defaultMempoolModel) GetMempoolTxsListByAccountIndexAndTxType(accountIndex int64, txType uint8, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - 
dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get MempoolIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Where("status = ? and tx_type = ?", PendingTxStatus, txType).Order("created_at desc").Offset(int(offset)).Limit(int(limit)).Find(&mempoolTxs, mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get MempoolTxs Error") - return nil, errorcode.DbErrNotFound - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) GetMempoolTxsListByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8, limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get MempoolIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Where("status = ? and tx_type in (?)", PendingTxStatus, txTypeArray).Order("created_at desc").Offset(int(offset)).Limit(int(limit)).Find(&mempoolTxs, mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get MempoolTxs Error") - return nil, errorcode.DbErrNotFound - } - // TODO: cache operation - for _, mempoolTx := range mempoolTxs { - err := m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsListByAccountIndexAndTxType] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -/* - Func: GetMempoolTxsTotalCount - Params: - Return: count int64, err error - Description: used for counting total transactions in mempool for explorer dashboard -*/ -func (m *defaultMempoolModel) GetMempoolTxsTotalCount() (count int64, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? 
and deleted_at is NULL", PendingTxStatus).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCount] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - return count, nil -} - -/* - Func: GetMempoolTxsTotalCountByAccountIndex - Params: accountIndex int64 - Return: count int64, err error - Description: used for counting total transactions in mempool for explorer dashboard -*/ -func (m *defaultMempoolModel) GetMempoolTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndex] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("status = ? and id in (?) and deleted_at is NULL", PendingTxStatus, mempoolIds).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndex] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Infof("[mempool.GetMempoolTxsTotalCountByAccountIndex] no txVerification of account index %d in mempool", accountIndex) - return 0, nil - } - return count, nil -} - -/* - Func: GetMempoolTxsTotalCountByAccountIndexAndTxType - Params: accountIndex int64, txType uint8 - Return: count int64, err error - Description: used for counting total transactions in mempool for explorer dashboard -*/ -func (m *defaultMempoolModel) GetMempoolTxsTotalCountByAccountIndexAndTxType(accountIndex int64, txType uint8) (count int64, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("status = ? and id in (?) 
and deleted_at is NULL and tx_type = ?", PendingTxStatus, mempoolIds, txType).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Infof("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] no txVerification of account index %d and txVerification type = %d in mempool", accountIndex, txType) - return 0, nil - } - return count, nil -} - -/* - Func: GetMempoolTxsTotalCountByAccountIndexAndTxTypeArray - Params: accountIndex int64, txTypeArray []uint8 - Return: count int64, err error - Description: used for counting total transactions in mempool for explorer dashboard -*/ -func (m *defaultMempoolModel) GetMempoolTxsTotalCountByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8) (count int64, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*MempoolTxDetail - dbTx := m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("status = ? and id in (?) and deleted_at is NULL and tx_type in (?)", PendingTxStatus, mempoolIds, txTypeArray).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Infof("[mempool.GetMempoolTxsTotalCountByAccountIndexAndTxType] no txVerification of account index %d and txVerification type = %v in mempool", accountIndex, txTypeArray) - return 0, nil - } - return count, nil -} - -/* - Func: GetMempoolTxsTotalCountByPublicKey - Params: pk string - Return: count int64, err error - Description: used for counting total transactions in mempool for explorer dashboard -*/ -func (m *defaultMempoolModel) GetMempoolTxsTotalCountByPublicKey(pk string) (count int64, err error) { - var ( - accountTable = `account` - accountIndex int64 - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - dbTx := m.DB.Table(accountTable).Select("account_index").Where("public_key = ?", pk).Find(&accountIndex) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByPublicKey] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - var mempoolTxDetails []*MempoolTxDetail - dbTx = m.DB.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByPublicKey] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("status = ? and id in (?) 
and deleted_at is NULL", PendingTxStatus, mempoolIds).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByPublicKey] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Infof("[mempool.GetMempoolTxsListByPublicKey] no txVerification of account index %d in mempool", accountIndex) - return 0, nil - } - return count, nil -} - -/* - Func: GetMempoolTxByTxHash - Params: hash string - Return: mempoolTxs *MempoolTx, err error - Description: used for get transactions in mempool by txVerification hash -*/ -func (m *defaultMempoolModel) GetMempoolTxByTxHash(hash string) (mempoolTx *MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? and tx_hash = ?", PendingTxStatus, hash).Find(&mempoolTx) - if dbTx.Error != nil { - if dbTx.Error == errorcode.DbErrNotFound { - return mempoolTx, dbTx.Error - } else { - logx.Errorf("[mempool.GetMempoolTxByTxHash] %s", dbTx.Error) - return nil, errorcode.DbErrSqlOperation - } - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[mempool.GetMempoolTxByTxHash] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - err = m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxByTxHash] Get Associate MempoolDetails Error") - return nil, err - } - return mempoolTx, nil -} - -/* - Func: CreateBatchedMempoolTxs - Params: []*MempoolTx - Return: error - Description: Insert MempoolTxs when sendTx request. -*/ - -func (m *defaultMempoolModel) CreateBatchedMempoolTxs(mempoolTxs []*MempoolTx) error { - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - dbTx := tx.Table(m.table).Create(mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxs] Create Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - return nil - }) -} - -/* - Func: DeleteMempoolTxs - Params: TxId []*int64 - Return: error - Description: Delete MempoolTxs when Committer pack new layer2 block. -*/ -func (m *defaultMempoolModel) DeleteMempoolTxs(txIds []*int64) error { - //var mempoolDetailTable = `mempool_tx_detail` - // TODO: clean cache operation - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - for _, txId := range txIds { - var mempoolTx *MempoolTx - dbTx := tx.Table(m.table).Where("id = ?", txId).Delete(&mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[mempool.DeleteMempoolTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.DeleteMempoolTxs] Delete Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - - //var mempoolTxDetail *MempoolTxDetail - //dbTx = m.DB.Table(mempoolDetailTable).Where("tx_id = ?", txId).Delete(&mempoolTxDetail) - //if dbTx.Error != nil { - // logx.Errorf("[mempool.DeleteMempoolTxs] %s", dbTx.Error) - // return dbTx.Error - //} - //if dbTx.RowsAffected == 0 { - // logx.Errorf("[mempool.DeleteMempoolTxs] Delete Invalid Mempool TxDtail") - // return ErrInvalidMempoolTxDetail - //} - } - return nil - }) -} - -/* - Func: GetMempoolTxIdsListByL2BlockHeight - Params: blockHeight - Return: []*MempoolTx, err error - Description: used for verifier get txIds from Mempool and deleting the transaction in mempool table after -*/ -func (m *defaultMempoolModel) GetMempoolTxsListByL2BlockHeight(blockHeight int64) (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? 
and l2_block_height <= ?", SuccessTxStatus, blockHeight).Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsListByL2BlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.GetMempoolTxsListByL2BlockHeight] Get MempoolTxs Error") - return nil, errorcode.DbErrNotFound - } - - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) GetLatestL2MempoolTxByAccountIndex(accountIndex int64) (mempoolTx *MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ? and nonce != -1", accountIndex). - Order("created_at desc, id desc").Find(&mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[GetLatestL2MempoolTxByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[GetLatestL2MempoolTxByAccountIndex] Get MempoolTxs Error") - return nil, errorcode.DbErrNotFound - } - return mempoolTx, nil -} - -func (m *defaultMempoolModel) GetPendingMempoolTxsByAccountIndex(accountIndex int64) (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? AND account_index = ?", PendingTxStatus, accountIndex). - Order("created_at, id").Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[GetPendingMempoolTxsByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[GetPendingMempoolTxsByAccountIndex] Get MempoolTxs Error") - return nil, errorcode.DbErrNotFound - } - for _, mempoolTx := range mempoolTxs { - err = m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[GetPendingMempoolTxsByAccountIndex] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) GetPendingLiquidityTxs() (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? and pair_index != ?", PendingTxStatus, commonConstant.NilPairIndex). - Find(&mempoolTxs) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[mempool.GetMempoolTxByTxHash] %s", dbTx.Error) - logx.Errorf(errInfo) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[mempool.GetMempoolTxByTxHash] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - for _, mempoolTx := range mempoolTxs { - err = m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxByTxHash] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) GetPendingNftTxs() (mempoolTxs []*MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("status = ? and nft_index != ?", PendingTxStatus, commonConstant.NilTxNftIndex). 
- Find(&mempoolTxs) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[mempool.GetMempoolTxByTxHash] %s", dbTx.Error.Error()) - logx.Errorf(errInfo) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[mempool.GetMempoolTxByTxHash] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - for _, mempoolTx := range mempoolTxs { - err = m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxByTxHash] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *defaultMempoolModel) CreateMempoolTxAndL2CollectionAndNonce(mempoolTx *MempoolTx, nftCollectionInfo *nft.L2NftCollection) error { - return m.DB.Transaction(func(db *gorm.DB) error { // transact - dbTx := db.Table(m.table).Create(mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2Collection] %s", dbTx.Error) - return errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2Collection] Create Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - dbTx = db.Table(nft.L2NftCollectionTableName).Create(nftCollectionInfo) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2Collection] %s", dbTx.Error) - return errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2Collection] Create Invalid nft collection info") - return errorcode.DbErrFailToCreateMempoolTx - } - return nil - }) -} - -func (m *defaultMempoolModel) CreateMempoolTxAndL2Nft(mempoolTx *MempoolTx, nftInfo *nft.L2Nft) error { - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - dbTx := tx.Table(m.table).Create(mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2Nft] %s", dbTx.Error) - return errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2Nft] Create Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - dbTx = tx.Table(nft.L2NftTableName).Create(nftInfo) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2Nft] %s", dbTx.Error) - return errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2Nft] Create Invalid nft info") - return errorcode.DbErrFailToCreateMempoolTx - } - return nil - }) -} - -func (m *defaultMempoolModel) CreateMempoolTxAndL2NftExchange(mempoolTx *MempoolTx, offers []*nft.Offer, nftExchange *nft.L2NftExchange) error { - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - dbTx := tx.Table(m.table).Create(mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] Create Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - if len(offers) != 0 { - dbTx = tx.Table(nft.OfferTableName).CreateInBatches(offers, len(offers)) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] Create Invalid nft info") - return errorcode.DbErrFailToCreateMempoolTx - } - } - dbTx = tx.Table(nft.L2NftExchangeTableName).Create(nftExchange) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] %s", dbTx.Error) - return 
dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndL2NftExchange] Create Invalid nft info") - return errorcode.DbErrFailToCreateMempoolTx - } - return nil - }) -} - -func (m *defaultMempoolModel) CreateMempoolTxAndUpdateOffer(mempoolTx *MempoolTx, offer *nft.Offer, isUpdate bool) error { - return m.DB.Transaction(func(tx *gorm.DB) error { // transact - dbTx := tx.Table(m.table).Create(mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] Create Invalid Mempool Tx") - return errorcode.DbErrFailToCreateMempoolTx - } - if isUpdate { - dbTx = tx.Table(nft.OfferTableName).Where("id = ?", offer.ID).Select("*").Updates(&offer) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] Create Invalid nft info") - return errorcode.DbErrFailToCreateMempoolTx - } - } else { - dbTx = tx.Table(nft.OfferTableName).Create(offer) - if dbTx.Error != nil { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[mempool.CreateMempoolTxAndUpdateOffer] Create Invalid nft info") - return errorcode.DbErrFailToCreateMempoolTx - } - } - return nil - }) -} - -func (m *defaultMempoolModel) GetMempoolTxByTxId(id uint) (mempoolTx *MempoolTx, err error) { - dbTx := m.DB.Table(m.table).Where("id = ?", id). - Find(&mempoolTx) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[mempool.GetMempoolTxByTxId] %s", dbTx.Error) - logx.Errorf(errInfo) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[mempool.GetMempoolTxByTxId] %s", errorcode.DbErrNotFound) - logx.Info(err) - return nil, errorcode.DbErrNotFound - } - err = m.OrderMempoolTxDetails(mempoolTx) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxByTxHash] Get Associate MempoolDetails Error") - return nil, err - } - return mempoolTx, nil -} diff --git a/common/model/mempool/mempoolDetail.go b/common/model/mempool/mempoolDetail.go deleted file mode 100644 index ba94d2a8b..000000000 --- a/common/model/mempool/mempoolDetail.go +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
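The mempool model removed above writes a mempool transaction and its companion NFT or collection row inside one GORM transaction, treating a zero RowsAffected on either insert as a failure so the whole write rolls back. A minimal sketch of that pattern, using hypothetical stand-in types rather than the real MempoolTx and L2Nft models:

package mempoolsketch

import (
    "errors"

    "gorm.io/gorm"
)

// Minimal stand-ins for the removed MempoolTx and L2Nft models.
type MempoolTx struct {
    gorm.Model
    TxHash string
}

type L2Nft struct {
    gorm.Model
    NftIndex int64
}

// createTxAndNft inserts both rows atomically: any error (including the
// zero-rows case) returned from the closure rolls the transaction back.
func createTxAndNft(db *gorm.DB, mempoolTx *MempoolTx, nft *L2Nft) error {
    return db.Transaction(func(dbTx *gorm.DB) error {
        if r := dbTx.Create(mempoolTx); r.Error != nil {
            return r.Error
        } else if r.RowsAffected == 0 {
            return errors.New("invalid mempool tx")
        }
        if r := dbTx.Create(nft); r.Error != nil {
            return r.Error
        } else if r.RowsAffected == 0 {
            return errors.New("invalid nft info")
        }
        return nil
    })
}

Returning a non-nil error from the closure is what triggers the rollback in gorm's Transaction helper; returning nil commits both inserts.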
- * - */ - -package mempool - -import ( - "time" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - MempoolTxDetailModel interface { - CreateMempoolDetailTable() error - DropMempoolDetailTable() error - GetLatestMempoolDetail(accountIndex int64, assetId int64, assetType int64) (mempoolTxDetail *MempoolTxDetail, err error) - GetAccountAssetsMempoolDetails(accountIndex int64, assetType int64) (mempoolTxDetails []*MempoolTxDetail, err error) - GetAccountMempoolDetails(accountIndex int64) (mempoolTxDetails []*MempoolTxDetail, err error) - GetMempoolTxDetailsByAccountIndex(accountIndex int64) (mempoolTxDetails []*MempoolTxDetail, err error) - GetAccountAssetMempoolDetails(accountIndex int64, assetId int64, assetType int64) (mempoolTxDetails []*MempoolTxDetail, err error) - GetLatestAccountAssetMempoolDetail(accountIndex int64, assetId int64, assetType int64) (mempoolTxDetail *MempoolTxDetail, err error) - GetMempoolTxDetailsByAssetType(assetType int) (mempoolTxDetails []*MempoolTxDetail, err error) - GetMempoolTxDetailsByAssetIdAndAssetType(assetId int64, assetType int) (mempoolTxDetails []*MempoolTxDetail, err error) - } - - defaultMempoolDetailModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - MempoolTxDetail struct { - gorm.Model - TxId int64 `json:"tx_id" gorm:"index;not null"` - AssetId int64 - AssetType int64 - AccountIndex int64 `gorm:"index"` - AccountName string - BalanceDelta string - Order int64 - AccountOrder int64 - } - - LatestTimeMempoolDetails struct { - Max time.Time - AssetId int64 - } -) - -func NewMempoolDetailModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) MempoolTxDetailModel { - return &defaultMempoolDetailModel{ - CachedConn: sqlc.NewConn(conn, c), - table: DetailTableName, - DB: db, - } -} - -func (*MempoolTxDetail) TableName() string { - return DetailTableName -} - -/* - Func: CreateMempoolDetailTable - Params: - Return: err error - Description: create mempool detail table -*/ - -func (m *defaultMempoolDetailModel) CreateMempoolDetailTable() error { - return m.DB.AutoMigrate(MempoolTxDetail{}) -} - -/* - Func: DropMempoolDetailTable - Params: - Return: err error - Description: drop MempoolDetail table -*/ - -func (m *defaultMempoolDetailModel) DropMempoolDetailTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetLatestMempoolDetail - Params: AccountIndex int64, AssetId int64, AssetType int64 - Return: err error - Description: get latest(create_at desc) mempool detail info from mempool_detail table by accountIndex, assetId and assetType. - It will be used to check if the value in Balance global map is valid. -*/ -func (m *defaultMempoolDetailModel) GetLatestMempoolDetail(accountIndex int64, assetId int64, assetType int64) (mempoolTxDetail *MempoolTxDetail, err error) { - dbTx := m.DB.Table(m.table).Where( - "account_index = ? and asset_id = ? and asset_type = ?", accountIndex, assetId, assetType). 
- Order("created_at desc, id desc").Limit(1).Find(&mempoolTxDetail) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetLatestMempoolDetail] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetLatestMempoolDetail] Get MempoolTxDetail Error") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetail, nil -} - -/* - Func: GetAccountAssetsMempoolDetails - Params: accountIndex int64, assetType int64 - Return: mempoolTxDetails []*MempoolTxDetail, err error - Description: used for get globalmap data source -*/ -func (m *defaultMempoolDetailModel) GetAccountAssetsMempoolDetails(accountIndex int64, assetType int64) (mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("account_index = ? and asset_type = ? and chain_id != -1", accountIndex, assetType). - Order("created_at, id").Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountAssetsMempoolDetails] error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[mempoolDetail.GetAccountAssetsMempoolDetails] Get MempoolTxDetails Error") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} - -/* - Func: GetAccountAssetMempoolDetails - Params: AccountIndex int64, AssetId int64, AssetType int64 - Return: err error - Description: used for get globalmap data source -*/ -func (m *defaultMempoolDetailModel) GetAccountAssetMempoolDetails(accountIndex int64, assetId int64, assetType int64) (mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("account_index = ? and asset_id = ? and asset_type = ? ", accountIndex, assetId, assetType). - Order("created_at, id").Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountAssetMempoolDetails] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetAccountAssetMempoolDetails] Get MempoolTxDetails Error") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} - -func (m *defaultMempoolDetailModel) GetLatestAccountAssetMempoolDetail( - accountIndex int64, assetId int64, assetType int64, -) (mempoolTxDetail *MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("account_index = ? and asset_id = ? and asset_type = ? ", - accountIndex, assetId, assetType). - Order("created_at desc, id desc").Find(&mempoolTxDetail) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountAssetMempoolDetails] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetAccountAssetMempoolDetails] no related mempool tx detail") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetail, nil -} - -func (m *defaultMempoolDetailModel) GetAccountMempoolDetails(accountIndex int64) (mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("account_index = ?", accountIndex). 
- Order("created_at").Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] no related mempool tx details") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} - -func (m *defaultMempoolDetailModel) GetMempoolTxDetailsByAssetType(assetType int) (mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("asset_type = ?", assetType). - Order("created_at").Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] no related mempool tx details") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} - -func (m *defaultMempoolDetailModel) GetMempoolTxDetailsByAssetIdAndAssetType( - assetId int64, - assetType int, -) ( - mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("asset_id = ? AND asset_type = ?", assetId, assetType). - Order("created_at").Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[mempoolDetail.GetAccountMempoolDetails] no related mempool tx details") - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} - -func (m *defaultMempoolDetailModel) GetMempoolTxDetailsByAccountIndex(accountIndex int64) (mempoolTxDetails []*MempoolTxDetail, err error) { - var dbTx *gorm.DB - dbTx = m.DB.Table(m.table).Where("account_index = ?", accountIndex).Find(&mempoolTxDetails) - if dbTx.Error != nil { - logx.Errorf("[GetMempoolTxDetailsByAccountIndex] unable to get by account index: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return mempoolTxDetails, nil -} diff --git a/common/model/nft/constants.go b/common/model/nft/constants.go deleted file mode 100644 index 490991bf9..000000000 --- a/common/model/nft/constants.go +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package nft - -const ( - L2NftTableName = `l2_nft` - L2NftHistoryTableName = `l2_nft_history` - L2NftCollectionTableName = `l2_nft_collection` - L2NftExchangeTableName = `l2_nft_exchange` - L2NftWithdrawHistoryTableName = `l2_nft_withdraw_history` - - OfferTableName = `offer` - - OfferFinishedStatus = 1 - - CollectionPending = 0 // create collection request received by api - CollectionCreated = 1 // collection created in l2 -) diff --git a/common/model/nft/nftCollection.go b/common/model/nft/nftCollection.go deleted file mode 100644 index f946bbbed..000000000 --- a/common/model/nft/nftCollection.go +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package nft - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L2NftCollectionModel interface { - CreateL2NftCollectionTable() error - DropL2NftCollectionTable() error - IfCollectionExistsByCollectionId(collectionId int64) (bool, error) - } - defaultL2NftCollectionModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2NftCollection struct { - gorm.Model - AccountIndex int64 - CollectionId int64 - Name string - Introduction string - Status int //Collection status indicates whether it is certified by L2. 0 means no, 1 means yes,Change this state with a transaction in the future - } -) - -func NewL2NftCollectionModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2NftCollectionModel { - return &defaultL2NftCollectionModel{ - CachedConn: sqlc.NewConn(conn, c), - table: L2NftCollectionTableName, - DB: db, - } -} - -func (*L2NftCollection) TableName() string { - return L2NftCollectionTableName -} - -/* - Func: CreateL2NftCollectionTable - Params: - Return: err error - Description: create account l2 nft table -*/ -func (m *defaultL2NftCollectionModel) CreateL2NftCollectionTable() error { - return m.DB.AutoMigrate(L2NftCollection{}) -} - -/* - Func: DropL2NftCollectionTable - Params: - Return: err error - Description: drop account nft collection table -*/ -func (m *defaultL2NftCollectionModel) DropL2NftCollectionTable() error { - return m.DB.Migrator().DropTable(m.table) -} -func (m *defaultL2NftCollectionModel) IfCollectionExistsByCollectionId(collectionId int64) (bool, error) { - var res int64 - dbTx := m.DB.Table(m.table).Where("collection_id = ? 
and deleted_at is NULL", collectionId).Count(&res) - - if dbTx.Error != nil { - logx.Error("[collection.IfCollectionExistsByCollectionId] %s", dbTx.Error) - return true, errorcode.DbErrSqlOperation - } else if res == 0 { - return false, nil - } else if res != 1 { - logx.Errorf("[collection.IfCollectionExistsByCollectionId] %s", errorcode.DbErrDuplicatedCollectionIndex) - return true, errorcode.DbErrDuplicatedCollectionIndex - } else { - return true, nil - } -} diff --git a/common/model/nft/nftExchange.go b/common/model/nft/nftExchange.go deleted file mode 100644 index 27391a6f5..000000000 --- a/common/model/nft/nftExchange.go +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package nft - -import ( - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" -) - -type ( - L2NftExchangeModel interface { - CreateL2NftExchangeTable() error - DropL2NftExchangeTable() error - } - defaultL2NftExchangeModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2NftExchange struct { - gorm.Model - BuyerAccountIndex int64 - OwnerAccountIndex int64 - NftIndex int64 - AssetId int64 - AssetAmount string - } -) - -func NewL2NftExchangeModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2NftExchangeModel { - return &defaultL2NftExchangeModel{ - CachedConn: sqlc.NewConn(conn, c), - table: L2NftExchangeTableName, - DB: db, - } -} - -func (*L2NftExchange) TableName() string { - return L2NftExchangeTableName -} - -/* - Func: CreateL2NftExchangeTable - Params: - Return: err error - Description: create account l2 nft table -*/ -func (m *defaultL2NftExchangeModel) CreateL2NftExchangeTable() error { - return m.DB.AutoMigrate(L2NftExchange{}) -} - -/* - Func: DropL2NftExchangeTable - Params: - Return: err error - Description: drop account nft exchange table -*/ -func (m *defaultL2NftExchangeModel) DropL2NftExchangeTable() error { - return m.DB.Migrator().DropTable(m.table) -} diff --git a/common/model/nft/nftHistory.go b/common/model/nft/nftHistory.go deleted file mode 100644 index b273d662b..000000000 --- a/common/model/nft/nftHistory.go +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package nft - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L2NftHistoryModel interface { - CreateL2NftHistoryTable() error - DropL2NftHistoryTable() error - GetLatestNftAssetsByBlockHeight(height int64) ( - rowsAffected int64, nftAssets []*L2NftHistory, err error, - ) - GetLatestNftAsset(nftIndex int64) ( - nftAsset *L2NftHistory, err error, - ) - GetNftAssetsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, nftAssets []*L2NftHistory, err error) - } - defaultL2NftHistoryModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2NftHistory struct { - gorm.Model - NftIndex int64 - CreatorAccountIndex int64 - OwnerAccountIndex int64 - NftContentHash string - NftL1Address string - NftL1TokenId string - CreatorTreasuryRate int64 - CollectionId int64 - Status int - L2BlockHeight int64 - } -) - -func NewL2NftHistoryModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2NftHistoryModel { - return &defaultL2NftHistoryModel{ - CachedConn: sqlc.NewConn(conn, c), - table: L2NftHistoryTableName, - DB: db, - } -} - -func (*L2NftHistory) TableName() string { - return L2NftHistoryTableName -} - -/* - Func: CreateL2NftHistoryTable - Params: - Return: err error - Description: create account l2 nft table -*/ -func (m *defaultL2NftHistoryModel) CreateL2NftHistoryTable() error { - return m.DB.AutoMigrate(L2NftHistory{}) -} - -/* - Func: DropL2NftHistoryTable - Params: - Return: err error - Description: drop account l2 nft history table -*/ -func (m *defaultL2NftHistoryModel) DropL2NftHistoryTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -func (m *defaultL2NftHistoryModel) GetLatestNftAssetsByBlockHeight(height int64) ( - rowsAffected int64, accountNftAssets []*L2NftHistory, err error, -) { - // TODO sql - dbTx := m.DB.Table(m.table). - Raw("SELECT a.* FROM l2_nft_history a WHERE NOT EXISTS"+ - "(SELECT * FROM l2_nft_history WHERE nft_index = a.nft_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height) "+ - "AND l2_block_height <= ? ORDER BY nft_index", height, height). 
- Find(&accountNftAssets) - if dbTx.Error != nil { - logx.Errorf("[GetLatestNftAssetsByBlockHeight] unable to get related nft assets: %s", dbTx.Error.Error()) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, accountNftAssets, nil -} - -func (m *defaultL2NftHistoryModel) GetLatestNftAsset(nftIndex int64) ( - nftAsset *L2NftHistory, err error, -) { - dbTx := m.DB.Table(m.table).Where("nft_index = ?", nftIndex).Order("l2_block_height desc").First(&nftAsset) - - if dbTx.Error != nil { - logx.Errorf("[GetLatestNftAsset] unable to get related nft asset: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[GetLatestNftAsset] no such info") - return nil, errorcode.DbErrNotFound - } - return nftAsset, nil -} - -func (m *defaultL2NftHistoryModel) GetNftAssetsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, nftAssets []*L2NftHistory, err error) { - dbTx := m.DB.Table(m.table).Where("l2_block_height = ?", l2BlockHeight).Find(&nftAssets) - if dbTx.Error != nil { - errInfo := fmt.Sprintf("[GetLiquidityAssetsByBlockHeight] unable to get related assets: %s", err.Error()) - logx.Error(errInfo) - return 0, nil, dbTx.Error - } - return dbTx.RowsAffected, nftAssets, nil -} diff --git a/common/model/nft/nftWithdrawHistory.go b/common/model/nft/nftWithdrawHistory.go deleted file mode 100644 index 36180661d..000000000 --- a/common/model/nft/nftWithdrawHistory.go +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
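GetLatestNftAssetsByBlockHeight above keeps, for each nft_index, only the l2_nft_history row with the greatest l2_block_height not exceeding the requested height, expressed as a NOT EXISTS anti-join. Assuming a PostgreSQL backend, the same result can be written with DISTINCT ON; a sketch with L2NftHistory trimmed to the columns the query touches:

package nftsketch

import "gorm.io/gorm"

// L2NftHistory mirrors only the columns used by the query; other fields omitted.
type L2NftHistory struct {
    gorm.Model
    NftIndex      int64
    L2BlockHeight int64
}

// latestNftAssetsUpToHeight returns, per nft_index, the history row with the
// greatest l2_block_height that does not exceed height. DISTINCT ON is
// PostgreSQL-specific; the removed code used an equivalent NOT EXISTS anti-join.
func latestNftAssetsUpToHeight(db *gorm.DB, height int64) ([]*L2NftHistory, error) {
    var rows []*L2NftHistory
    err := db.Raw(
        `SELECT DISTINCT ON (nft_index) *
           FROM l2_nft_history
          WHERE l2_block_height <= ?
          ORDER BY nft_index, l2_block_height DESC`, height,
    ).Scan(&rows).Error
    return rows, err
}

Either form yields at most one row per nft_index; the anti-join version is portable to databases without DISTINCT ON.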
- * - */ - -package nft - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - L2NftWithdrawHistoryModel interface { - CreateL2NftWithdrawHistoryTable() error - DropL2NftWithdrawHistoryTable() error - GetNftAsset(nftIndex int64) (nftAsset *L2NftWithdrawHistory, err error) - } - defaultL2NftWithdrawHistoryModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - L2NftWithdrawHistory struct { - gorm.Model - NftIndex int64 `gorm:"uniqueIndex"` - CreatorAccountIndex int64 - OwnerAccountIndex int64 - NftContentHash string - NftL1Address string - NftL1TokenId string - CreatorTreasuryRate int64 - CollectionId int64 - } -) - -func NewL2NftWithdrawHistoryModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2NftWithdrawHistoryModel { - return &defaultL2NftWithdrawHistoryModel{ - CachedConn: sqlc.NewConn(conn, c), - table: L2NftWithdrawHistoryTableName, - DB: db, - } -} - -func (*L2NftWithdrawHistory) TableName() string { - return L2NftWithdrawHistoryTableName -} - -/* - Func: CreateL2NftWithdrawHistoryTable - Params: - Return: err error - Description: create account l2 nft table -*/ -func (m *defaultL2NftWithdrawHistoryModel) CreateL2NftWithdrawHistoryTable() error { - return m.DB.AutoMigrate(L2NftWithdrawHistory{}) -} - -/* - Func: DropL2NftWithdrawHistoryTable - Params: - Return: err error - Description: drop account l2 nft table -*/ -func (m *defaultL2NftWithdrawHistoryModel) DropL2NftWithdrawHistoryTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -func (m *defaultL2NftWithdrawHistoryModel) GetNftAsset(nftIndex int64) (nftAsset *L2NftWithdrawHistory, err error) { - dbTx := m.DB.Table(m.table).Where("nft_index = ?", nftIndex).Find(&nftAsset) - if dbTx.Error != nil { - logx.Errorf("[GetNftAsset] unable to get nft asset: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[GetNftAsset] no such info") - return nil, errorcode.DbErrNotFound - } - return nftAsset, nil -} diff --git a/common/model/nft/offer.go b/common/model/nft/offer.go deleted file mode 100644 index bcc4631e2..000000000 --- a/common/model/nft/offer.go +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package nft - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - OfferModel interface { - CreateOfferTable() error - DropOfferTable() error - GetOfferByAccountIndexAndOfferId(accountIndex int64, offerId int64) (offer *Offer, err error) - GetLatestOfferId(accountIndex int64) (offerId int64, err error) - CreateOffer(offer *Offer) (err error) - } - defaultOfferModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - Offer struct { - gorm.Model - OfferType int64 - OfferId int64 - AccountIndex int64 - NftIndex int64 - AssetId int64 - AssetAmount string - ListedAt int64 - ExpiredAt int64 - TreasuryRate int64 - Sig string - Status int - } -) - -func NewOfferModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) OfferModel { - return &defaultOfferModel{ - CachedConn: sqlc.NewConn(conn, c), - table: OfferTableName, - DB: db, - } -} - -func (*Offer) TableName() string { - return OfferTableName -} - -/* - Func: CreateOfferTable - Params: - Return: err error - Description: create account l2 nft table -*/ -func (m *defaultOfferModel) CreateOfferTable() error { - return m.DB.AutoMigrate(Offer{}) -} - -/* - Func: DropOfferTable - Params: - Return: err error - Description: drop account l2 nft history table -*/ -func (m *defaultOfferModel) DropOfferTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -func (m *defaultOfferModel) GetLatestOfferId(accountIndex int64) (offerId int64, err error) { - var offer *Offer - dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Order("offer_id desc").Find(&offer) - if dbTx.Error != nil { - logx.Errorf("[GetLatestOfferId] unable to get latest offer info: %s", dbTx.Error.Error()) - return -1, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return -1, errorcode.DbErrNotFound - } - return offer.OfferId, nil -} - -func (m *defaultOfferModel) CreateOffer(offer *Offer) (err error) { - dbTx := m.DB.Table(m.table).Create(offer) - if dbTx.Error != nil { - logx.Errorf("[CreateOffer] unable to create offer: %s", dbTx.Error.Error()) - return dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateOffer] invalid offer info") - return errors.New("[CreateOffer] invalid offer info") - } - return nil -} - -func (m *defaultOfferModel) GetOfferByAccountIndexAndOfferId(accountIndex int64, offerId int64) (offer *Offer, err error) { - dbTx := m.DB.Table(m.table).Where("account_index = ? 
AND offer_id = ?", accountIndex, offerId).Find(&offer) - if dbTx.Error != nil { - logx.Errorf("[CreateOffer] unable to create offer: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateOffer] invalid offer info") - return nil, errorcode.DbErrNotFound - } - return offer, nil -} diff --git a/common/model/price/constant.go b/common/model/price/constant.go deleted file mode 100644 index 095dd6bf9..000000000 --- a/common/model/price/constant.go +++ /dev/null @@ -1,51 +0,0 @@ -package price - -import ( - "errors" -) - -type Status struct { - Timestamp string `json:"timestamp"` - ErrorCode int `json:"error_code"` - ErrorMessage *string `json:"error_message"` - Elapsed int `json:"elapsed"` - CreditCount int `json:"credit_count"` -} - -// Quote is the quote structure -type Quote struct { - Price float64 `json:"price"` - Volume24H float64 `json:"volume_24h"` - PercentChange1H float64 `json:"percent_change_1h"` - PercentChange24H float64 `json:"percent_change_24h"` - PercentChange7D float64 `json:"percent_change_7d"` - MarketCap float64 `json:"market_cap"` - LastUpdated string `json:"last_updated"` -} - -// QuoteLatest is the quotes structure -type QuoteLatest struct { - ID float64 `json:"id"` - Name string `json:"name"` - Symbol string `json:"symbol"` - Slug string `json:"slug"` - CirculatingSupply float64 `json:"circulating_supply"` - TotalSupply float64 `json:"total_supply"` - MaxSupply float64 `json:"max_supply"` - DateAdded string `json:"date_added"` - NumMarketPairs float64 `json:"num_market_pairs"` - CMCRank float64 `json:"cmc_rank"` - LastUpdated string `json:"last_updated"` - Quote map[string]*Quote `json:"quote"` -} - -type CurrencyPrice struct { - Status Status `json:"status"` - Data interface{} `json:"data"` -} - -var BinancePriceUrl = "https://api.binance.com/api/v3/ticker/price?symbol=" - -var CoinMarketCap = "https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?symbol=" - -var ErrTypeAssertion = errors.New("type assertion error") diff --git a/common/model/price/price.go b/common/model/price/price.go deleted file mode 100644 index 8f7541c4a..000000000 --- a/common/model/price/price.go +++ /dev/null @@ -1,246 +0,0 @@ -package price - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "net/http" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/errorcode" -) - -var ( - cachePriceSymbolPrefix = "cache::price:symbol:" -) - -type ( - PriceModel interface { - UpdateCurrencyPrice() error - UpdateCurrencyPriceBySymbol(symbol string) error - GetCurrencyPrice(currency string) (price float64, err error) - } - - defaultPriceModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - Price struct { - gorm.Model - } -) - -func NewPriceModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) PriceModel { - return &defaultPriceModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `price`, - DB: db, - } -} - -func (*Price) TableName() string { - return `price` -} - -func GetQuotesLatest(l2Symbol string, client *http.Client) (quotesLatest []*QuoteLatest, err error) { - currency := l2Symbol - url := fmt.Sprintf("%s%s", CoinMarketCap, currency) - - // Get Request - request, err := http.NewRequest("GET", url, nil) - if err != nil { - logx.Errorf("[price] New 
Request Error %s", err.Error()) - return nil, err - } - - // Add Header - request.Header.Add("X-CMC_PRO_API_KEY", "cfce503f-dd3d-4847-9570-bbab5257dac8") - request.Header.Add("Accept", "application/json") - - resp, err := client.Do(request) - if err != nil { - errInfo := fmt.Sprintf("[price] Network Error %s", err.Error()) - logx.Error(errInfo) - return nil, err - } - defer resp.Body.Close() - body, _ := ioutil.ReadAll(resp.Body) - - currencyPrice := new(CurrencyPrice) - err = json.Unmarshal(body, ¤cyPrice) - if err != nil { - errInfo := fmt.Errorf("[price] JSON Error: [%s]. Response body: [%s]", err.Error(), string(body)) - logx.Error(errInfo) - return nil, err - } - - ifcs, ok := currencyPrice.Data.(interface{}) - if !ok { - errInfo := fmt.Sprintf("[price] %s", ErrTypeAssertion) - logx.Error(errInfo) - return nil, ErrTypeAssertion - } - - for _, coinObj := range ifcs.(map[string]interface{}) { - quoteLatest := new(QuoteLatest) - b, err := json.Marshal(coinObj) - if err != nil { - logx.Error("[price] Marshal Error") - return nil, err - } - - err = json.Unmarshal(b, quoteLatest) - if err != nil { - logx.Error("[price] Unmarshal Error") - return nil, err - } - - quotesLatest = append(quotesLatest, quoteLatest) - } - - return quotesLatest, nil -} - -/* - Func: UpdateCurrencyPrice - Params: - Return: err - Description: update currency price cache -*/ -func (m *defaultPriceModel) UpdateCurrencyPrice() error { - myClient := &http.Client{} - - var ( - l2AssetInfos []*asset.AssetInfo - ) - dbTx := m.DB.Table(asset.AssetInfoTableName).Find(&l2AssetInfos) - if dbTx.Error != nil { - err := fmt.Sprintf("[price.GetL2AssetsList] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[price.GetL2AssetsList] %s", errorcode.DbErrNotFound) - logx.Error(err) - return errorcode.DbErrNotFound - } - - var l2Symbol string - for i := 0; i < len(l2AssetInfos); i++ { - // REY IS NOT YET - if l2AssetInfos[i].AssetSymbol == "REY" { - continue - } - if len(l2Symbol) == 0 { - l2Symbol += l2AssetInfos[i].AssetSymbol - } else { - l2Symbol += "," + l2AssetInfos[i].AssetSymbol - } - } - - quotesLatest, err := GetQuotesLatest(l2Symbol, myClient) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice.GetQuotesLatest] %s", err) - logx.Error(errInfo) - return err - } - - for _, quoteLatest := range quotesLatest { - key := fmt.Sprintf("%s%v", cachePriceSymbolPrefix, quoteLatest.Symbol) - - if quoteLatest.Quote["USD"] != nil { - err = m.SetCache(key, quoteLatest.Quote["USD"].Price) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice.Setcache] %s", err) - logx.Error(errInfo) - return err - } - - logx.Info(fmt.Sprintf("Currency:%s, Price:%+v", quoteLatest.Symbol, quoteLatest.Quote["USD"].Price)) - } else { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice] get %s usd price from coinmarketcap failed", quoteLatest.Symbol) - logx.Error(errInfo) - } - } - - // set REYUSDT to 0.8 - key := fmt.Sprintf("%s%v", cachePriceSymbolPrefix, "REY") - err = m.SetCache(key, 0.8) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice.Setcache] %s", err) - logx.Error(errInfo) - return err - } - - return nil -} - -/* - Func: UpdateCurrencyPriceBySymbol - Params: - Return: err - Description: update currency price cache by symbol -*/ -func (m *defaultPriceModel) UpdateCurrencyPriceBySymbol(symbol string) error { - // // proxy server setup - // dialSocksProxy, err := proxy.SOCKS5("tcp", "172.30.144.1:7890", nil, proxy.Direct) - // if err != nil { - // 
fmt.Println("Error connecting to proxy:", err) - // } - // tr := &http.Transport{Dial: dialSocksProxy.Dial} - - // // Create client - // myClient := &http.Client{ - // Transport: tr, - // } - - myClient := &http.Client{} - - quotesLatest, err := GetQuotesLatest(symbol, myClient) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice.GetQuotesLatest] %s", err) - logx.Error(errInfo) - return err - } - - for _, quoteLatest := range quotesLatest { - key := fmt.Sprintf("%s%v", cachePriceSymbolPrefix, quoteLatest.Symbol) - err = m.SetCache(key, quoteLatest.Quote["USD"].Price) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.UpdatePrice.Setcache] %s", err) - logx.Error(errInfo) - return err - } - - logx.Info(fmt.Sprintf("%+v", quoteLatest.Quote["USD"].Price)) - } - - return nil -} - -/* - Func: GetCurrencyPrice - Params: currency string - Return: price float64, err error - Description: get currency price cache by currency symbol -*/ -func (m *defaultPriceModel) GetCurrencyPrice(currency string) (price float64, err error) { - key := fmt.Sprintf("%s%v", cachePriceSymbolPrefix, currency) - err = m.QueryRow(&price, key, func(conn sqlx.SqlConn, v interface{}) error { - return errorcode.DbErrNotFound - }) - if err != nil { - errInfo := fmt.Sprintf("[PriceModel.GetCurrencyPrice.Getcache] %s %s", key, err) - logx.Error(errInfo) - return 0, err - } - return price, nil -} diff --git a/common/model/proofSender/constants.go b/common/model/proofSender/constants.go deleted file mode 100644 index cde5d102e..000000000 --- a/common/model/proofSender/constants.go +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package proofSender - -const ( - TableName = "proof_sender" -) - -const ( - NotSent = iota - NotConfirmed - Confirmed -) diff --git a/common/model/proofSender/proofSender.go b/common/model/proofSender/proofSender.go deleted file mode 100644 index c4edf450d..000000000 --- a/common/model/proofSender/proofSender.go +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package proofSender - -import ( - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - ProofSenderModel interface { - CreateProofSenderTable() error - DropProofSenderTable() error - CreateProof(row *ProofSender) error - GetProofsByBlockRange(start int64, end int64, maxProofsCount int) (proofs []*ProofSender, err error) - GetProofStartBlockNumber() (num int64, err error) - GetLatestConfirmedProof() (p *ProofSender, err error) - GetProofByBlockNumber(num int64) (p *ProofSender, err error) - } - - defaultProofSenderModel struct { - table string - DB *gorm.DB - } - - ProofSender struct { - gorm.Model - ProofInfo string - BlockNumber int64 `gorm:"index:idx_number,unique"` - Status int64 - } -) - -func (*ProofSender) TableName() string { - return TableName -} - -func NewProofSenderModel(db *gorm.DB) ProofSenderModel { - return &defaultProofSenderModel{ - table: TableName, - DB: db, - } -} - -/* - Func: CreateProofSenderTable - Params: - Return: err error - Description: create proofSender table -*/ - -func (m *defaultProofSenderModel) CreateProofSenderTable() error { - return m.DB.AutoMigrate(ProofSender{}) -} - -/* - Func: DropProofSenderTable - Params: - Return: err error - Description: drop proofSender table -*/ - -func (m *defaultProofSenderModel) DropProofSenderTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: InsertProof - Params: - Return: err error - Description: insert proof and block info in proofSender Table -*/ - -func (m *defaultProofSenderModel) CreateProof(row *ProofSender) error { - dbTx := m.DB.Table(m.table).Create(row) - if dbTx.Error != nil { - logx.Errorf("[proofSender.CreateProof] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[proofSender.CreateProof] Create Invalid Proof") - return errorcode.DbErrFailToCreateProof - } - return nil -} - -/* - Func: GetProof - Params: - Return: err error - Description: getProofsByBlockRange -*/ - -func (m *defaultProofSenderModel) GetProofsByBlockRange(start int64, end int64, maxProofsCount int) (proofs []*ProofSender, err error) { - - dbTx := m.DB.Debug().Table(m.table).Where("block_number >= ? AND block_number <= ? AND status = ?", - start, - end, - NotSent). - Order("block_number"). - Limit(maxProofsCount). - Find(&proofs) - - if dbTx.Error != nil { - logx.Errorf("[proofSender.GetProofsByBlockRange] %s", dbTx.Error.Error()) - return proofs, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Error("[proofSender.GetProofsByBlockRange] error not found") - return proofs, errorcode.DbErrNotFound - } - - return proofs, err -} - -/* - Func: GetStartProofBlockNumber - Params: - Return: err error - Description: Get the latest proof block number. It is used to support the prover hub to handle crypto blocks; the result will determine the start range. 
-*/ - -func (m *defaultProofSenderModel) GetProofStartBlockNumber() (num int64, err error) { - var row *ProofSender - dbTx := m.DB.Table(m.table).Order("block_number desc").Limit(1).Find(&row) - if dbTx.Error != nil { - logx.Errorf("[proofSender.GetProofStartBlockNumber] %s", dbTx.Error.Error()) - return num, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[proofSender.GetProofStartBlockNumber] not found") - return num, errorcode.DbErrNotFound - } else { - return row.BlockNumber, nil - } -} - -/* - Func: GetLatestSentProof - Params: - Return: p *ProofSender, err error - Description: get latest sent proof block number, - it used to support prover hub to init merkle tree. -*/ -func (m *defaultProofSenderModel) GetLatestConfirmedProof() (p *ProofSender, err error) { - var row *ProofSender - dbTx := m.DB.Table(m.table).Where("status >= ?", NotConfirmed).Order("block_number desc").Limit(1).Find(&row) - if dbTx.Error != nil { - logx.Errorf("[proofSender.GetLatestSentProof] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[proofSender.GetLatestSentProof] not found") - return nil, errorcode.DbErrNotFound - } else { - return row, nil - } -} - -/* - Func: GetProofByBlockNumber - Params: - Return: p *ProofSender, err error - Description: get certain blockNumber proof - it used to support prover hub to init unproved block. -*/ -func (m *defaultProofSenderModel) GetProofByBlockNumber(num int64) (p *ProofSender, err error) { - var row *ProofSender - dbTx := m.DB.Table(m.table).Where("block_number = ?", num).Find(&row) - if dbTx.Error != nil { - logx.Errorf("[proofSender.GetProofByBlockNumber] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[proofSender.GetProofByBlockNumber] not found") - return nil, errorcode.DbErrNotFound - } else { - return row, nil - } -} diff --git a/common/model/sysconfig/constant.go b/common/model/sysconfig/constant.go deleted file mode 100644 index 965d3391e..000000000 --- a/common/model/sysconfig/constant.go +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package sysconfig - -const ( - TableName = `sys_config` - NameColumn = "name" - ValueColumn = "value" - ValueTypeColumn = "value_type" - CommentColumn = "comment" -) diff --git a/common/model/sysconfig/sysConfig.go b/common/model/sysconfig/sysConfig.go deleted file mode 100644 index 942249e16..000000000 --- a/common/model/sysconfig/sysConfig.go +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package sysconfig - -import ( - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -type ( - SysconfigModel interface { - CreateSysconfigTable() error - DropSysconfigTable() error - GetSysconfigByName(name string) (info *Sysconfig, err error) - CreateSysconfig(config *Sysconfig) error - CreateSysconfigInBatches(configs []*Sysconfig) (rowsAffected int64, err error) - UpdateSysconfig(config *Sysconfig) error - } - - defaultSysconfigModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - } - - Sysconfig struct { - gorm.Model - Name string - Value string - ValueType string - Comment string - } -) - -func NewSysconfigModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) SysconfigModel { - return &defaultSysconfigModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TableName, - DB: db, - } -} - -func (*Sysconfig) TableName() string { - return TableName -} - -/* - Func: CreateSysconfigTable - Params: - Return: err error - Description: create Sysconfig table -*/ -func (m *defaultSysconfigModel) CreateSysconfigTable() error { - return m.DB.AutoMigrate(Sysconfig{}) -} - -/* - Func: DropSysconfigTable - Params: - Return: err error - Description: drop Sysconfig table -*/ -func (m *defaultSysconfigModel) DropSysconfigTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetSysconfigByName - Params: name string - Return: info *Sysconfig, err error - Description: get sysconfig by config name -*/ -func (m *defaultSysconfigModel) GetSysconfigByName(name string) (config *Sysconfig, err error) { - dbTx := m.DB.Table(m.table).Where("name = ?", name).Find(&config) - if dbTx.Error != nil { - err := fmt.Sprintf("[sysconfig.GetSysconfigByName] %s", dbTx.Error) - logx.Error(err) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[sysconfig.GetSysconfigByName] %s", errorcode.DbErrNotFound) - logx.Error(err) - return nil, errorcode.DbErrNotFound - } - return config, nil -} - -/* - Func: CreateSysconfig - Params: config *Sysconfig - Return: error - Description: Insert New Sysconfig -*/ -func (m *defaultSysconfigModel) CreateSysconfig(config *Sysconfig) error { - dbTx := m.DB.Table(m.table).Create(config) - if dbTx.Error != nil { - logx.Errorf("[sysconfig.sysconfig] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[sysconfig.sysconfig] Create Invalid Sysconfig") - return errorcode.DbErrFailToCreateSysconfig - } - return nil -} - -func (m *defaultSysconfigModel) CreateSysconfigInBatches(configs []*Sysconfig) (rowsAffected int64, err error) { - dbTx := m.DB.Table(m.table).CreateInBatches(configs, len(configs)) - if dbTx.Error != nil { - logx.Errorf("[sysconfig.CreateSysconfigInBatches] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[sysconfig.CreateSysconfigInBatches] Create Invalid Sysconfig Batches") - 
return 0, errorcode.DbErrFailToCreateSysconfig - } - return dbTx.RowsAffected, nil -} - -/* - Func: UpdateSysconfigByName - Params: config *Sysconfig - Return: err error - Description: update sysconfig by config name -*/ -func (m *defaultSysconfigModel) UpdateSysconfig(config *Sysconfig) error { - dbTx := m.DB.Table(m.table).Where("name = ?", config.Name).Select(NameColumn, ValueColumn, ValueTypeColumn, CommentColumn). - Updates(config) - if dbTx.Error != nil { - err := fmt.Sprintf("[sysconfig.UpdateSysconfig] %s", dbTx.Error) - logx.Error(err) - return dbTx.Error - } else if dbTx.RowsAffected == 0 { - err := fmt.Sprintf("[sysconfig.UpdateSysconfig] %s", errorcode.DbErrNotFound) - logx.Error(err) - return errorcode.DbErrNotFound - } - return nil -} diff --git a/common/model/tx/constant.go b/common/model/tx/constant.go deleted file mode 100644 index 0ad064c5b..000000000 --- a/common/model/tx/constant.go +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package tx - -const ( - TxDetailTableName = `tx_detail` - TxTableName = `tx` -) - -const ( - _ = iota - StatusPending - StatusSuccess - StatusFail -) - -const maxBlocks = 1000 diff --git a/common/model/tx/tx.go b/common/model/tx/tx.go deleted file mode 100644 index 1e4e42735..000000000 --- a/common/model/tx/tx.go +++ /dev/null @@ -1,683 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package tx - -import ( - "encoding/json" - "fmt" - "math/rand" - "sort" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/errorcode" -) - -var ( - cacheZkbasTxIdPrefix = "cache:zkbas:txVerification:id:" - - cacheZkbasTxTxCountPrefix = "cache:zkbas:txVerification:txCount" -) - -type ( - TxModel interface { - CreateTxTable() error - DropTxTable() error - GetTxsListByBlockHeight(blockHeight int64, limit int, offset int) (txs []*Tx, err error) - GetTxsListByAccountIndex(accountIndex int64, limit int, offset int) (txs []*Tx, err error) - GetTxsListByAccountIndexAndTxType(accountIndex int64, txType uint8, limit int, offset int) (txs []*Tx, err error) - GetTxsListByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8, limit int, offset int) (txs []*Tx, err error) - GetTxsListByAccountName(accountName string, limit int, offset int) (txs []*Tx, err error) - GetTxsTotalCount() (count int64, err error) - GetTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) - GetTxsTotalCountByAccountIndexAndTxType(accountIndex int64, txType uint8) (count int64, err error) - GetTxsTotalCountByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8) (count int64, err error) - GetTxsTotalCountByBlockHeight(blockHeight int64) (count int64, err error) - GetTxByTxHash(txHash string) (tx *Tx, err error) - GetTxByTxId(id uint) (tx *Tx, err error) - GetTxsListGreaterThanBlockHeight(blockHeight int64) (txs []*Tx, err error) - } - - defaultTxModel struct { - sqlc.CachedConn - table string - DB *gorm.DB - RedisConn *redis.Redis - } - - Tx struct { - gorm.Model - TxHash string `gorm:"uniqueIndex"` - TxType int64 - GasFee string - GasFeeAssetId int64 - TxStatus int64 - BlockHeight int64 `gorm:"index"` - BlockId int64 `gorm:"index"` - StateRoot string - NftIndex int64 - PairIndex int64 - AssetId int64 - TxAmount string - NativeAddress string - TxInfo string - TxDetails []*TxDetail `gorm:"foreignKey:TxId"` - ExtraInfo string - Memo string - AccountIndex int64 - Nonce int64 - ExpiredAt int64 - TxIndex int64 - } -) - -func NewTxModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB, redisConn *redis.Redis) TxModel { - return &defaultTxModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TxTableName, - DB: db, - RedisConn: redisConn, - } -} - -func (*Tx) TableName() string { - return TxTableName -} - -/* - Func: CreateTxTable - Params: - Return: err error - Description: create txVerification table -*/ -func (m *defaultTxModel) CreateTxTable() error { - return m.DB.AutoMigrate(Tx{}) -} - -/* - Func: DropTxTable - Params: - Return: err error - Description: drop txVerification table -*/ -func (m *defaultTxModel) DropTxTable() error { - return m.DB.Migrator().DropTable(m.table) -} - -/* - Func: GetTxsListByBlockHeight - Params: blockHeight int64, limit int64, offset int64 - Return: txVerification []*Tx, err error - Description: used for getTxsListByBlockHeight API -*/ - -func (m *defaultTxModel) GetTxsListByBlockHeight(blockHeight int64, limit int, offset int) (txs []*Tx, err error) { - var txForeignKeyColumn = `TxDetails` - // todo cache optimize - dbTx := m.DB.Table(m.table).Where("block_height = ?", blockHeight).Order("created_at desc, id desc").Offset(offset).Limit(limit).Find(&txs) - if dbTx.Error != nil { - 
logx.Errorf("[txVerification.GetTxsListByBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.GetTxsListByBlockHeight] Get Txs Error") - return nil, errorcode.DbErrNotFound - } - - for _, tx := range txs { - key := fmt.Sprintf("%s%v", cacheZkbasTxIdPrefix, tx.ID) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[txVerification.GetTxsListByBlockHeight] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - } else if val == "" { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListByBlockHeight] Get Associate TxDetails Error") - return nil, err - } - - // json string - jsonString, err := json.Marshal(tx.TxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByBlockHeight] json.Marshal Error: %s, value: %v", err.Error(), tx.TxDetails) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 60*10+rand.Intn(60*3)) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByBlockHeight] redis set error: %s", err.Error()) - return nil, err - } - } else { - // json string unmarshal - var ( - nTxDetails []*TxDetail - ) - err = json.Unmarshal([]byte(val), &nTxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByBlockHeight] json.Unmarshal error: %s, value : %s", err.Error(), val) - return nil, err - } - tx.TxDetails = nTxDetails - } - - } - return txs, nil -} - -/* - Func: GetTxsListByAccountIndex - Params: accountIndex int64, limit int64, offset int64 - Return: txVerification []*Tx, err error - Description: used for getTxsListByAccountIndex API, return all txVerification related to accountIndex. - Because there are many accountIndex in - sorted by created_time - Associate With TxDetail Table -*/ - -func (m *defaultTxModel) GetTxsListByAccountIndex(accountIndex int64, limit int, offset int) (txs []*Tx, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - txForeignKeyColumn = `TxDetails` - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? 
and deleted_at is NULL", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[info.GetTxsListByAccountIndex] Get TxIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Order("created_at desc, id desc").Offset(offset).Limit(limit).Find(&txs, txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.GetTxsListByAccountIndex] Get Txs Error") - return nil, errorcode.DbErrNotFound - } - //TODO: cache operation - for _, tx := range txs { - key := fmt.Sprintf("%s%v", cacheZkbasTxIdPrefix, tx.ID) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[txVerification.GetTxsListByAccountIndex] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - - } else if val == "" { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListByAccountIndex] Get Associate TxDetails Error") - return nil, err - } - - // json string - jsonString, err := json.Marshal(tx.TxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] json.Marshal Error: %s, value: %v", err.Error(), tx.TxDetails) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 60*10+rand.Intn(60*3)) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] redis set error: %s", err.Error()) - return nil, err - } - } else { - // json string unmarshal - var ( - nTxDetails []*TxDetail - ) - err = json.Unmarshal([]byte(val), &nTxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] json.Unmarshal error: %s, value : %s", err.Error(), val) - return nil, err - } - tx.TxDetails = nTxDetails - } - } - return txs, nil -} - -/* - Func: GetTxsListByAccountIndexAndTxType - Params: accountIndex int64, txType uint8,limit int, offset int - Return: txVerification []*Tx, err error - Description: used for getTxsListByAccountIndex API, return all txVerification related to accountIndex and txType. - Because there are many accountIndex in - sorted by created_time - Associate With TxDetail Table -*/ - -func (m *defaultTxModel) GetTxsListByAccountIndexAndTxType(accountIndex int64, txType uint8, limit int, offset int) (txs []*Tx, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - txForeignKeyColumn = `TxDetails` - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? 
and deleted_at is NULL", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[info.GetTxsListByAccountIndexAndTxType] Get TxIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Order("created_at desc").Where("tx_type = ?", txType).Offset(offset).Limit(limit).Find(&txs, txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.GetTxsListByAccountIndexAndTxType] Get Txs Error") - return nil, errorcode.DbErrNotFound - } - //TODO: cache operation - for _, tx := range txs { - key := fmt.Sprintf("%s%v:txType:%v", cacheZkbasTxIdPrefix, tx.ID, txType) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[txVerification.GetTxsListByAccountIndexAndTxType] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - - } else if val == "" { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListByAccountIndexAndTxType] Get Associate TxDetails Error") - return nil, err - } - - // json string - jsonString, err := json.Marshal(tx.TxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxType] json.Marshal Error: %s, value: %v", err.Error(), tx.TxDetails) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 30) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxType] redis set error: %s", err.Error()) - return nil, err - } - } else { - // json string unmarshal - var ( - nTxDetails []*TxDetail - ) - err = json.Unmarshal([]byte(val), &nTxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxType] json.Unmarshal error: %s, value : %s", err.Error(), val) - return nil, err - } - tx.TxDetails = nTxDetails - } - } - return txs, nil -} - -/* - Func: GetTxsListByAccountIndexAndTxTypeArray - Params: accountIndex int64, txTypeArray []uint8, limit int, offset int - Return: txVerification []*Tx, err error - Description: used for getTxsListByAccountIndex API, return all txVerification related to accountIndex and txTypeArray. - Because there are many accountIndex in - sorted by created_time - Associate With TxDetail Table -*/ - -func (m *defaultTxModel) GetTxsListByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8, limit int, offset int) (txs []*Tx, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - txForeignKeyColumn = `TxDetails` - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? 
and deleted_at is NULL", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Info("[info.GetTxsListByAccountIndexAndTxTypeArray] Get TxIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Order("created_at desc").Where("tx_type in (?)", txTypeArray).Offset(offset).Limit(limit).Find(&txs, txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[GetTxsListByAccountIndexAndTxTypeArray] Get Txs Error") - return nil, errorcode.DbErrNotFound - } - //TODO: cache operation - for _, tx := range txs { - key := fmt.Sprintf("%s%v:txTypeArray:%s", cacheZkbasTxIdPrefix, tx.ID, txTypeArray) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return nil, err - - } else if val == "" { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] Get Associate TxDetails Error") - return nil, err - } - - // json string - jsonString, err := json.Marshal(tx.TxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] json.Marshal Error: %s, value: %v", tx.TxDetails) - return nil, err - } - // todo - err = m.RedisConn.Setex(key, string(jsonString), 30) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] redis set error: %s", err.Error()) - return nil, err - } - } else { - // json string unmarshal - var ( - nTxDetails []*TxDetail - ) - err = json.Unmarshal([]byte(val), &nTxDetails) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndexAndTxTypeArray] json.Unmarshal error: %s, value : %s", err.Error(), val) - return nil, err - } - tx.TxDetails = nTxDetails - } - } - return txs, nil -} - -/* - Func: GetTxsListByAccountName - Params: accountName string, limit int64, offset int64 - Return: txVerification []*Tx, err error - Description: used for getTxsListByAccountName API - sorted by created_time - Associate With TxDetail Table -*/ -func (m *defaultTxModel) GetTxsListByAccountName(accountName string, limit int, offset int) (txs []*Tx, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - txForeignKeyColumn = `TxDetails` - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_name = ? 
and deleted_at is NULL", accountName).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountName] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.GetTxsListByAccountName] Get TxIds Error") - return nil, errorcode.DbErrNotFound - } - dbTx = m.DB.Table(m.table).Order("created_at desc, id desc").Offset(offset).Limit(limit).Find(&txs, txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListByAccountName] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.GetTxsListByAccountName] Get Txs Error") - return nil, errorcode.DbErrNotFound - } - //TODO: cache operation - for _, tx := range txs { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListByAccountName] Get Associate TxDetails Error") - return nil, err - } - } - return txs, nil -} - -/* - Func: GetTxsTotalCount - Params: - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ -func (m *defaultTxModel) GetTxsTotalCount() (count int64, err error) { - - key := fmt.Sprintf("%s", cacheZkbasTxTxCountPrefix) - val, err := m.RedisConn.Get(key) - if err != nil { - errInfo := fmt.Sprintf("[txVerification.GetTxsTotalCount] Get Redis Error: %s, key:%s", err.Error(), key) - logx.Errorf(errInfo) - return 0, err - - } else if val == "" { - dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - if dbTx.Error == errorcode.DbErrNotFound { - return 0, nil - } - logx.Error("[txVerification.GetTxsTotalCount] Get Tx Count Error") - return 0, err - } - - err = m.RedisConn.Setex(key, strconv.FormatInt(count, 10), 120) - if err != nil { - logx.Errorf("[txVerification.GetTxsTotalCount] redis set error: %s", err.Error()) - return 0, err - } - } else { - count, err = strconv.ParseInt(val, 10, 64) - if err != nil { - logx.Errorf("[txVerification.GetTxsListByAccountIndex] strconv.ParseInt error: %s, value : %s", err.Error(), val) - return 0, err - } - } - - return count, nil -} - -/* - Func: GetTxsTotalCount - Params: accountIndex int64 - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ -func (m *defaultTxModel) GetTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) { - var ( - txDetailTable = `tx_detail` - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? and deleted_at is NULL", accountIndex).Group("tx_id").Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByAccountIndex] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[txVerification.GetTxsTotalCountByAccountIndex] No Txs of account index %d in Tx Table", accountIndex) - return 0, nil - } - return count, nil -} - -/* - Func: GetTxsTotalCountByAccountIndexAndTxType - Params: accountIndex int64, txType uint8 - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ -func (m *defaultTxModel) GetTxsTotalCountByAccountIndexAndTxType(accountIndex int64, txType uint8) (count int64, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? 
and deleted_at is NULL", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByAccountIndexAndTxType] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[txVerification.GetTxsTotalCountByAccountIndexAndTxType] No Txs of account index %d and txVerification type %d in Tx Table", accountIndex, txType) - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("id in (?) and deleted_at is NULL and tx_type = ?", txIds, txType).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByAccountIndexAndTxTypee] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Infof("[txVerification.GetTxsTotalCountByAccountIndexAndTxType] no txVerification of account index %d and txVerification type = %d in mempool", accountIndex, txType) - return 0, nil - } - return count, nil -} - -/* - Func: GetTxsTotalCountByAccountIndexAndTxTypeArray - Params: accountIndex int64, txTypeArray []uint8 - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ -func (m *defaultTxModel) GetTxsTotalCountByAccountIndexAndTxTypeArray(accountIndex int64, txTypeArray []uint8) (count int64, err error) { - var ( - txDetailTable = `tx_detail` - txIds []int64 - ) - dbTx := m.DB.Table(txDetailTable).Select("tx_id").Where("account_index = ? and deleted_at is NULL", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByAccountIndexAndTxTypeArray] %s", dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Infof("[txVerification.GetTxsTotalCountByAccountIndexAndTxTypeArray] No Txs of account index %d and txVerification type %v in Tx Table", accountIndex, txTypeArray) - return 0, nil - } - dbTx = m.DB.Table(m.table).Where("id in (?) and deleted_at is NULL and tx_type in (?)", txIds, txTypeArray).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByAccountIndexAndTxTypeArray] %s", dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Infof("[txVerification.GetTxsTotalCountByAccountIndexAndTxTypeArray] no txVerification of account index %d and txVerification type = %v in mempool", accountIndex, txTypeArray) - return 0, nil - } - return count, nil -} - -/* - Func: GetTxsTotalCountByBlockHeight - Params: blockHeight int64 - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ -func (m *defaultTxModel) GetTxsTotalCountByBlockHeight(blockHeight int64) (count int64, err error) { - dbTx := m.DB.Table(m.table).Where("block_height = ? 
and deleted_at is NULL", blockHeight).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCountByBlockHeight] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - logx.Info("[txVerification.GetTxsTotalCountByBlockHeight] No Txs of block height %d in Tx Table", blockHeight) - return 0, nil - } - return count, nil -} - -/* - Func: GetTxByTxHash - Params: txHash string - Return: txVerification Tx, err error - Description: used for /api/v1/txVerification/getTxByHash -*/ -func (m *defaultTxModel) GetTxByTxHash(txHash string) (tx *Tx, err error) { - var txForeignKeyColumn = `TxDetails` - - dbTx := m.DB.Table(m.table).Where("tx_hash = ?", txHash).Find(&tx) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxByTxHash] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[txVerification.GetTxByTxHash] No such Tx with txHash: %s", txHash) - return nil, errorcode.DbErrNotFound - } - err = m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxByTxHash] Get Associate TxDetails Error") - return nil, err - } - // re-order tx details - sort.SliceStable(tx.TxDetails, func(i, j int) bool { - return tx.TxDetails[i].Order < tx.TxDetails[j].Order - }) - - return tx, nil -} - -func (m *defaultTxModel) GetTxByTxId(id uint) (tx *Tx, err error) { - var txForeignKeyColumn = `TxDetails` - - dbTx := m.DB.Table(m.table).Where("id = ?", id).Find(&tx) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxByTxId] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Errorf("[txVerification.GetTxByTxId] No such Tx with tx id: %d", id) - return nil, errorcode.DbErrNotFound - } - err = m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxByTxId] Get Associate TxDetails Error") - return nil, err - } - // re-order tx details - sort.SliceStable(tx.TxDetails, func(i, j int) bool { - return tx.TxDetails[i].Order < tx.TxDetails[j].Order - }) - - return tx, nil -} - -/* - Func: GetTxsListGreaterThanBlockHeight - Params: blockHeight int64 - Return: txVerification []*Tx, err error - Description: used for info service -*/ - -func (m *defaultTxModel) GetTxsListGreaterThanBlockHeight(blockHeight int64) (txs []*Tx, err error) { - var ( - txForeignKeyColumn = `TxDetails` - ) - - dbTx := m.DB.Table(m.table).Where("block_height >= ? 
and block_height < ?", blockHeight, blockHeight+maxBlocks).Order("created_at, id").Find(&txs) - - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsListGreaterThanBlockHeight] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - logx.Infof("[txVerification.GetTxsListGreaterThanBlockHeight] No txVerification blockHeight greater than %d", blockHeight) - return nil, nil - } - - for _, tx := range txs { - err := m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) - if err != nil { - logx.Error("[txVerification.GetTxsListGreaterThanBlockHeight] Get Associate TxDetails Error") - return nil, err - } - } - return txs, nil -} diff --git a/common/pack.go b/common/pack.go new file mode 100644 index 000000000..e36e834b0 --- /dev/null +++ b/common/pack.go @@ -0,0 +1,25 @@ +package common + +import ( + "math/big" + + "github.com/bnb-chain/zkbas-crypto/util" +) + +// ToPackedAmount : convert big int to 40 bit, 5 bits for 10^x, 35 bits for a * 10^x +func ToPackedAmount(amount *big.Int) (res int64, err error) { + return util.ToPackedAmount(amount) +} + +func CleanPackedAmount(amount *big.Int) (nAmount *big.Int, err error) { + return util.CleanPackedAmount(amount) +} + +// ToPackedFee : convert big int to 16 bit, 5 bits for 10^x, 11 bits for a * 10^x +func ToPackedFee(amount *big.Int) (res int64, err error) { + return util.ToPackedFee(amount) +} + +func CleanPackedFee(amount *big.Int) (nAmount *big.Int, err error) { + return util.CleanPackedFee(amount) +} diff --git a/common/util/packedAmountHelper_test.go b/common/packed_test.go similarity index 79% rename from common/util/packedAmountHelper_test.go rename to common/packed_test.go index c11add8cf..f1ce5fb8c 100644 --- a/common/util/packedAmountHelper_test.go +++ b/common/packed_test.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,28 +15,25 @@ * */ -package util +package common import ( - "fmt" "math/big" "testing" + + "github.com/stretchr/testify/assert" ) func TestToPackedAmount(t *testing.T) { a, _ := new(big.Int).SetString("34359738361", 10) amount, err := ToPackedAmount(a) - if err != nil { - t.Fatal(err) - } - fmt.Println(amount) + assert.NoError(t, err) + assert.Equal(t, amount, int64(1099511627552)) } func TestToPackedFee(t *testing.T) { amount, _ := new(big.Int).SetString("100000000000000", 10) fee, err := ToPackedFee(amount) - if err != nil { - t.Fatal(err) - } - fmt.Println(fee) + assert.NoError(t, err) + assert.Equal(t, fee, int64(32011)) } diff --git a/common/prove/add_liquidity.go b/common/prove/add_liquidity.go new file mode 100644 index 000000000..d3c358df2 --- /dev/null +++ b/common/prove/add_liquidity.go @@ -0,0 +1,86 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructAddLiquidityTxWitness(witness *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseAddLiquidityTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoAddLiquidityTx(txInfo) + if err != nil { + return nil, err + } + witness.AddLiquidityTxInfo = cryptoTxInfo + witness.ExpiredAt = txInfo.ExpiredAt + witness.Signature = new(eddsa.Signature) + _, err = witness.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return witness, nil +} + +func toCryptoAddLiquidityTx(txInfo *types.AddLiquidityTxInfo) (info *CryptoAddLiquidityTx, err error) { + packedAAmount, err := common.ToPackedAmount(txInfo.AssetAAmount) + if err != nil { + return nil, err + } + packedBAmount, err := common.ToPackedAmount(txInfo.AssetBAmount) + if err != nil { + return nil, err + } + packedLpAmount, err := common.ToPackedAmount(txInfo.LpAmount) + if err != nil { + return nil, err + } + packedTreasuryAmount, err := common.ToPackedAmount(txInfo.TreasuryAmount) + if err != nil { + return nil, err + } + packedKLast, err := common.ToPackedAmount(txInfo.KLast) + if err != nil { + return nil, err + } + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoAddLiquidityTx{ + FromAccountIndex: txInfo.FromAccountIndex, + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetAAmount: packedAAmount, + AssetBId: txInfo.AssetBId, + AssetBAmount: packedBAmount, + LpAmount: packedLpAmount, + KLast: packedKLast, + TreasuryAmount: packedTreasuryAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + } + return info, nil +} diff --git a/common/prove/atomic_match.go b/common/prove/atomic_match.go new file mode 100644 index 000000000..72be5963b --- /dev/null +++ b/common/prove/atomic_match.go @@ -0,0 +1,106 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructAtomicMatchTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseAtomicMatchTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoAtomicMatchTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.AtomicMatchTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoAtomicMatchTx(txInfo *types.AtomicMatchTxInfo) (info *CryptoAtomicMatchTx, err error) { + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + packedAmount, err := common.ToPackedAmount(txInfo.BuyOffer.AssetAmount) + if err != nil { + return nil, err + } + packedCreatorAmount, err := common.ToPackedAmount(txInfo.CreatorAmount) + if err != nil { + return nil, err + } + packedTreasuryAmount, err := common.ToPackedAmount(txInfo.TreasuryAmount) + if err != nil { + return nil, err + } + buySig := new(eddsa.Signature) + _, err = buySig.SetBytes(txInfo.BuyOffer.Sig) + if err != nil { + return nil, err + } + sellSig := new(eddsa.Signature) + _, err = sellSig.SetBytes(txInfo.SellOffer.Sig) + if err != nil { + return nil, err + } + info = &CryptoAtomicMatchTx{ + AccountIndex: txInfo.AccountIndex, + BuyOffer: &CryptoOfferTx{ + Type: txInfo.BuyOffer.Type, + OfferId: txInfo.BuyOffer.OfferId, + AccountIndex: txInfo.BuyOffer.AccountIndex, + NftIndex: txInfo.BuyOffer.NftIndex, + AssetId: txInfo.BuyOffer.AssetId, + AssetAmount: packedAmount, + ListedAt: txInfo.BuyOffer.ListedAt, + ExpiredAt: txInfo.BuyOffer.ExpiredAt, + TreasuryRate: txInfo.BuyOffer.TreasuryRate, + Sig: buySig, + }, + SellOffer: &CryptoOfferTx{ + Type: txInfo.SellOffer.Type, + OfferId: txInfo.SellOffer.OfferId, + AccountIndex: txInfo.SellOffer.AccountIndex, + NftIndex: txInfo.SellOffer.NftIndex, + AssetId: txInfo.SellOffer.AssetId, + AssetAmount: packedAmount, + ListedAt: txInfo.SellOffer.ListedAt, + ExpiredAt: txInfo.SellOffer.ExpiredAt, + TreasuryRate: txInfo.SellOffer.TreasuryRate, + Sig: sellSig, + }, + CreatorAmount: packedCreatorAmount, + TreasuryAmount: packedTreasuryAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + } + return info, nil +} diff --git a/common/prove/cancel_offer.go b/common/prove/cancel_offer.go new file mode 100644 index 000000000..44ad16caa --- /dev/null +++ b/common/prove/cancel_offer.go @@ -0,0 +1,59 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructCancelOfferTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseCancelOfferTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoCancelOfferTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.CancelOfferTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoCancelOfferTx(txInfo *types.CancelOfferTxInfo) (info *CryptoCancelOfferTx, err error) { + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoCancelOfferTx{ + AccountIndex: txInfo.AccountIndex, + OfferId: txInfo.OfferId, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + } + return info, nil +} diff --git a/common/prove/create_collection.go b/common/prove/create_collection.go new file mode 100644 index 000000000..be232fd9f --- /dev/null +++ b/common/prove/create_collection.go @@ -0,0 +1,61 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructCreateCollectionTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseCreateCollectionTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoCreateCollectionTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.CreateCollectionTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoCreateCollectionTx(txInfo *types.CreateCollectionTxInfo) (info *CryptoCreateCollectionTx, err error) { + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoCreateCollectionTx{ + AccountIndex: txInfo.AccountIndex, + CollectionId: txInfo.CollectionId, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + ExpiredAt: txInfo.ExpiredAt, + Nonce: txInfo.Nonce, + } + return info, nil +} diff --git a/common/prove/create_pair.go b/common/prove/create_pair.go new file mode 100644 index 000000000..ae4f9c5fe --- /dev/null +++ b/common/prove/create_pair.go @@ -0,0 +1,50 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructCreatePairTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseCreatePairTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoCreatePairTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.CreatePairTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoCreatePairTx(txInfo *legendTxTypes.CreatePairTxInfo) (info *CryptoCreatePairTx, err error) { + info = &CryptoCreatePairTx{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetBId: txInfo.AssetBId, + FeeRate: txInfo.FeeRate, + TreasuryAccountIndex: txInfo.TreasuryAccountIndex, + TreasuryRate: txInfo.TreasuryRate, + } + return info, nil +} diff --git a/common/prove/deposit.go b/common/prove/deposit.go new file mode 100644 index 000000000..1f93d0cee --- /dev/null +++ b/common/prove/deposit.go @@ -0,0 +1,48 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructDepositTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseDepositTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoDepositTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.DepositTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoDepositTx(txInfo *legendTxTypes.DepositTxInfo) (info *CryptoDepositTx, err error) { + info = &CryptoDepositTx{ + AccountIndex: txInfo.AccountIndex, + AccountNameHash: txInfo.AccountNameHash, + AssetId: txInfo.AssetId, + AssetAmount: txInfo.AssetAmount, + } + return info, nil +} diff --git a/common/prove/deposit_nft.go b/common/prove/deposit_nft.go new file mode 100644 index 000000000..0cf2829f5 --- /dev/null +++ b/common/prove/deposit_nft.go @@ -0,0 +1,53 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructDepositNftTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseDepositNftTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoDepositNftTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.DepositNftTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoDepositNftTx(txInfo *legendTxTypes.DepositNftTxInfo) (info *CryptoDepositNftTx, err error) { + info = &CryptoDepositNftTx{ + AccountIndex: txInfo.AccountIndex, + NftIndex: txInfo.NftIndex, + NftL1Address: txInfo.NftL1Address, + AccountNameHash: txInfo.AccountNameHash, + NftContentHash: txInfo.NftContentHash, + NftL1TokenId: txInfo.NftL1TokenId, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + CollectionId: txInfo.CollectionId, + } + return info, nil +} diff --git a/common/prove/fullexit.go b/common/prove/fullexit.go new file mode 100644 index 000000000..25965d414 --- /dev/null +++ b/common/prove/fullexit.go @@ -0,0 +1,48 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructFullExitTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseFullExitTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoFullExitTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.FullExitTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoFullExitTx(txInfo *legendTxTypes.FullExitTxInfo) (info *CryptoFullExitTx, err error) { + info = &CryptoFullExitTx{ + AccountIndex: txInfo.AccountIndex, + AssetId: txInfo.AssetId, + AssetAmount: txInfo.AssetAmount, + AccountNameHash: txInfo.AccountNameHash, + } + return info, nil +} diff --git a/common/prove/fullexit_nft.go b/common/prove/fullexit_nft.go new file mode 100644 index 000000000..9e81b6852 --- /dev/null +++ b/common/prove/fullexit_nft.go @@ -0,0 +1,54 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructFullExitNftTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseFullExitNftTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoFullExitNftTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.FullExitNftTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoFullExitNftTx(txInfo *legendTxTypes.FullExitNftTxInfo) (info *CryptoFullExitNftTx, err error) { + info = &CryptoFullExitNftTx{ + AccountIndex: txInfo.AccountIndex, + AccountNameHash: txInfo.AccountNameHash, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + CreatorAccountNameHash: txInfo.CreatorAccountNameHash, + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + NftIndex: txInfo.NftIndex, + CollectionId: txInfo.CollectionId, + NftContentHash: txInfo.NftContentHash, + NftL1Address: txInfo.NftL1Address, + NftL1TokenId: txInfo.NftL1TokenId, + } + return info, nil +} diff --git a/common/prove/mint_nft.go b/common/prove/mint_nft.go new file mode 100644 index 000000000..12b6389d3 --- /dev/null +++ b/common/prove/mint_nft.go @@ -0,0 +1,66 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + "github.com/ethereum/go-ethereum/common" + + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructMintNftTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseMintNftTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoMintNftTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.MintNftTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoMintNftTx(txInfo *types.MintNftTxInfo) (info *CryptoMintNftTx, err error) { + packedFee, err := common2.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoMintNftTx{ + CreatorAccountIndex: txInfo.CreatorAccountIndex, + ToAccountIndex: txInfo.ToAccountIndex, + ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), + NftIndex: txInfo.NftIndex, + NftContentHash: common.FromHex(txInfo.NftContentHash), + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + CollectionId: txInfo.NftCollectionId, + ExpiredAt: txInfo.ExpiredAt, + } + return info, nil +} diff --git a/common/util/proverHelper.go b/common/prove/proof_keys.go similarity index 51% rename from common/util/proverHelper.go rename to common/prove/proof_keys.go index 29477399c..5c7a76778 100644 --- a/common/util/proverHelper.go +++ b/common/prove/proof_keys.go @@ -1,25 +1,18 @@ -package util +package prove import ( "bytes" - "errors" "fmt" "math/big" "os" - "time" - cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/frontend" - "github.com/zeromicro/go-zero/core/logx" -) -const ( - COO_MODE = 1 - COM_MODE = 2 + cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" ) func LoadProvingKey(filepath string) (pk groth16.ProvingKey, err error) { @@ -28,7 +21,7 @@ func LoadProvingKey(filepath string) (pk groth16.ProvingKey, err error) { f, _ := os.Open(filepath) _, err = pk.ReadFrom(f) if err != nil { - return pk, errors.New("read file error") + return pk, fmt.Errorf("read file error") } f.Close() @@ -40,7 +33,7 @@ func LoadVerifyingKey(filepath string) (verifyingKey groth16.VerifyingKey, err e f, _ := os.Open(filepath) _, err = verifyingKey.ReadFrom(f) if err != nil { - return verifyingKey, errors.New("read file error") + return verifyingKey, fmt.Errorf("read file error") } f.Close() @@ -56,7 +49,6 @@ func GenerateProof( // verify CryptoBlock blockWitness, err := cryptoBlock.SetBlockWitness(cBlock) if err != nil { - logx.Errorf("[GenerateProof] unable to set block witness: %s", err.Error()) return proof, err } var verifyWitness cryptoBlock.BlockConstraints @@ -65,69 +57,24 @@ func GenerateProof( verifyWitness.BlockCommitment = cBlock.BlockCommitment witness, err := frontend.NewWitness(&blockWitness, ecc.BN254) if err != nil { - logx.Errorf("[GenerateProof] unable to generate new witness: %s", err.Error()) 
return proof, err } vWitness, err := frontend.NewWitness(&verifyWitness, ecc.BN254, frontend.PublicOnly()) if err != nil { - logx.Errorf("[GenerateProof] unable to generate new witness: %s", err.Error()) return proof, err } - elapse := time.Now() - logx.Info("start proving") proof, err = groth16.Prove(r1cs, provingKey, witness, backend.WithHints(std.Keccak256, std.ComputeSLp)) if err != nil { - logx.Errorf("[GenerateProof] unable to make a proof: %s", err.Error()) return proof, err } - fmt.Println("finish proving: ", time.Since(elapse)) - elapse = time.Now() - logx.Info("start verifying") err = groth16.Verify(proof, verifyingKey, vWitness) if err != nil { - logx.Errorf("[GenerateProof] invalid block proof: %s", err.Error()) return proof, err } return proof, nil } -func VerifyProof( - proof groth16.Proof, - vk groth16.VerifyingKey, - cBlock *cryptoBlock.Block, -) error { - // verify CryptoBlock - blockWitness, err := cryptoBlock.SetBlockWitness(cBlock) - if err != nil { - logx.Errorf("[VerifyProof] unable to set block witness: %s", err.Error()) - return err - } - - var verifyWitness cryptoBlock.BlockConstraints - verifyWitness.OldStateRoot = cBlock.OldStateRoot - verifyWitness.NewStateRoot = cBlock.NewStateRoot - verifyWitness.BlockCommitment = cBlock.BlockCommitment - _, err = frontend.NewWitness(&blockWitness, ecc.BN254) - if err != nil { - logx.Errorf("[VerifyProof] unable to generate new witness: %s", err.Error()) - return err - } - - vWitness, err := frontend.NewWitness(&verifyWitness, ecc.BN254, frontend.PublicOnly()) - if err != nil { - logx.Errorf("[VerifyProof] unable to generate new witness: %s", err.Error()) - return err - } - - err = groth16.Verify(proof, vk, vWitness) - if err != nil { - logx.Errorf("[VerifyProof] invalid block proof: %s", err.Error()) - return err - } - return nil -} - type FormattedProof struct { A [2]*big.Int B [2][2]*big.Int @@ -141,7 +88,6 @@ func FormatProof(oProof groth16.Proof, oldRoot, newRoot, commitment []byte) (pro var buf bytes.Buffer _, err = oProof.WriteRawTo(&buf) if err != nil { - logx.Errorf("[FormatProof] unable to format proof: %s", err.Error()) return nil, err } proofBytes := buf.Bytes() @@ -161,43 +107,3 @@ func FormatProof(oProof groth16.Proof, oldRoot, newRoot, commitment []byte) (pro proof.Inputs[2] = new(big.Int).SetBytes(commitment) return proof, nil } - -func UnformatProof(proof *FormattedProof) (oProof groth16.Proof, err error) { - var buf bytes.Buffer - // write bytes to buffer - buf.Write(proof.A[0].Bytes()) - buf.Write(proof.A[1].Bytes()) - buf.Write(proof.B[0][0].Bytes()) - buf.Write(proof.B[0][1].Bytes()) - buf.Write(proof.B[1][0].Bytes()) - buf.Write(proof.B[1][1].Bytes()) - buf.Write(proof.C[0].Bytes()) - buf.Write(proof.C[1].Bytes()) - - // init oProof - oProof = groth16.NewProof(ecc.BN254) - - // read buffer - _, err = oProof.ReadFrom(bytes.NewReader(buf.Bytes())) - if err != nil { - logx.Errorf("[UnformatProof] unable to ReadFrom proof buffer: %s", err.Error()) - return oProof, err - } - - return oProof, nil -} - -func CompactProofs(proofs []*FormattedProof) []*big.Int { - var res []*big.Int - for _, proof := range proofs { - res = append(res, proof.A[0]) - res = append(res, proof.A[1]) - res = append(res, proof.B[0][0]) - res = append(res, proof.B[0][1]) - res = append(res, proof.B[1][0]) - res = append(res, proof.B[1][1]) - res = append(res, proof.C[0]) - res = append(res, proof.C[1]) - } - return res -} diff --git a/common/prove/readme.md b/common/prove/readme.md new file mode 100644 index 000000000..9dab894e4 --- 
/dev/null +++ b/common/prove/readme.md @@ -0,0 +1,22 @@ +# How UT works in this package + +We need mock data to run the test cases under this package; the mock data is a +snapshot of postgres taken after running the integration test. + +We just create a snapshot of the `postgres` container, push it to GitHub as a docker +image, and reuse it. + +## How to create mock data docker images + +After you have run the integration test, while the `postgres` container has not been deleted: + +`docker commit -m 'add zkbas mock data' postgres zkbas-ut-postgres` +`docker tag zkbas-ut-postgres ghcr.io/bnb-chain/zkbas/zkbas-ut-postgres:latest` +`docker push ghcr.io/bnb-chain/zkbas/zkbas-ut-postgres:latest` + +Note: you need to log in to the docker registry before pushing. +```shell +export CR_PAT={your github token} +echo $CR_PAT | docker login ghcr.io -u {your user name} --password-stdin +``` + diff --git a/common/prove/register_zns.go b/common/prove/register_zns.go new file mode 100644 index 000000000..60b68c76f --- /dev/null +++ b/common/prove/register_zns.go @@ -0,0 +1,58 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "strings" + + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructRegisterZnsTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseRegisterZnsTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoRegisterZnsTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.Signature = std.EmptySignature() + cryptoTx.RegisterZnsTxInfo = cryptoTxInfo + return cryptoTx, nil +} + +func toCryptoRegisterZnsTx(txInfo *legendTxTypes.RegisterZnsTxInfo) (info *CryptoRegisterZnsTx, err error) { + accountName := make([]byte, 32) + realName := strings.Split(txInfo.AccountName, types.AccountNameSuffix)[0] + copy(accountName[:], realName) + pk, err := common.ParsePubKey(txInfo.PubKey) + if err != nil { + return nil, err + } + info = &CryptoRegisterZnsTx{ + AccountIndex: txInfo.AccountIndex, + AccountName: accountName, + AccountNameHash: txInfo.AccountNameHash, + PubKey: pk, + } + return info, nil +} diff --git a/common/prove/remove_liquidity.go b/common/prove/remove_liquidity.go new file mode 100644 index 000000000..4174667a5 --- /dev/null +++ b/common/prove/remove_liquidity.go @@ -0,0 +1,96 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructRemoveLiquidityTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseRemoveLiquidityTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoRemoveLiquidityTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.RemoveLiquidityTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoRemoveLiquidityTx(txInfo *types.RemoveLiquidityTxInfo) (info *CryptoRemoveLiquidityTx, err error) { + packedAMinAmount, err := common.ToPackedAmount(txInfo.AssetAMinAmount) + if err != nil { + return nil, err + } + packedBMinAmount, err := common.ToPackedAmount(txInfo.AssetBMinAmount) + if err != nil { + return nil, err + } + packedAAmount, err := common.ToPackedAmount(txInfo.AssetAAmountDelta) + if err != nil { + return nil, err + } + packedBAmount, err := common.ToPackedAmount(txInfo.AssetBAmountDelta) + if err != nil { + return nil, err + } + packedLpAmount, err := common.ToPackedAmount(txInfo.LpAmount) + if err != nil { + return nil, err + } + packedKLast, err := common.ToPackedAmount(txInfo.KLast) + if err != nil { + return nil, err + } + packedTreasuryAmount, err := common.ToPackedAmount(txInfo.TreasuryAmount) + if err != nil { + return nil, err + } + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoRemoveLiquidityTx{ + FromAccountIndex: txInfo.FromAccountIndex, + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetAMinAmount: packedAMinAmount, + AssetBId: txInfo.AssetBId, + AssetBMinAmount: packedBMinAmount, + LpAmount: packedLpAmount, + KLast: packedKLast, + TreasuryAmount: packedTreasuryAmount, + AssetAAmountDelta: packedAAmount, + AssetBAmountDelta: packedBAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + } + return info, nil +} diff --git a/common/prove/swap.go b/common/prove/swap.go new file mode 100644 index 000000000..ae361ec24 --- /dev/null +++ b/common/prove/swap.go @@ -0,0 +1,76 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructSwapTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseSwapTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoSwapTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.SwapTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoSwapTx(txInfo *types.SwapTxInfo) (info *CryptoSwapTx, err error) { + packedAAmount, err := common.ToPackedAmount(txInfo.AssetAAmount) + if err != nil { + return nil, err + } + packedBMinAmount, err := common.ToPackedAmount(txInfo.AssetBMinAmount) + if err != nil { + return nil, err + } + packedBAmount, err := common.ToPackedAmount(txInfo.AssetBAmountDelta) + if err != nil { + return nil, err + } + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoSwapTx{ + FromAccountIndex: txInfo.FromAccountIndex, + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetAAmount: packedAAmount, + AssetBId: txInfo.AssetBId, + AssetBMinAmount: packedBMinAmount, + AssetBAmountDelta: packedBAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + } + return info, nil +} diff --git a/common/prove/transfer.go b/common/prove/transfer.go new file mode 100644 index 000000000..f24cc43d3 --- /dev/null +++ b/common/prove/transfer.go @@ -0,0 +1,68 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + "github.com/ethereum/go-ethereum/common" + + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructTransferTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseTransferTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoTransferTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.TransferTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = oTx.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoTransferTx(txInfo *types.TransferTxInfo) (info *CryptoTransferTx, err error) { + packedAmount, err := common2.ToPackedAmount(txInfo.AssetAmount) + if err != nil { + return nil, err + } + packedFee, err := common2.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoTransferTx{ + FromAccountIndex: txInfo.FromAccountIndex, + ToAccountIndex: txInfo.ToAccountIndex, + ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), + AssetId: txInfo.AssetId, + AssetAmount: packedAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + CallDataHash: txInfo.CallDataHash, + } + return info, nil +} diff --git a/common/prove/transfer_nft.go b/common/prove/transfer_nft.go new file mode 100644 index 000000000..6a3cece9a --- /dev/null +++ b/common/prove/transfer_nft.go @@ -0,0 +1,63 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + "github.com/ethereum/go-ethereum/common" + + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructTransferNftTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseTransferNftTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoTransferNftTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.TransferNftTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoTransferNftTx(txInfo *types.TransferNftTxInfo) (info *CryptoTransferNftTx, err error) { + packedFee, err := common2.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoTransferNftTx{ + FromAccountIndex: txInfo.FromAccountIndex, + ToAccountIndex: txInfo.ToAccountIndex, + ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), + NftIndex: txInfo.NftIndex, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + CallDataHash: txInfo.CallDataHash, + } + return info, nil +} diff --git a/common/proverUtil/constant.go b/common/prove/types.go similarity index 78% rename from common/proverUtil/constant.go rename to common/prove/types.go index 86db418aa..e4da45ba4 100644 --- a/common/proverUtil/constant.go +++ b/common/prove/types.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,17 +15,16 @@ * */ -package proverUtil +package prove import ( "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" ) type ( @@ -33,10 +32,10 @@ type ( TxDetail = tx.TxDetail Account = account.Account - AccountAsset = commonAsset.AccountAsset + AccountAsset = types.AccountAsset - PoolInfo = commonAsset.LiquidityInfo - NftInfo = commonAsset.NftInfo + PoolInfo = types.LiquidityInfo + NftInfo = types.NftInfo AccountModel = account.AccountModel AccountHistoryModel = account.AccountHistoryModel @@ -47,7 +46,7 @@ type ( NftModel = nft.L2NftModel NftHistoryModel = nft.L2NftHistoryModel - CryptoTx = block.Tx + TxWitness = block.Tx CryptoAccount = std.Account CryptoAccountAsset = std.AccountAsset @@ -87,3 +86,19 @@ const ( LastPairIndex = 65535 LastNftIndex = 1099511627775 ) + +type AccountWitnessInfo struct { + AccountInfo *Account + AccountAssets []*AccountAsset + AssetsRelatedTxDetails []*TxDetail +} + +type LiquidityWitnessInfo struct { + LiquidityInfo *PoolInfo + LiquidityRelatedTxDetail *TxDetail +} + +type NftWitnessInfo struct { + NftInfo *NftInfo + NftRelatedTxDetail *TxDetail +} diff --git a/common/prove/update_pairrate.go b/common/prove/update_pairrate.go new file mode 100644 index 000000000..1091ac206 --- /dev/null +++ b/common/prove/update_pairrate.go @@ -0,0 +1,48 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructUpdatePairRateTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseUpdatePairRateTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoUpdatePairRateTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.UpdatePairRateTxInfo = cryptoTxInfo + cryptoTx.Signature = std.EmptySignature() + return cryptoTx, nil +} + +func toCryptoUpdatePairRateTx(txInfo *legendTxTypes.UpdatePairRateTxInfo) (info *CryptoUpdatePairRateTx, err error) { + info = &CryptoUpdatePairRateTx{ + PairIndex: txInfo.PairIndex, + FeeRate: txInfo.FeeRate, + TreasuryAccountIndex: txInfo.TreasuryAccountIndex, + TreasuryRate: txInfo.TreasuryRate, + } + return info, nil +} diff --git a/common/prove/withdraw.go b/common/prove/withdraw.go new file mode 100644 index 000000000..adbb2d3dc --- /dev/null +++ b/common/prove/withdraw.go @@ -0,0 +1,65 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructWithdrawTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseWithdrawTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoWithdrawTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.WithdrawTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = oTx.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoWithdrawTx(txInfo *types.WithdrawTxInfo) (info *CryptoWithdrawTx, err error) { + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + addrBytes := legendTxTypes.PaddingAddressToBytes32(txInfo.ToAddress) + info = &CryptoWithdrawTx{ + FromAccountIndex: txInfo.FromAccountIndex, + AssetId: txInfo.AssetId, + AssetAmount: txInfo.AssetAmount, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + ToAddress: new(big.Int).SetBytes(addrBytes), + } + return info, nil +} diff --git a/common/prove/withdraw_nft.go b/common/prove/withdraw_nft.go new file mode 100644 index 000000000..999a97795 --- /dev/null +++ b/common/prove/withdraw_nft.go @@ -0,0 +1,67 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package prove + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" + + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/types" +) + +func (w *WitnessHelper) constructWithdrawNftTxWitness(cryptoTx *TxWitness, oTx *Tx) (*TxWitness, error) { + txInfo, err := types.ParseWithdrawNftTxInfo(oTx.TxInfo) + if err != nil { + return nil, err + } + cryptoTxInfo, err := toCryptoWithdrawNftTx(txInfo) + if err != nil { + return nil, err + } + cryptoTx.WithdrawNftTxInfo = cryptoTxInfo + cryptoTx.ExpiredAt = txInfo.ExpiredAt + cryptoTx.Signature = new(eddsa.Signature) + _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) + if err != nil { + return nil, err + } + return cryptoTx, nil +} + +func toCryptoWithdrawNftTx(txInfo *types.WithdrawNftTxInfo) (info *CryptoWithdrawNftTx, err error) { + packedFee, err := common.ToPackedFee(txInfo.GasFeeAssetAmount) + if err != nil { + return nil, err + } + info = &CryptoWithdrawNftTx{ + AccountIndex: txInfo.AccountIndex, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + CreatorAccountNameHash: txInfo.CreatorAccountNameHash, + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + NftIndex: txInfo.NftIndex, + NftContentHash: txInfo.NftContentHash, + NftL1Address: txInfo.NftL1Address, + NftL1TokenId: txInfo.NftL1TokenId, + ToAddress: txInfo.ToAddress, + GasAccountIndex: txInfo.GasAccountIndex, + GasFeeAssetId: txInfo.GasFeeAssetId, + GasFeeAssetAmount: packedFee, + CollectionId: txInfo.CollectionId, + } + return info, nil +} diff --git a/common/prove/witness_helper.go b/common/prove/witness_helper.go new file mode 100644 index 000000000..1af2f5098 --- /dev/null +++ b/common/prove/witness_helper.go @@ -0,0 +1,719 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package prove + +import ( + "fmt" + "math/big" + + "github.com/ethereum/go-ethereum/common" + + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + bsmt "github.com/bnb-chain/zkbas-smt" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +type WitnessHelper struct { + treeCtx *tree.Context + + accountModel AccountModel + + // Trees + accountTree bsmt.SparseMerkleTree + assetTrees *[]bsmt.SparseMerkleTree + liquidityTree bsmt.SparseMerkleTree + nftTree bsmt.SparseMerkleTree +} + +func NewWitnessHelper(treeCtx *tree.Context, accountTree, liquidityTree, nftTree bsmt.SparseMerkleTree, + assetTrees *[]bsmt.SparseMerkleTree, accountModel AccountModel) *WitnessHelper { + return &WitnessHelper{ + treeCtx: treeCtx, + accountModel: accountModel, + accountTree: accountTree, + assetTrees: assetTrees, + liquidityTree: liquidityTree, + nftTree: nftTree, + } +} + +func (w *WitnessHelper) ConstructTxWitness(oTx *Tx, finalityBlockNr uint64, +) (cryptoTx *TxWitness, err error) { + switch oTx.TxType { + case types.TxTypeEmpty: + return nil, fmt.Errorf("there should be no empty tx") + default: + cryptoTx, err = w.constructTxWitness(oTx, finalityBlockNr) + if err != nil { + return nil, err + } + } + return cryptoTx, nil +} + +func (w *WitnessHelper) constructTxWitness(oTx *Tx, finalityBlockNr uint64) (witness *TxWitness, err error) { + if oTx == nil || w.accountTree == nil || w.assetTrees == nil || w.liquidityTree == nil || w.nftTree == nil { + return nil, fmt.Errorf("failed because of nil tx or tree") + } + witness, err = w.constructWitnessInfo(oTx, finalityBlockNr) + if err != nil { + return nil, err + } + witness.TxType = uint8(oTx.TxType) + witness.Nonce = oTx.Nonce + switch oTx.TxType { + case types.TxTypeRegisterZns: + return w.constructRegisterZnsTxWitness(witness, oTx) + case types.TxTypeCreatePair: + return w.constructCreatePairTxWitness(witness, oTx) + case types.TxTypeUpdatePairRate: + return w.constructUpdatePairRateTxWitness(witness, oTx) + case types.TxTypeDeposit: + return w.constructDepositTxWitness(witness, oTx) + case types.TxTypeDepositNft: + return w.constructDepositNftTxWitness(witness, oTx) + case types.TxTypeTransfer: + return w.constructTransferTxWitness(witness, oTx) + case types.TxTypeSwap: + return w.constructSwapTxWitness(witness, oTx) + case types.TxTypeAddLiquidity: + return w.constructAddLiquidityTxWitness(witness, oTx) + case types.TxTypeRemoveLiquidity: + return w.constructRemoveLiquidityTxWitness(witness, oTx) + case types.TxTypeWithdraw: + return w.constructWithdrawTxWitness(witness, oTx) + case types.TxTypeCreateCollection: + return w.constructCreateCollectionTxWitness(witness, oTx) + case types.TxTypeMintNft: + return w.constructMintNftTxWitness(witness, oTx) + case types.TxTypeTransferNft: + return w.constructTransferNftTxWitness(witness, oTx) + case types.TxTypeAtomicMatch: + return w.constructAtomicMatchTxWitness(witness, oTx) + case types.TxTypeCancelOffer: + return w.constructCancelOfferTxWitness(witness, oTx) + case types.TxTypeWithdrawNft: + return w.constructWithdrawNftTxWitness(witness, oTx) + case types.TxTypeFullExit: + return w.constructFullExitTxWitness(witness, oTx) + case types.TxTypeFullExitNft: + return w.constructFullExitNftTxWitness(witness, oTx) + default: + return nil, fmt.Errorf("tx type error") + } +} + +func (w *WitnessHelper) constructWitnessInfo( + oTx *Tx, + finalityBlockNr uint64, +) ( + cryptoTx *TxWitness, + 
err error, +) { + accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := w.constructSimpleWitnessInfo(oTx) + if err != nil { + return nil, err + } + // construct account witness + accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err := + w.constructAccountWitness(oTx, finalityBlockNr, accountKeys, proverAccounts) + if err != nil { + return nil, err + } + // construct liquidity witness + liquidityRootBefore, liquidityBefore, merkleProofsLiquidityBefore, err := + w.constructLiquidityWitness(proverLiquidityInfo) + if err != nil { + return nil, err + } + // construct nft witness + nftRootBefore, nftBefore, merkleProofsNftBefore, err := + w.constructNftWitness(proverNftInfo) + if err != nil { + return nil, err + } + stateRootBefore := tree.ComputeStateRootHash(accountRootBefore, liquidityRootBefore, nftRootBefore) + stateRootAfter := tree.ComputeStateRootHash(w.accountTree.Root(), w.liquidityTree.Root(), w.nftTree.Root()) + cryptoTx = &TxWitness{ + AccountRootBefore: accountRootBefore, + AccountsInfoBefore: accountsInfoBefore, + LiquidityRootBefore: liquidityRootBefore, + LiquidityBefore: liquidityBefore, + NftRootBefore: nftRootBefore, + NftBefore: nftBefore, + StateRootBefore: stateRootBefore, + MerkleProofsAccountAssetsBefore: merkleProofsAccountAssetsBefore, + MerkleProofsAccountBefore: merkleProofsAccountBefore, + MerkleProofsLiquidityBefore: merkleProofsLiquidityBefore, + MerkleProofsNftBefore: merkleProofsNftBefore, + StateRootAfter: stateRootAfter, + } + return cryptoTx, nil +} + +func (w *WitnessHelper) constructAccountWitness( + oTx *Tx, + finalityBlockNr uint64, + accountKeys []int64, + proverAccounts []*AccountWitnessInfo, +) ( + accountRootBefore []byte, + // account before info, size is 5 + accountsInfoBefore [NbAccountsPerTx]*CryptoAccount, + // before account asset merkle proof + merkleProofsAccountAssetsBefore [NbAccountsPerTx][NbAccountAssetsPerAccount][AssetMerkleLevels][]byte, + // before account merkle proof + merkleProofsAccountBefore [NbAccountsPerTx][AccountMerkleLevels][]byte, + err error, +) { + accountRootBefore = w.accountTree.Root() + var ( + accountCount = 0 + ) + for _, accountKey := range accountKeys { + var ( + cryptoAccount = new(CryptoAccount) + // get account asset before + assetCount = 0 + ) + // get account before + accountMerkleProofs, err := w.accountTree.GetProof(uint64(accountKey)) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // it means this is a registerZNS tx + if proverAccounts == nil { + if accountKey != int64(len(*w.assetTrees)) { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, + fmt.Errorf("invalid key") + } + emptyAccountAssetTree, err := tree.NewEmptyAccountAssetTree(w.treeCtx, accountKey, finalityBlockNr) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + *w.assetTrees = append(*w.assetTrees, emptyAccountAssetTree) + cryptoAccount = std.EmptyAccount(accountKey, tree.NilAccountAssetRoot) + // update account info + accountInfo, err := w.accountModel.GetConfirmedAccountByIndex(accountKey) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + proverAccounts = append(proverAccounts, &AccountWitnessInfo{ + AccountInfo: &Account{ + AccountIndex: accountInfo.AccountIndex, + 
AccountName: accountInfo.AccountName, + PublicKey: accountInfo.PublicKey, + AccountNameHash: accountInfo.AccountNameHash, + L1Address: accountInfo.L1Address, + Nonce: types.NilNonce, + CollectionNonce: types.NilCollectionId, + AssetInfo: types.NilAssetInfo, + AssetRoot: common.Bytes2Hex(tree.NilAccountAssetRoot), + Status: accountInfo.Status, + }, + }) + } else { + proverAccountInfo := proverAccounts[accountCount] + pk, err := common2.ParsePubKey(proverAccountInfo.AccountInfo.PublicKey) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + cryptoAccount = &CryptoAccount{ + AccountIndex: accountKey, + AccountNameHash: common.FromHex(proverAccountInfo.AccountInfo.AccountNameHash), + AccountPk: pk, + Nonce: proverAccountInfo.AccountInfo.Nonce, + CollectionNonce: proverAccountInfo.AccountInfo.CollectionNonce, + AssetRoot: (*w.assetTrees)[accountKey].Root(), + } + for i, accountAsset := range proverAccountInfo.AccountAssets { + assetMerkleProof, err := (*w.assetTrees)[accountKey].GetProof(uint64(accountAsset.AssetId)) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // set crypto account asset + cryptoAccount.AssetsInfo[assetCount] = &CryptoAccountAsset{ + AssetId: accountAsset.AssetId, + Balance: accountAsset.Balance, + LpAmount: accountAsset.LpAmount, + OfferCanceledOrFinalized: accountAsset.OfferCanceledOrFinalized, + } + + // set merkle proof + merkleProofsAccountAssetsBefore[accountCount][assetCount], err = SetFixedAccountAssetArray(assetMerkleProof) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // update asset merkle tree + nBalance, err := chain.ComputeNewBalance( + proverAccountInfo.AssetsRelatedTxDetails[i].AssetType, + proverAccountInfo.AssetsRelatedTxDetails[i].Balance, + proverAccountInfo.AssetsRelatedTxDetails[i].BalanceDelta, + ) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + nAsset, err := types.ParseAccountAsset(nBalance) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + nAssetHash, err := tree.ComputeAccountAssetLeafHash(nAsset.Balance.String(), nAsset.LpAmount.String(), nAsset.OfferCanceledOrFinalized.String()) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + err = (*w.assetTrees)[accountKey].Set(uint64(accountAsset.AssetId), nAssetHash) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + + assetCount++ + } + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + } + // padding empty account asset + for assetCount < NbAccountAssetsPerAccount { + cryptoAccount.AssetsInfo[assetCount] = std.EmptyAccountAsset(LastAccountAssetId) + assetMerkleProof, err := (*w.assetTrees)[accountKey].GetProof(LastAccountAssetId) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + merkleProofsAccountAssetsBefore[accountCount][assetCount], err = SetFixedAccountAssetArray(assetMerkleProof) + if err != nil { + return accountRootBefore, 
accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + assetCount++ + } + // set account merkle proof + merkleProofsAccountBefore[accountCount], err = SetFixedAccountArray(accountMerkleProofs) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // update account merkle tree + nonce := cryptoAccount.Nonce + collectionNonce := cryptoAccount.CollectionNonce + if oTx.AccountIndex == accountKey && types.IsL2Tx(oTx.TxType) { + nonce = oTx.Nonce + 1 // increase nonce if tx is initiated in l2 + } + if oTx.AccountIndex == accountKey && oTx.TxType == types.TxTypeCreateCollection { + collectionNonce++ + } + nAccountHash, err := tree.ComputeAccountLeafHash( + proverAccounts[accountCount].AccountInfo.AccountNameHash, + proverAccounts[accountCount].AccountInfo.PublicKey, + nonce, + collectionNonce, + (*w.assetTrees)[accountKey].Root(), + ) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + err = w.accountTree.Set(uint64(accountKey), nAccountHash) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // set account info before + accountsInfoBefore[accountCount] = cryptoAccount + // add count + accountCount++ + } + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // padding empty account + emptyAssetTree, err := tree.NewMemAccountAssetTree() + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + for accountCount < NbAccountsPerTx { + accountsInfoBefore[accountCount] = std.EmptyAccount(LastAccountIndex, tree.NilAccountAssetRoot) + // get account before + accountMerkleProofs, err := w.accountTree.GetProof(LastAccountIndex) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + // set account merkle proof + merkleProofsAccountBefore[accountCount], err = SetFixedAccountArray(accountMerkleProofs) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + for i := 0; i < NbAccountAssetsPerAccount; i++ { + assetMerkleProof, err := emptyAssetTree.GetProof(0) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + merkleProofsAccountAssetsBefore[accountCount][i], err = SetFixedAccountAssetArray(assetMerkleProof) + if err != nil { + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, err + } + } + accountCount++ + + } + return accountRootBefore, accountsInfoBefore, merkleProofsAccountAssetsBefore, merkleProofsAccountBefore, nil +} + +func (w *WitnessHelper) constructLiquidityWitness( + proverLiquidityInfo *LiquidityWitnessInfo, +) ( + // liquidity root before + LiquidityRootBefore []byte, + // liquidity before + LiquidityBefore *CryptoLiquidity, + // before liquidity merkle proof + MerkleProofsLiquidityBefore [LiquidityMerkleLevels][]byte, + err error, +) { + LiquidityRootBefore = w.liquidityTree.Root() + if proverLiquidityInfo == nil { + liquidityMerkleProofs, err := w.liquidityTree.GetProof(LastPairIndex) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, 
MerkleProofsLiquidityBefore, err + } + MerkleProofsLiquidityBefore, err = SetFixedLiquidityArray(liquidityMerkleProofs) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + LiquidityBefore = std.EmptyLiquidity(LastPairIndex) + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, nil + } + liquidityMerkleProofs, err := w.liquidityTree.GetProof(uint64(proverLiquidityInfo.LiquidityInfo.PairIndex)) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + MerkleProofsLiquidityBefore, err = SetFixedLiquidityArray(liquidityMerkleProofs) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + LiquidityBefore = &CryptoLiquidity{ + PairIndex: proverLiquidityInfo.LiquidityInfo.PairIndex, + AssetAId: proverLiquidityInfo.LiquidityInfo.AssetAId, + AssetA: proverLiquidityInfo.LiquidityInfo.AssetA, + AssetBId: proverLiquidityInfo.LiquidityInfo.AssetBId, + AssetB: proverLiquidityInfo.LiquidityInfo.AssetB, + LpAmount: proverLiquidityInfo.LiquidityInfo.LpAmount, + KLast: proverLiquidityInfo.LiquidityInfo.KLast, + FeeRate: proverLiquidityInfo.LiquidityInfo.FeeRate, + TreasuryAccountIndex: proverLiquidityInfo.LiquidityInfo.TreasuryAccountIndex, + TreasuryRate: proverLiquidityInfo.LiquidityInfo.TreasuryRate, + } + // update liquidity tree + nBalance, err := chain.ComputeNewBalance( + proverLiquidityInfo.LiquidityRelatedTxDetail.AssetType, + proverLiquidityInfo.LiquidityRelatedTxDetail.Balance, + proverLiquidityInfo.LiquidityRelatedTxDetail.BalanceDelta, + ) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + nPoolInfo, err := types.ParseLiquidityInfo(nBalance) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + nLiquidityHash, err := tree.ComputeLiquidityAssetLeafHash( + nPoolInfo.AssetAId, + nPoolInfo.AssetA.String(), + nPoolInfo.AssetBId, + nPoolInfo.AssetB.String(), + nPoolInfo.LpAmount.String(), + nPoolInfo.KLast.String(), + nPoolInfo.FeeRate, + nPoolInfo.TreasuryAccountIndex, + nPoolInfo.TreasuryRate, + ) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + err = w.liquidityTree.Set(uint64(proverLiquidityInfo.LiquidityInfo.PairIndex), nLiquidityHash) + if err != nil { + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err + } + return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, nil +} + +func (w *WitnessHelper) constructNftWitness( + proverNftInfo *NftWitnessInfo, +) ( + // nft root before + nftRootBefore []byte, + // nft before + nftBefore *CryptoNft, + // before nft tree merkle proof + merkleProofsNftBefore [NftMerkleLevels][]byte, + err error, +) { + nftRootBefore = w.nftTree.Root() + if proverNftInfo == nil { + liquidityMerkleProofs, err := w.nftTree.GetProof(LastNftIndex) + if err != nil { + return nftRootBefore, nftBefore, merkleProofsNftBefore, err + } + merkleProofsNftBefore, err = SetFixedNftArray(liquidityMerkleProofs) + if err != nil { + return nftRootBefore, nftBefore, merkleProofsNftBefore, err + } + nftBefore = std.EmptyNft(LastNftIndex) + return nftRootBefore, nftBefore, merkleProofsNftBefore, nil + } + nftMerkleProofs, err := w.nftTree.GetProof(uint64(proverNftInfo.NftInfo.NftIndex)) + if err != nil { + return nftRootBefore, nftBefore, merkleProofsNftBefore, err + } + merkleProofsNftBefore, err = 
SetFixedNftArray(nftMerkleProofs)
+	if err != nil {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, err
+	}
+	nftL1TokenId, isValid := new(big.Int).SetString(proverNftInfo.NftInfo.NftL1TokenId, 10)
+	if !isValid {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, fmt.Errorf("unable to parse big int")
+	}
+	nftBefore = &CryptoNft{
+		NftIndex:            proverNftInfo.NftInfo.NftIndex,
+		NftContentHash:      common.FromHex(proverNftInfo.NftInfo.NftContentHash),
+		CreatorAccountIndex: proverNftInfo.NftInfo.CreatorAccountIndex,
+		OwnerAccountIndex:   proverNftInfo.NftInfo.OwnerAccountIndex,
+		NftL1Address:        new(big.Int).SetBytes(common.FromHex(proverNftInfo.NftInfo.NftL1Address)),
+		NftL1TokenId:        nftL1TokenId,
+		CreatorTreasuryRate: proverNftInfo.NftInfo.CreatorTreasuryRate,
+		CollectionId:        proverNftInfo.NftInfo.CollectionId,
+	}
+	// update nft tree
+	nBalance, err := chain.ComputeNewBalance(
+		proverNftInfo.NftRelatedTxDetail.AssetType,
+		proverNftInfo.NftRelatedTxDetail.Balance,
+		proverNftInfo.NftRelatedTxDetail.BalanceDelta,
+	)
+	if err != nil {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, err
+	}
+	nNftInfo, err := types.ParseNftInfo(nBalance)
+	if err != nil {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, err
+	}
+	nNftHash, err := tree.ComputeNftAssetLeafHash(
+		nNftInfo.CreatorAccountIndex,
+		nNftInfo.OwnerAccountIndex,
+		nNftInfo.NftContentHash,
+		nNftInfo.NftL1Address,
+		nNftInfo.NftL1TokenId,
+		nNftInfo.CreatorTreasuryRate,
+		nNftInfo.CollectionId,
+	)
+	if err != nil {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, err
+	}
+	err = w.nftTree.Set(uint64(proverNftInfo.NftInfo.NftIndex), nNftHash)
+	if err != nil {
+		return nftRootBefore, nftBefore, merkleProofsNftBefore, err
+	}
+	return nftRootBefore, nftBefore, merkleProofsNftBefore, nil
+}
+
+func SetFixedAccountArray(proof [][]byte) (res [AccountMerkleLevels][]byte, err error) {
+	if len(proof) != AccountMerkleLevels {
+		return res, fmt.Errorf("invalid size")
+	}
+	copy(res[:], proof[:])
+	return res, nil
+}
+
+func SetFixedAccountAssetArray(proof [][]byte) (res [AssetMerkleLevels][]byte, err error) {
+	if len(proof) != AssetMerkleLevels {
+		return res, fmt.Errorf("invalid size")
+	}
+	copy(res[:], proof[:])
+	return res, nil
+}
+
+func SetFixedLiquidityArray(proof [][]byte) (res [LiquidityMerkleLevels][]byte, err error) {
+	if len(proof) != LiquidityMerkleLevels {
+		return res, fmt.Errorf("invalid size")
+	}
+	copy(res[:], proof[:])
+	return res, nil
+}
+
+func SetFixedNftArray(proof [][]byte) (res [NftMerkleLevels][]byte, err error) {
+	if len(proof) != NftMerkleLevels {
+		return res, fmt.Errorf("invalid size")
+	}
+	copy(res[:], proof[:])
+	return res, nil
+}
+
+func (w *WitnessHelper) constructSimpleWitnessInfo(oTx *Tx) (
+	accountKeys []int64,
+	accountWitnessInfo []*AccountWitnessInfo,
+	liquidityWitnessInfo *LiquidityWitnessInfo,
+	nftWitnessInfo *NftWitnessInfo,
+	err error,
+) {
+	var (
+		// initialize the account asset map; if the same asset appears in several tx details,
+		// the "before" state of one detail is the "after" state of the previous one
+		accountAssetMap  = make(map[int64]map[int64]*AccountAsset)
+		accountMap       = make(map[int64]*Account)
+		lastAccountOrder = int64(-2)
+		accountCount     = -1
+	)
+	// initialize the prover account keys; a registerZNS tx contributes its new account index directly
+	if oTx.TxType == types.TxTypeRegisterZns {
+		accountKeys = append(accountKeys, oTx.AccountIndex)
+	}
+	for _, txDetail := range oTx.TxDetails {
+		switch txDetail.AssetType {
+		case
types.FungibleAssetType: + // get account info + if accountMap[txDetail.AccountIndex] == nil { + accountInfo, err := w.accountModel.GetConfirmedAccountByIndex(txDetail.AccountIndex) + if err != nil { + return nil, nil, nil, nil, err + } + // get current nonce + accountInfo.Nonce = txDetail.Nonce + accountMap[txDetail.AccountIndex] = accountInfo + } else { + if lastAccountOrder != txDetail.AccountOrder { + if oTx.AccountIndex == txDetail.AccountIndex { + accountMap[txDetail.AccountIndex].Nonce = oTx.Nonce + 1 + } + } + } + if lastAccountOrder != txDetail.AccountOrder { + accountKeys = append(accountKeys, txDetail.AccountIndex) + lastAccountOrder = txDetail.AccountOrder + accountWitnessInfo = append(accountWitnessInfo, &AccountWitnessInfo{ + AccountInfo: &Account{ + AccountIndex: accountMap[txDetail.AccountIndex].AccountIndex, + AccountName: accountMap[txDetail.AccountIndex].AccountName, + PublicKey: accountMap[txDetail.AccountIndex].PublicKey, + AccountNameHash: accountMap[txDetail.AccountIndex].AccountNameHash, + L1Address: accountMap[txDetail.AccountIndex].L1Address, + Nonce: accountMap[txDetail.AccountIndex].Nonce, + CollectionNonce: txDetail.CollectionNonce, + AssetInfo: accountMap[txDetail.AccountIndex].AssetInfo, + AssetRoot: accountMap[txDetail.AccountIndex].AssetRoot, + Status: accountMap[txDetail.AccountIndex].Status, + }, + }) + accountCount++ + } + if accountAssetMap[txDetail.AccountIndex] == nil { + accountAssetMap[txDetail.AccountIndex] = make(map[int64]*AccountAsset) + } + if accountAssetMap[txDetail.AccountIndex][txDetail.AssetId] == nil { + // set account before info + oAsset, err := types.ParseAccountAsset(txDetail.Balance) + if err != nil { + return nil, nil, nil, nil, err + } + accountWitnessInfo[accountCount].AccountAssets = append( + accountWitnessInfo[accountCount].AccountAssets, + oAsset, + ) + } else { + // set account before info + accountWitnessInfo[accountCount].AccountAssets = append( + accountWitnessInfo[accountCount].AccountAssets, + &AccountAsset{ + AssetId: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].AssetId, + Balance: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].Balance, + LpAmount: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].LpAmount, + OfferCanceledOrFinalized: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].OfferCanceledOrFinalized, + }, + ) + } + // set tx detail + accountWitnessInfo[accountCount].AssetsRelatedTxDetails = append( + accountWitnessInfo[accountCount].AssetsRelatedTxDetails, + txDetail, + ) + // update asset info + newBalance, err := chain.ComputeNewBalance(txDetail.AssetType, txDetail.Balance, txDetail.BalanceDelta) + if err != nil { + return nil, nil, nil, nil, err + } + nAsset, err := types.ParseAccountAsset(newBalance) + if err != nil { + return nil, nil, nil, nil, err + } + accountAssetMap[txDetail.AccountIndex][txDetail.AssetId] = nAsset + case types.LiquidityAssetType: + liquidityWitnessInfo = new(LiquidityWitnessInfo) + liquidityWitnessInfo.LiquidityRelatedTxDetail = txDetail + poolInfo, err := types.ParseLiquidityInfo(txDetail.Balance) + if err != nil { + return nil, nil, nil, nil, err + } + liquidityWitnessInfo.LiquidityInfo = poolInfo + case types.NftAssetType: + nftWitnessInfo = new(NftWitnessInfo) + nftWitnessInfo.NftRelatedTxDetail = txDetail + nftInfo, err := types.ParseNftInfo(txDetail.Balance) + if err != nil { + return nil, nil, nil, nil, err + } + nftWitnessInfo.NftInfo = nftInfo + case types.CollectionNonceAssetType: + // get account info + if 
accountMap[txDetail.AccountIndex] == nil { + accountInfo, err := w.accountModel.GetConfirmedAccountByIndex(txDetail.AccountIndex) + if err != nil { + return nil, nil, nil, nil, err + } + // get current nonce + accountInfo.Nonce = txDetail.Nonce + accountInfo.CollectionNonce = txDetail.CollectionNonce + accountMap[txDetail.AccountIndex] = accountInfo + if lastAccountOrder != txDetail.AccountOrder { + accountKeys = append(accountKeys, txDetail.AccountIndex) + lastAccountOrder = txDetail.AccountOrder + accountWitnessInfo = append(accountWitnessInfo, &AccountWitnessInfo{ + AccountInfo: &Account{ + AccountIndex: accountMap[txDetail.AccountIndex].AccountIndex, + AccountName: accountMap[txDetail.AccountIndex].AccountName, + PublicKey: accountMap[txDetail.AccountIndex].PublicKey, + AccountNameHash: accountMap[txDetail.AccountIndex].AccountNameHash, + L1Address: accountMap[txDetail.AccountIndex].L1Address, + Nonce: accountMap[txDetail.AccountIndex].Nonce, + CollectionNonce: txDetail.CollectionNonce, + AssetInfo: accountMap[txDetail.AccountIndex].AssetInfo, + AssetRoot: accountMap[txDetail.AccountIndex].AssetRoot, + Status: accountMap[txDetail.AccountIndex].Status, + }, + }) + accountCount++ + } + } else { + accountMap[txDetail.AccountIndex].Nonce = txDetail.Nonce + accountMap[txDetail.AccountIndex].CollectionNonce = txDetail.CollectionNonce + } + default: + return nil, nil, nil, nil, + fmt.Errorf("invalid asset type") + } + } + return accountKeys, accountWitnessInfo, liquidityWitnessInfo, nftWitnessInfo, nil +} diff --git a/common/prove/witness_test.go b/common/prove/witness_test.go new file mode 100644 index 000000000..bc4aa6441 --- /dev/null +++ b/common/prove/witness_test.go @@ -0,0 +1,128 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+package prove
+
+import (
+	"encoding/json"
+	"fmt"
+	"os/exec"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"gorm.io/driver/postgres"
+	"gorm.io/gorm"
+
+	cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block"
+	"github.com/bnb-chain/zkbas-smt/database/memory"
+
+	"github.com/bnb-chain/zkbas/dao/account"
+	"github.com/bnb-chain/zkbas/dao/block"
+	"github.com/bnb-chain/zkbas/dao/blockwitness"
+	"github.com/bnb-chain/zkbas/dao/liquidity"
+	"github.com/bnb-chain/zkbas/dao/nft"
+	"github.com/bnb-chain/zkbas/tree"
+)
+
+var (
+	dsn                   = "host=localhost user=postgres password=Zkbas@123 dbname=zkbas port=5434 sslmode=disable"
+	blockModel            block.BlockModel
+	witnessModel          blockwitness.BlockWitnessModel
+	accountModel          account.AccountModel
+	accountHistoryModel   account.AccountHistoryModel
+	liquidityHistoryModel liquidity.LiquidityHistoryModel
+	nftHistoryModel       nft.L2NftHistoryModel
+)
+
+func TestConstructTxWitness(t *testing.T) {
+	testDBSetup()
+	defer testDBShutdown()
+	maxTestBlockHeight := int64(33)
+	for h := int64(1); h < maxTestBlockHeight; h++ {
+		witnessHelper, err := getWitnessHelper(h - 1)
+		assert.NoError(t, err)
+		b, err := blockModel.GetBlocksBetween(h, h)
+		assert.NoError(t, err)
+		w, err := witnessModel.GetBlockWitnessByNumber(h)
+		assert.NoError(t, err)
+		var cBlock cryptoBlock.Block
+		err = json.Unmarshal([]byte(w.WitnessData), &cBlock)
+		assert.NoError(t, err)
+		for idx, tx := range b[0].Txs {
+			txWitness, err := witnessHelper.ConstructTxWitness(tx, uint64(0))
+			assert.NoError(t, err)
+			expectedBz, _ := json.Marshal(cBlock.Txs[idx])
+			actualBz, _ := json.Marshal(txWitness)
+			assert.Equal(t, string(actualBz), string(expectedBz), fmt.Sprintf("block %d, tx %d generate witness failed, tx type: %d", h, idx, tx.TxType))
+		}
+	}
+}
+
+func getWitnessHelper(blockHeight int64) (*WitnessHelper, error) {
+	ctx := &tree.Context{
+		Driver: tree.MemoryDB,
+		TreeDB: memory.NewMemoryDB(),
+	}
+	accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx)
+	if err != nil {
+		return nil, err
+	}
+	liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx)
+	if err != nil {
+		return nil, err
+	}
+	nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx)
+	if err != nil {
+		return nil, err
+	}
+	return NewWitnessHelper(ctx,
+		accountTree,
+		liquidityTree,
+		nftTree,
+		&accountAssetTrees,
+		accountModel), nil
+}
+
+func testDBSetup() {
+	testDBShutdown()
+	time.Sleep(5 * time.Second)
+	cmd := exec.Command("docker", "run", "--name", "postgres-ut-witness", "-p", "5434:5432",
+		"-e", "POSTGRES_PASSWORD=Zkbas@123", "-e", "POSTGRES_USER=postgres", "-e", "POSTGRES_DB=zkbas",
+		"-e", "PGDATA=/var/lib/postgresql/pgdata", "-d", "ghcr.io/bnb-chain/zkbas/zkbas-ut-postgres:0.0.2")
+	if err := cmd.Run(); err != nil {
+		panic(err)
+	}
+	time.Sleep(5 * time.Second)
+	db, _ := gorm.Open(postgres.Open(dsn), &gorm.Config{})
+	blockModel = block.NewBlockModel(db)
+	witnessModel = blockwitness.NewBlockWitnessModel(db)
+	accountModel = account.NewAccountModel(db)
+	accountHistoryModel = account.NewAccountHistoryModel(db)
+	liquidityHistoryModel = liquidity.NewLiquidityHistoryModel(db)
+	nftHistoryModel = nft.NewL2NftHistoryModel(db)
+}
+
+func testDBShutdown() {
+	cmd := exec.Command("docker", "kill", "postgres-ut-witness")
+	//nolint:errcheck
+	cmd.Run()
+	time.Sleep(time.Second)
+	// remove the same container that testDBSetup started
+	cmd = exec.Command("docker", "rm", "postgres-ut-witness")
+	//nolint:errcheck
+	cmd.Run()
+}
diff --git
a/common/proverUtil/accountHelper.go b/common/proverUtil/accountHelper.go deleted file mode 100644 index e842aaa5f..000000000 --- a/common/proverUtil/accountHelper.go +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -type ProverAccountInfo struct { - AccountInfo *Account - AccountAssets []*AccountAsset - AssetsRelatedTxDetails []*TxDetail -} - -type ProverLiquidityInfo struct { - LiquidityInfo *PoolInfo - LiquidityRelatedTxDetail *TxDetail -} - -type ProverNftInfo struct { - NftInfo *NftInfo - NftRelatedTxDetail *TxDetail -} diff --git a/common/proverUtil/addLiquidity.go b/common/proverUtil/addLiquidity.go deleted file mode 100644 index 8fbc71773..000000000 --- a/common/proverUtil/addLiquidity.go +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructAddLiquidityCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeAddLiquidity { - logx.Errorf("[ConstructAddLiquidityCryptoTx] invalid tx type") - return nil, errors.New("[ConstructAddLiquidityCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] invalid params") - return nil, errors.New("[ConstructAddLiquidityCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseAddLiquidityTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoAddLiquidityTx(txInfo) - if err != nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.AddLiquidityTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructAddLiquidityCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoAddLiquidityTx(txInfo *commonTx.AddLiquidityTxInfo) (info *CryptoAddLiquidityTx, err error) { - packedAAmount, err := util.ToPackedAmount(txInfo.AssetAAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedBAmount, err := util.ToPackedAmount(txInfo.AssetBAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedLpAmount, err := util.ToPackedAmount(txInfo.LpAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedTreasuryAmount, err := util.ToPackedAmount(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedKLast, err := util.ToPackedAmount(txInfo.KLast) - if err != nil { - 
logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoAddLiquidityTx{ - FromAccountIndex: txInfo.FromAccountIndex, - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetAAmount: packedAAmount, - AssetBId: txInfo.AssetBId, - AssetBAmount: packedBAmount, - LpAmount: packedLpAmount, - KLast: packedKLast, - TreasuryAmount: packedTreasuryAmount, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - } - return info, nil -} diff --git a/common/proverUtil/addLiquidity_test.go b/common/proverUtil/addLiquidity_test.go deleted file mode 100644 index a81f331c9..000000000 --- a/common/proverUtil/addLiquidity_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructAddLiquidityCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(18) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(17) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := 
ConstructAddLiquidityCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/afterCommitter.sql b/common/proverUtil/afterCommitter.sql deleted file mode 100644 index 73936e743..000000000 --- a/common/proverUtil/afterCommitter.sql +++ /dev/null @@ -1,1851 +0,0 @@ -/* - Navicat Premium Data Transfer - - Source Server : local_docker - Source Server Type : PostgreSQL - Source Server Version : 140003 - Source Host : localhost:5432 - Source Catalog : zecreyLegend - Source Schema : public - - Target Server Type : PostgreSQL - Target Server Version : 140003 - File Encoding : 65001 - - Date: 09/06/2022 15:34:20 -*/ - - --- ---------------------------- --- Sequence structure for account_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_history_id_seq"; -CREATE SEQUENCE "public"."account_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for account_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_id_seq"; -CREATE SEQUENCE "public"."account_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_for_commit_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_for_commit_id_seq"; -CREATE SEQUENCE "public"."block_for_commit_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_id_seq"; -CREATE SEQUENCE "public"."block_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for fail_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."fail_tx_id_seq"; -CREATE SEQUENCE "public"."fail_tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_amount_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_amount_id_seq"; -CREATE SEQUENCE "public"."l1_amount_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_block_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_block_monitor_id_seq"; -CREATE SEQUENCE "public"."l1_block_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_tx_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_tx_sender_id_seq"; -CREATE SEQUENCE "public"."l1_tx_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_asset_info_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_asset_info_id_seq"; -CREATE SEQUENCE "public"."l2_asset_info_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for 
l2_block_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_block_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_block_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_collection_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_collection_id_seq"; -CREATE SEQUENCE "public"."l2_nft_collection_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_id_seq"; -CREATE SEQUENCE "public"."l2_nft_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_withdraw_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_withdraw_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_tx_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_tx_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_tx_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_history_id_seq"; -CREATE SEQUENCE "public"."liquidity_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_id_seq"; -CREATE SEQUENCE "public"."liquidity_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for mempool_tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_detail_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for mempool_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_id_seq" -INCREMENT 1 
-MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for offer_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."offer_id_seq"; -CREATE SEQUENCE "public"."offer_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for proof_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."proof_sender_id_seq"; -CREATE SEQUENCE "public"."proof_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for sys_config_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."sys_config_id_seq"; -CREATE SEQUENCE "public"."sys_config_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_detail_id_seq"; -CREATE SEQUENCE "public"."tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_id_seq"; -CREATE SEQUENCE "public"."tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Table structure for account --- ---------------------------- -DROP TABLE IF EXISTS "public"."account"; -CREATE TABLE "public"."account" ( - "id" int8 NOT NULL DEFAULT nextval('account_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "public_key" text COLLATE "pg_catalog"."default", - "account_name_hash" text COLLATE "pg_catalog"."default", - "l1_address" text COLLATE "pg_catalog"."default", - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of account --- ---------------------------- -INSERT INTO "public"."account" VALUES (1, '0001-01-01 00:00:00+00', '2022-06-09 07:09:06.228049+00', NULL, 0, 'treasury.legend', 'fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998', '167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 0, 0, '{"0":{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 1); -INSERT INTO "public"."account" VALUES (3, '0001-01-01 00:00:00+00', '2022-06-09 07:09:06.296772+00', NULL, 2, 'sher.legend', 'b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85', '214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 9, 1, '{"0":{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":3},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999865000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '135f35977f2abf9cb4029cc418b45ba79cc45cf39685be661c67da75ead45d9a', 1); -INSERT INTO "public"."account" VALUES (2, '0001-01-01 00:00:00+00', '2022-06-09 07:09:06.295772+00', NULL, 1, 'gas.legend', 
'1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93', '0a48e9892a45a04d0c5b0f235a3aeb07b92137ba71a59b9c457774bafde95983', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 0, 0, '{"0":{"AssetId":0,"Balance":20200,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":35000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '219d2d2c0bb8cba744ec53ea8388da6c961b555f62bd5aa290e97109d186c467', 1); -INSERT INTO "public"."account" VALUES (4, '0001-01-01 00:00:00+00', '2022-06-09 07:09:06.296273+00', NULL, 3, 'gavin.legend', '0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e', '1c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 2, 0, '{"0":{"AssetId":0,"Balance":100000000000080000,"LpAmount":0,"OfferCanceledOrFinalized":1},"2":{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '20e11089ec56b54159ea65fc328d75c7b15011b11f5c73653073ddd0bdf1423e', 1); - --- ---------------------------- --- Table structure for account_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."account_history"; -CREATE TABLE "public"."account_history" ( - "id" int8 NOT NULL DEFAULT nextval('account_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of account_history --- ---------------------------- -INSERT INTO "public"."account_history" VALUES (1, '2022-06-09 07:09:06.051241+00', '2022-06-09 07:09:06.051241+00', NULL, 0, 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 1); -INSERT INTO "public"."account_history" VALUES (2, '2022-06-09 07:09:06.060525+00', '2022-06-09 07:09:06.060525+00', NULL, 1, 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 2); -INSERT INTO "public"."account_history" VALUES (3, '2022-06-09 07:09:06.069319+00', '2022-06-09 07:09:06.069319+00', NULL, 2, 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 3); -INSERT INTO "public"."account_history" VALUES (4, '2022-06-09 07:09:06.076707+00', '2022-06-09 07:09:06.076707+00', NULL, 3, 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 4); -INSERT INTO "public"."account_history" VALUES (5, '2022-06-09 07:09:06.085616+00', '2022-06-09 07:09:06.085616+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '06ce582922720755debe04d60415a9c28bc4e788d012d3ea1700549f0e190c9a', 5); -INSERT INTO "public"."account_history" VALUES (6, '2022-06-09 07:09:06.093199+00', '2022-06-09 07:09:06.093199+00', NULL, 3, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '06ce582922720755debe04d60415a9c28bc4e788d012d3ea1700549f0e190c9a', 6); -INSERT INTO "public"."account_history" VALUES (7, '2022-06-09 07:09:06.099703+00', '2022-06-09 07:09:06.099703+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '069e6e659595ff010898f90e61614c5a8d77de0d2984715be6f5b3f8505ae10c', 7); -INSERT INTO "public"."account_history" VALUES (8, '2022-06-09 07:09:06.10681+00', 
'2022-06-09 07:09:06.10681+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '17d8b1c33a32922ce0838eed568beb32728b6271d97e008e481edd92cca55f08', 8); -INSERT INTO "public"."account_history" VALUES (9, '2022-06-09 07:09:06.14888+00', '2022-06-09 07:09:06.14888+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '17d8b1c33a32922ce0838eed568beb32728b6271d97e008e481edd92cca55f08', 13); -INSERT INTO "public"."account_history" VALUES (10, '2022-06-09 07:09:06.157966+00', '2022-06-09 07:09:06.157966+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '09fe19fc526b3e67753a6d91cc709feb45f6b281f6a3a71773a0abebe50f517f', 14); -INSERT INTO "public"."account_history" VALUES (11, '2022-06-09 07:09:06.165729+00', '2022-06-09 07:09:06.165729+00', NULL, 2, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '09fe19fc526b3e67753a6d91cc709feb45f6b281f6a3a71773a0abebe50f517f', 15); -INSERT INTO "public"."account_history" VALUES (12, '2022-06-09 07:09:06.18161+00', '2022-06-09 07:09:06.18161+00', NULL, 2, 1, 0, '{"0":{"AssetId":0,"Balance":99999999999900000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999995000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '1cd1016e23d9e514928a567cc8a4cddcce67b01e817021eb916edaac7e166242', 16); -INSERT INTO "public"."account_history" VALUES (13, '2022-06-09 07:09:06.18161+00', '2022-06-09 07:09:06.18161+00', NULL, 3, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000100000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '14012ade6c7b76679cc709bbc6fe865ec94f0b55a7c976a9b93d8e214f2bf5e5', 16); -INSERT INTO "public"."account_history" VALUES (14, '2022-06-09 07:09:06.18161+00', '2022-06-09 07:09:06.18161+00', NULL, 1, 0, 0, '{"2":{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '08e7c9a1858f6ad9986887426fdddc7231a93a806c81b5841171ec5cb834eabe', 16); -INSERT INTO "public"."account_history" VALUES (15, '2022-06-09 07:09:06.192937+00', '2022-06-09 07:09:06.192937+00', NULL, 2, 2, 0, '{"0":{"AssetId":0,"Balance":99999999989900000,"LpAmount":0,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999990000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '25cc5a90b005abb6c7c0d5d1fbd34907b12c70b7d2f11a6901cd5622186e584e', 17); -INSERT INTO "public"."account_history" VALUES (16, '2022-06-09 07:09:06.192937+00', '2022-06-09 07:09:06.192937+00', NULL, 1, 0, 0, '{"2":{"AssetId":2,"Balance":10000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', 
'1e6cf281636a0d207da108b38aaada12c903f0f7531b3e60ff935675b9d64644', 17); -INSERT INTO "public"."account_history" VALUES (17, '2022-06-09 07:09:06.206169+00', '2022-06-09 07:09:06.206169+00', NULL, 2, 3, 0, '{"0":{"AssetId":0,"Balance":99999999989800000,"LpAmount":100000,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999885000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '3046d6422f86f1ab6e9cbe2a0e449604df61bbdd3f3199fadd7a5bc4f046d289', 18); -INSERT INTO "public"."account_history" VALUES (18, '2022-06-09 07:09:06.206169+00', '2022-06-09 07:09:06.206169+00', NULL, 0, 0, 0, '{"0":{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 18); -INSERT INTO "public"."account_history" VALUES (19, '2022-06-09 07:09:06.206169+00', '2022-06-09 07:09:06.206169+00', NULL, 1, 0, 0, '{"2":{"AssetId":2,"Balance":15000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '12aeb69e38371c4ef60475f6d1d5bd15fb602de9a3ac9d8ce98cb11b95685bee', 18); -INSERT INTO "public"."account_history" VALUES (20, '2022-06-09 07:09:06.217449+00', '2022-06-09 07:09:06.217449+00', NULL, 2, 4, 0, '{"0":{"AssetId":0,"Balance":99999999989795099,"LpAmount":100000,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999884900,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '136b21d7d137ada052e748f45719da132e2a344e5ae6fe334d0b67012c331d6d', 19); -INSERT INTO "public"."account_history" VALUES (21, '2022-06-09 07:09:06.217449+00', '2022-06-09 07:09:06.217449+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":15000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '0a8e87b9a27934661653c3d37ea4b6b9cb7257d23d0ec85e0a77b0c62f6ca453', 19); -INSERT INTO "public"."account_history" VALUES (22, '2022-06-09 07:09:06.230155+00', '2022-06-09 07:09:06.230155+00', NULL, 2, 5, 0, '{"0":{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999880000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '13f7575c9228694a34eaec2e080115ac5cc06ec248abb0e5f9cdb7151b9acedd', 20); -INSERT INTO "public"."account_history" VALUES (23, '2022-06-09 07:09:06.230155+00', '2022-06-09 07:09:06.230155+00', NULL, 0, 0, 0, '{"0":{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 20); -INSERT INTO "public"."account_history" VALUES (24, '2022-06-09 07:09:06.230155+00', '2022-06-09 07:09:06.230155+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":20000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '1b0c02b49e7d799975e98665fc0f2062251e7e295001f43b0fc5013360d9f3cf', 20); -INSERT INTO "public"."account_history" VALUES (25, '2022-06-09 07:09:06.239599+00', '2022-06-09 07:09:06.239599+00', NULL, 2, 6, 1, '{"0":{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999875000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2e7c81f2815d8f11d39097bb1a5eb9f10d3ba9b7b56c0ac0f7a49c3eba397579', 21); -INSERT INTO 
"public"."account_history" VALUES (26, '2022-06-09 07:09:06.239599+00', '2022-06-09 07:09:06.239599+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":25000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '02f3efe09b203142ae196d4555d58e060da742fa15d9213fe26e75d8c5505539', 21); -INSERT INTO "public"."account_history" VALUES (27, '2022-06-09 07:09:06.250069+00', '2022-06-09 07:09:06.250069+00', NULL, 2, 7, 1, '{"0":{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999870000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '135175067e21e4d0a1ec1f01d1eaacbb65a0ec3df762bf586df1c49a3a554e6d', 22); -INSERT INTO "public"."account_history" VALUES (28, '2022-06-09 07:09:06.250069+00', '2022-06-09 07:09:06.250069+00', NULL, 3, 0, 0, '{"0":{"AssetId":0,"Balance":100000000000100000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '14012ade6c7b76679cc709bbc6fe865ec94f0b55a7c976a9b93d8e214f2bf5e5', 22); -INSERT INTO "public"."account_history" VALUES (29, '2022-06-09 07:09:06.250069+00', '2022-06-09 07:09:06.250069+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":30000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2b264f5337dc9d06629ff7099ad6e0653eb3cdf7056dd2cd46752d50c1050b93', 22); -INSERT INTO "public"."account_history" VALUES (30, '2022-06-09 07:09:06.261097+00', '2022-06-09 07:09:06.261097+00', NULL, 3, 1, 0, '{"0":{"AssetId":0,"Balance":100000000000095000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2e2137673dbe998c6dce6b1555760686b57af98a8c23820337ef881703f534d2', 23); -INSERT INTO "public"."account_history" VALUES (31, '2022-06-09 07:09:06.261097+00', '2022-06-09 07:09:06.261097+00', NULL, 2, 7, 1, '{"0":{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999870000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '135175067e21e4d0a1ec1f01d1eaacbb65a0ec3df762bf586df1c49a3a554e6d', 23); -INSERT INTO "public"."account_history" VALUES (32, '2022-06-09 07:09:06.261097+00', '2022-06-09 07:09:06.261097+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":10000,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":30000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '0ade8414224cd97b2841b34519db998c7873d2c386e87fcc93ac94f056424b9a', 23); -INSERT INTO "public"."account_history" VALUES (33, '2022-06-09 07:09:06.274718+00', '2022-06-09 07:09:06.274718+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":15200,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":30000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '2118e6f94c540e6f3676a6cbae245679a59432115d48f7571a6ae5565edca611', 24); -INSERT INTO "public"."account_history" VALUES (34, '2022-06-09 07:09:06.274718+00', '2022-06-09 07:09:06.274718+00', NULL, 2, 8, 1, '{"0":{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":1},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999870000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', 
'1a9e697d30b8b2f43aedc7251497f4a4308f221089f43c8794d8966d5bbf4769', 24); -INSERT INTO "public"."account_history" VALUES (35, '2022-06-09 07:09:06.274718+00', '2022-06-09 07:09:06.274718+00', NULL, 3, 1, 0, '{"0":{"AssetId":0,"Balance":100000000000085000,"LpAmount":0,"OfferCanceledOrFinalized":1},"2":{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '06d9b9fd9b2b3ab3ea7c44de16deef25a7dea1314ec20c5b92dcc1049d221f49', 24); -INSERT INTO "public"."account_history" VALUES (36, '2022-06-09 07:09:06.286505+00', '2022-06-09 07:09:06.286505+00', NULL, 2, 9, 1, '{"0":{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":3},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999865000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '135f35977f2abf9cb4029cc418b45ba79cc45cf39685be661c67da75ead45d9a', 25); -INSERT INTO "public"."account_history" VALUES (37, '2022-06-09 07:09:06.286505+00', '2022-06-09 07:09:06.286505+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":15200,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":35000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '1a94962e42cbd751dc8fb4975ab18ee52493c4fd400d94fb1065719e24d019f3', 25); -INSERT INTO "public"."account_history" VALUES (38, '2022-06-09 07:09:06.297928+00', '2022-06-09 07:09:06.297928+00', NULL, 1, 0, 0, '{"0":{"AssetId":0,"Balance":20200,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":35000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '219d2d2c0bb8cba744ec53ea8388da6c961b555f62bd5aa290e97109d186c467', 26); -INSERT INTO "public"."account_history" VALUES (39, '2022-06-09 07:09:06.297928+00', '2022-06-09 07:09:06.297928+00', NULL, 3, 2, 0, '{"0":{"AssetId":0,"Balance":100000000000080000,"LpAmount":0,"OfferCanceledOrFinalized":1},"2":{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '20e11089ec56b54159ea65fc328d75c7b15011b11f5c73653073ddd0bdf1423e', 26); -INSERT INTO "public"."account_history" VALUES (40, '2022-06-09 07:09:06.297928+00', '2022-06-09 07:09:06.297928+00', NULL, 2, 9, 1, '{"0":{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":3},"1":{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0},"2":{"AssetId":2,"Balance":99999999999999865000,"LpAmount":0,"OfferCanceledOrFinalized":0}}', '135f35977f2abf9cb4029cc418b45ba79cc45cf39685be661c67da75ead45d9a', 26); - --- ---------------------------- --- Table structure for block --- ---------------------------- -DROP TABLE IF EXISTS "public"."block"; -CREATE TABLE "public"."block" ( - "id" int8 NOT NULL DEFAULT nextval('block_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_commitment" text COLLATE "pg_catalog"."default", - "block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "priority_operations" int8, - "pending_on_chain_operations_hash" text COLLATE "pg_catalog"."default", - "pending_on_chain_operations_pub_data" text COLLATE "pg_catalog"."default", - "committed_tx_hash" text COLLATE "pg_catalog"."default", - "committed_at" int8, - "verified_tx_hash" text COLLATE "pg_catalog"."default", - "verified_at" int8, - "block_status" int8 -) -; - --- ---------------------------- --- Records of block --- ---------------------------- -INSERT INTO "public"."block" VALUES (1, '2022-06-08 08:24:27.340951+00', '2022-06-08 08:24:27.340951+00', NULL, 
'0000000000000000000000000000000000000000000000000000000000000000', 0, '14e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 3); -INSERT INTO "public"."block" VALUES (2, '2022-06-09 07:09:06.039+00', '2022-06-09 07:09:06.042917+00', NULL, '1a72f54f1286faefd0f05a774d75a9fc14a981226b52f93af7a8301bfaa1a9dd', 1, '21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (3, '2022-06-09 07:09:06.053+00', '2022-06-09 07:09:06.056523+00', NULL, '044d5308e567d8490b58f1416c701f308a45b886c99da304ac6f7cd7c02de1ae', 2, '1b2ff4ae0d507a971fb267849af6a28000b1d483865c5a610cc47db6f196c672', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (4, '2022-06-09 07:09:06.062+00', '2022-06-09 07:09:06.065022+00', NULL, '0927fa92c98b9d94d8b6af2855f9a09ee9562274540826f81d3820ca72538dfe', 3, '189517f4cfb59471e3539dae36b8f53cb1264d407daf6afbf86132917f1cbafc', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (5, '2022-06-09 07:09:06.07+00', '2022-06-09 07:09:06.073155+00', NULL, '242a431dd79dc30695f5f53f7f995444dd0fe97044dafe0d00086b54db844576', 4, '08b2dc20da16235e692de317d6134578159532d4f081827bd29a5fc783fcc2b7', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (6, '2022-06-09 07:09:06.078+00', '2022-06-09 07:09:06.079383+00', NULL, '1ca7efbd17cc00f4793cc499eadc87380c8b30b3a2a50184f48f850dc969ed2b', 5, '236e2c312a52cfbe96fc14a0693ea0f26d59fae774b35d44ddcf7737d965902f', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (7, '2022-06-09 07:09:06.087+00', '2022-06-09 07:09:06.088388+00', NULL, '181982d4f80f4a56b6df25550961a5d471eefa8db9854d79234f087c8912aad7', 6, '029cfe1c99565d3722f32b6bdb4ee5740d4f4c78bf318968c366c9c7e82d9ba7', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (8, '2022-06-09 07:09:06.094+00', '2022-06-09 07:09:06.095702+00', NULL, '21337368ad6b2c7fde547cdb84bbeaa286d26ed43987bb21f217f10ac346f196', 7, '25cade17a4affef4114a06b8ae6e8e18651a8c4aa0aa01e1c20abce23ad614ec', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (9, '2022-06-09 07:09:06.101+00', '2022-06-09 07:09:06.102555+00', NULL, '254dd9634abd61d13c24f435ad04d6d089b91aef10a8e68e6bc7fe08d1ad4768', 8, '17a21620fe89a6ef610ceea7b2c6230dba84731020a11bd081b46ba23c1cae94', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (10, '2022-06-09 07:09:06.108+00', '2022-06-09 07:09:06.110587+00', NULL, '05439be16da134ebfb53da4aea2088c97a23b7f88569db1ef8ef44266198925c', 9, '0f5cf7c3fa8452ccb12d87b99952cfde059999f3767ddbc032994d94f3fe24ba', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (11, '2022-06-09 07:09:06.118+00', '2022-06-09 07:09:06.119587+00', NULL, '2cd16cf22d1c33999e7ef4706413d8f93a60123dac8823995dc8991f06357eda', 10, 
'0945597849e7df9b43bfade724068c4d5a9d6039da208e6b829feb530ce784cd', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (12, '2022-06-09 07:09:06.125+00', '2022-06-09 07:09:06.127124+00', NULL, '12fc02c4e112c2062a416d8443e2a79b9425fc1984e23d66bd4384de288f3f4e', 11, '1671dd749a5a522f18908e28512d1c6c10034740923bbe9bab5664585b87411d', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (13, '2022-06-09 07:09:06.132+00', '2022-06-09 07:09:06.133802+00', NULL, '114681e463929e795b6143ef83e51c19d74d54b1f4dbc224ddb67c7cecee5e9c', 12, '08ef9af5048b3df61fe3bd025a8db3f47b591a0136281cb3325e7e89930f3925', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (14, '2022-06-09 07:09:06.139+00', '2022-06-09 07:09:06.143066+00', NULL, '2ebcf44ebde73f0a5f18b0691ce757580ad3b5a6a45ee89b4224d90f88c54936', 13, '23f9301b57dbde40b067fc04f2bb2e5241b58739845efa223de352a8a14dd2ae', 1, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (15, '2022-06-09 07:09:06.152+00', '2022-06-09 07:09:06.153338+00', NULL, '1f7e314a0141f18fe8ff32edae3bfe3204f435a35258be0c1642859858813d90', 14, '1785a0c0ef9c282c5dddde78ad80b9689d34cda4a59ed35fcf4f00966ff034e2', 1, '25519b0462cdac4689dce03a73cb3ac5d1200dc9ab3a43f3d0da8e2064c92205', '["EQAAAAIAAQAAAAAAAAAFa8deLWMQAAAAAAAAAAAAAAAhSi168gIt+u5J2tuJktPXwiXYrjYQm1McKEBt1pqtRQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"]', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (16, '2022-06-09 07:09:06.159+00', '2022-06-09 07:09:06.161147+00', NULL, '115a7ed40790d5c0f8faa54d867d07ea41652a613b1e8bb4743229326f1832a7', 15, '28ff96ba5f7e023a7ed9d446cb412fc6965a6ed68d1439b357bb4014ec57a8a4', 1, 'e557d59f7ebc4acb5a9d4de3fe645c08b591d83170bd01c80e7e5335f92e8450', '["EgAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC3rUp+lFnQwVQdsu7OzqzH26gD4SFKLXryAi367kna24mS09fCJdiuNhCbUxwoQG3Wmq1FAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACr0baueVB/e0oyqEq2SVvJ/uZ0UO0xbbuna6zoo8UZewAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"]', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (17, '2022-06-09 07:09:06.171+00', '2022-06-09 07:09:06.174649+00', NULL, '08043522804cec1b46b7a3eae1afd4f577d00e8ea12fc9db6ec37a65f77eb290', 16, '0c599d212ed3641e0b6df735e8b04dd627accfdafbbfa38c173af5f38efb433e', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (18, '2022-06-09 07:09:06.184+00', '2022-06-09 07:09:06.186387+00', NULL, '2f8a924c6a3e9412341c3151fbf3e0e4ccb928926d80108796b7760c4e58ffea', 17, '2d425cddc3d5aaec5ed0dd1465e7038e7fbbcc679e4a4c6c620742134ef93714', 0, 'a213a63e48ec6e59019927c20dbeda00195c7a95e68a25d8dd25db55cfc6fd03', '["CgAAAAKZrIiBg0eX68MvGF7ifC6WhC4aRwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACYloAAAAABAAI+gQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"]', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (19, '2022-06-09 07:09:06.195+00', '2022-06-09 07:09:06.199381+00', NULL, '20d1b0f785f43e91a989bc29b6fb417bc0f9c3a1de3029615adee054e00271b3', 18, 
'1211d91f4e22bd2f1aa38daaec68431b35fd37f8a272d147ebe7ba3e73a58555', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (20, '2022-06-09 07:09:06.209+00', '2022-06-09 07:09:06.211937+00', NULL, '0e00c0df1a6d31d4c9da8845b419c1a63ca17e96374cfbf25cfd6649f4a708be', 19, '1db7fb69796667194858edf7aea403110c42cddc0907b3953181e1184907fb35', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (21, '2022-06-09 07:09:06.22+00', '2022-06-09 07:09:06.224079+00', NULL, '01b9b434b824c6ef586c29c2d210550cd22f163c35ef28c6287aa8fe2f724fb1', 20, '2e888850863cf0c2dffa40c8a0c162749f1f93da6ddf225030a01648cdfc26d6', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (22, '2022-06-09 07:09:06.233+00', '2022-06-09 07:09:06.234755+00', NULL, '0ece5e1e234d032f797f8c7002aa705d0cf2279a12ac08c7c907abe1384faa34', 21, '140622efbca882cddada16ee07f9cc8718b69998a5d8d7922fa7591f2f533edd', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (23, '2022-06-09 07:09:06.241+00', '2022-06-09 07:09:06.244865+00', NULL, '1e7e2fd098f75e8b5e8ac4714b5b467161cc9a8eb85d5c8c9d23dbd1efa96b28', 22, '12884f8bb4852d02ad1f654daa7a2fc230c539e5a1d3dcd83a474eed139e1f7f', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (24, '2022-06-09 07:09:06.252+00', '2022-06-09 07:09:06.255441+00', NULL, '09bf02a6510b15e9e22892aa8c1f6a509468d718fe7305a2a1f5013be4357800', 23, '19ca2bf9cca9b55f61c3f2d352ab486ac7e529670b6af93171054cb8d82f4fee', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (25, '2022-06-09 07:09:06.264+00', '2022-06-09 07:09:06.268598+00', NULL, '080e4feab8e0f5992a1ae7f65956ca02032377c3ecc5275601e8207dd5c5268b', 24, '137f5a5193ca65babef27c7f5be3ebc0eee3fd5c4de748d4cc0e736b80a99649', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (26, '2022-06-09 07:09:06.277+00', '2022-06-09 07:09:06.280578+00', NULL, '12380990e61da98f07e1997c6377f6ab6ae2e240882f97d2b7670636a001c332', 25, '16033680a98409353095c6679b48d1fe06a03ec709b55e448a4c4a56e229393e', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 1); -INSERT INTO "public"."block" VALUES (27, '2022-06-09 07:09:06.288+00', '2022-06-09 07:09:06.292098+00', NULL, '004030d6fc1ab25f9aad69781c2af86d779af098cedd06729e53f4786f44ecd4', 26, '278d08c3c1a50ed6e932abdfde1555b7843c43de10c1fded32f7cfc2987c9105', 0, '2e0da5cc3106784395dd76ba3f496389e9c033b84d523b96c1c30f011cd9d219', '["EAAAAAMAAAACAAAAAAAAAQABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADVqjtWouITnbMVzf47NBScjtCRcQAAAAEAAD6BBmpl0+Q5etBfsuf1DqwWBkenSGws7bvqxkYkyL7qIvEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACFKLXryAi367kna24mS09fCJdiuNhCbUxwoQG3Wmq1F"]', '', 0, '', 0, 1); - --- ---------------------------- --- Table structure for block_for_commit --- ---------------------------- -DROP TABLE IF EXISTS "public"."block_for_commit"; -CREATE TABLE "public"."block_for_commit" ( - "id" int8 NOT NULL DEFAULT nextval('block_for_commit_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - 
"block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "public_data" text COLLATE "pg_catalog"."default", - "timestamp" int8, - "public_data_offsets" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of block_for_commit --- ---------------------------- -INSERT INTO "public"."block_for_commit" VALUES (1, '2022-06-09 07:09:06.046519+00', '2022-06-09 07:09:06.046519+00', NULL, 1, '21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942', '01000000000000000000000000000000000000000000000000000000000000007472656173757279000000000000000000000000000000000000000000000000167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a2005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc0000000000000000000000000000000000000000000000000000000000000000', 1654758546039, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (2, '2022-06-09 07:09:06.057578+00', '2022-06-09 07:09:06.057578+00', NULL, 2, '1b2ff4ae0d507a971fb267849af6a28000b1d483865c5a610cc47db6f196c672', '010000000100000000000000000000000000000000000000000000000000000067617300000000000000000000000000000000000000000000000000000000000a48e9892a45a04d0c5b0f235a3aeb07b92137ba71a59b9c457774bafde959832c24415b75651673b0d7bbf145ac8d7cb744ba6926963d1d014836336df1317a134f4726b89983a8e7babbf6973e7ee16311e24328edf987bb0fbe7a494ec91e0000000000000000000000000000000000000000000000000000000000000000', 1654758546053, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (3, '2022-06-09 07:09:06.066523+00', '2022-06-09 07:09:06.066523+00', NULL, 3, '189517f4cfb59471e3539dae36b8f53cb1264d407daf6afbf86132917f1cbafc', '01000000020000000000000000000000000000000000000000000000000000007368657200000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45235fdbbbf5ef1665f3422211702126433c909487c456e594ef3a56910810396a05dde55c8adfb6689ead7f5610726afd5fd6ea35a3516dc68e57546146f7b6b00000000000000000000000000000000000000000000000000000000000000000', 1654758546062, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (4, '2022-06-09 07:09:06.074432+00', '2022-06-09 07:09:06.074432+00', NULL, 4, '08b2dc20da16235e692de317d6134578159532d4f081827bd29a5fc783fcc2b7', '0100000003000000000000000000000000000000000000000000000000000000676176696e0000000000000000000000000000000000000000000000000000001c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb0649fef47f6cf3dfb767cf5599eea11677bb6495956ec4cf75707d3aca7c06ed0e07b60bf3a2bf5e1a355793498de43e4d8dac50b892528f9664a03ceacc00050000000000000000000000000000000000000000000000000000000000000000', 1654758546070, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (5, '2022-06-09 07:09:06.081888+00', '2022-06-09 07:09:06.081888+00', NULL, 5, '236e2c312a52cfbe96fc14a0693ea0f26d59fae774b35d44ddcf7737d965902f', '040000000200000000000000000000016345785d8a0000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546078, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (6, '2022-06-09 07:09:06.090131+00', '2022-06-09 07:09:06.090131+00', NULL, 6, '029cfe1c99565d3722f32b6bdb4ee5740d4f4c78bf318968c366c9c7e82d9ba7', 
'040000000300000000000000000000016345785d8a00000000000000000000001c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546087, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (7, '2022-06-09 07:09:06.097203+00', '2022-06-09 07:09:06.097203+00', NULL, 7, '25cade17a4affef4114a06b8ae6e8e18651a8c4aa0aa01e1c20abce23ad614ec', '0400000002000100000000000000056bc75e2d63100000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546094, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (8, '2022-06-09 07:09:06.10415+00', '2022-06-09 07:09:06.10415+00', NULL, 8, '17a21620fe89a6ef610ceea7b2c6230dba84731020a11bd081b46ba23c1cae94', '0400000002000200000000000000056bc75e2d63100000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546101, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (9, '2022-06-09 07:09:06.112169+00', '2022-06-09 07:09:06.112169+00', NULL, 9, '0f5cf7c3fa8452ccb12d87b99952cfde059999f3767ddbc032994d94f3fe24ba', '02000000000002001e000000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546108, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (10, '2022-06-09 07:09:06.121441+00', '2022-06-09 07:09:06.121441+00', NULL, 10, '0945597849e7df9b43bfade724068c4d5a9d6039da208e6b829feb530ce784cd', '02000100000001001e000000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546118, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (11, '2022-06-09 07:09:06.128738+00', '2022-06-09 07:09:06.128738+00', NULL, 11, '1671dd749a5a522f18908e28512d1c6c10034740923bbe9bab5664585b87411d', '02000200010002001e000000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546125, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (12, '2022-06-09 07:09:06.135375+00', '2022-06-09 
07:09:06.135375+00', NULL, 12, '08ef9af5048b3df61fe3bd025a8db3f47b591a0136281cb3325e7e89930f3925', '030001003200000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546132, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (13, '2022-06-09 07:09:06.145391+00', '2022-06-09 07:09:06.145391+00', NULL, 13, '23f9301b57dbde40b067fc04f2bb2e5241b58739845efa223de352a8a14dd2ae', '05000000020000000000b7ad4a7e9459d0c1541db2eececeacc7dba803e100000000000000000000000000000000000000000000000000000000000000000000abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b0000000000000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000', 1654758546139, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (14, '2022-06-09 07:09:06.155182+00', '2022-06-09 07:09:06.155182+00', NULL, 14, '1785a0c0ef9c282c5dddde78ad80b9689d34cda4a59ed35fcf4f00966ff034e2', '1100000002000100000000000000056bc75e2d63100000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546152, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (15, '2022-06-09 07:09:06.16275+00', '2022-06-09 07:09:06.16275+00', NULL, 15, '28ff96ba5f7e023a7ed9d446cb412fc6965a6ed68d1439b357bb4014ec57a8a4', '1200000002000000000000000000000000000000000000000000000000000000000000000000000000000000b7ad4a7e9459d0c1541db2eececeacc7dba803e1214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000000000000000000000000000000000000000000000000000000000000000abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b0000000000000000000000000000000000000000000000000000000000000000', 1654758546159, '[0]'); -INSERT INTO "public"."block_for_commit" VALUES (16, '2022-06-09 07:09:06.177047+00', '2022-06-09 07:09:06.177047+00', NULL, 16, '0c599d212ed3641e0b6df735e8b04dd627accfdafbbfa38c173af5f38efb433e', '0600000002000000030000000030d4000000000100023e8100000000000000000dde7a022857fec1b8ffa7664a937a250d3ae68f356061754d3531e2674103d80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546171, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (17, '2022-06-09 07:09:06.189386+00', '2022-06-09 07:09:06.189386+00', NULL, 17, '2d425cddc3d5aaec5ed0dd1465e7038e7fbbcc679e4a4c6c620742134ef93714', '0a0000000299ac8881834797ebc32f185ee27c2e96842e1a47000000000000000000000000000000000000000000000000000000009896800000000100023e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546184, '[0]'); 
-INSERT INTO "public"."block_for_commit" VALUES (18, '2022-06-09 07:09:06.202172+00', '2022-06-09 07:09:06.202172+00', NULL, 18, '1211d91f4e22bd2f1aa38daaec68431b35fd37f8a272d147ebe7ba3e73a58555', '08000000020000000030d400000030d400000030d4004a817c800000000000000000000000000000000000000000000000000000000000000000000100023e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546195, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (19, '2022-06-09 07:09:06.214306+00', '2022-06-09 07:09:06.214306+00', NULL, 19, '1db7fb69796667194858edf7aea403110c42cddc0907b3953181e1184907fb35', '070000000200000000000c800000000c600000000100003e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546209, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (20, '2022-06-09 07:09:06.22603+00', '2022-06-09 07:09:06.22603+00', NULL, 20, '2e888850863cf0c2dffa40c8a0c162749f1f93da6ddf225030a01648cdfc26d6', '090000000200000000000c600000000c800000000c804a5bb8880000000000000000000000000000000000000000000000000000000000000000000100023e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546220, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (21, '2022-06-09 07:09:06.236752+00', '2022-06-09 07:09:06.236752+00', NULL, 21, '140622efbca882cddada16ee07f9cc8718b69998a5d8d7922fa7591f2f533edd', '0b0000000200010000000100023e81000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546233, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (22, '2022-06-09 07:09:06.246599+00', '2022-06-09 07:09:06.246599+00', NULL, 22, '12884f8bb4852d02ad1f654daa7a2fc230c539e5a1d3dcd83a474eed139e1f7f', '0c000000020000000300000000010000000100023e8100000001000000000000066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546241, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (23, '2022-06-09 07:09:06.257099+00', '2022-06-09 07:09:06.257099+00', NULL, 23, '19ca2bf9cca9b55f61c3f2d352ab486ac7e529670b6af93171054cb8d82f4fee', 
'0d000000030000000200000000010000000100003e81000000000000000000000dde7a022857fec1b8ffa7664a937a250d3ae68f356061754d3531e2674103d80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546252, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (24, '2022-06-09 07:09:06.270873+00', '2022-06-09 07:09:06.270873+00', NULL, 24, '137f5a5193ca65babef27c7f5be3ebc0eee3fd5c4de748d4cc0e736b80a99649', '0e00000002000000030000000000000200000000000000010000000000000000000000000000000000000004e200000000000000000019000000000100003e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546264, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (25, '2022-06-09 07:09:06.282652+00', '2022-06-09 07:09:06.282652+00', NULL, 25, '16033680a98409353095c6679b48d1fe06a03ec709b55e448a4c4a56e229393e', '0f000000020000010000000100023e810000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 1654758546277, 'null'); -INSERT INTO "public"."block_for_commit" VALUES (26, '2022-06-09 07:09:06.293926+00', '2022-06-09 07:09:06.293926+00', NULL, 26, '278d08c3c1a50ed6e932abdfde1555b7843c43de10c1fded32f7cfc2987c9105', '1000000003000000020000000000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000d5aa3b56a2e2139db315cdfe3b34149c8ed091710000000100003e81066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f10000000000000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45', 1654758546288, '[0]'); - --- ---------------------------- --- Table structure for fail_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."fail_tx"; -CREATE TABLE "public"."fail_tx" ( - "id" int8 NOT NULL DEFAULT nextval('fail_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "asset_a_id" int8, - "asset_b_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of fail_tx --- ---------------------------- - --- ---------------------------- --- Table structure for l1_amount --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_amount"; -CREATE TABLE "public"."l1_amount" ( - "id" int8 NOT NULL DEFAULT nextval('l1_amount_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "block_height" int8, - "total_amount" int8 -) -; - --- 
---------------------------- --- Records of l1_amount --- ---------------------------- - --- ---------------------------- --- Table structure for l1_block_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_block_monitor"; -CREATE TABLE "public"."l1_block_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l1_block_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_block_height" int8, - "block_info" text COLLATE "pg_catalog"."default", - "monitor_type" int8 -) -; - --- ---------------------------- --- Records of l1_block_monitor --- ---------------------------- -INSERT INTO "public"."l1_block_monitor" VALUES (1, '2022-06-08 08:24:45.516427+00', '2022-06-08 08:24:45.516427+00', NULL, 320000, 'null', 1); -INSERT INTO "public"."l1_block_monitor" VALUES (2, '2022-06-08 08:24:56.731448+00', '2022-06-08 08:24:56.731448+00', NULL, 400645, '[{"EventType":6,"TxHash":"0xb9b9c017a978399c97013d23b40d1423c30f009c4d22c97deb824ea30eb4f90b"},{"EventType":7,"TxHash":"0xb9b9c017a978399c97013d23b40d1423c30f009c4d22c97deb824ea30eb4f90b"},{"EventType":5,"TxHash":"0xb9b9c017a978399c97013d23b40d1423c30f009c4d22c97deb824ea30eb4f90b"},{"EventType":4,"TxHash":"0xbc8ad0a59bec5bc874fee604ec6fc94f8067bac6e9e3f0a88b6910e090f91508"},{"EventType":4,"TxHash":"0x115fe0fd9d7c293528507f061915c3f667f796549bdfa8cf3d08d0d00c43dee8"}]', 1); -INSERT INTO "public"."l1_block_monitor" VALUES (3, '2022-06-08 08:25:22.693508+00', '2022-06-08 08:25:22.693508+00', NULL, 320000, 'null', 0); -INSERT INTO "public"."l1_block_monitor" VALUES (4, '2022-06-08 08:25:32.568327+00', '2022-06-08 08:25:32.568327+00', NULL, 400658, '[{"EventType":0,"TxHash":"0x168dace8281eee3d90f81db58754161166af5c6c25b9db744fce8551a0c87af6"},{"EventType":0,"TxHash":"0xc6e7f209ac872a25879b76f125b842f99556cff8451e7ec4af1300ef6502b4f3"},{"EventType":0,"TxHash":"0x2ababebda27a5441798f88b10b9b75ca29095b35c3965ee07aa8098067314e5d"},{"EventType":0,"TxHash":"0x1a1f35d1e50f4b9aa9abd110679e6aba0586f620a4c56a52308cd847b07490ed"},{"EventType":0,"TxHash":"0x4d5c3b3d15b48a17cd3920535581119be48a3ec8fb44eb621b1d587e14ca5a77"},{"EventType":0,"TxHash":"0x9ddacae8cde4948d91826f1badbce9136c50d2be70363b7f2d38b0bfc0d5ff5c"},{"EventType":0,"TxHash":"0x418cdeec66b95e6d9d0a9460d3c44479c15951101143a86035f2cd800f5fab98"},{"EventType":0,"TxHash":"0x8b6f1bf70751be929f0f77d8b1ea204b045881f01494ae3a7d7d73657812d38b"},{"EventType":0,"TxHash":"0xa5c17af78376a85c9f213ea488adefbee9966d263e9047f8c31dea5408d227e7"},{"EventType":0,"TxHash":"0xe5d077d6f88dafa65e9e0058dab2e852e8302dc7032d36e005dad37418cbe439"},{"EventType":0,"TxHash":"0xbad7534ff00fac01c27cf86f3c1e0039ed8d9bcecd918a0ac1e9c1a56652bc6d"},{"EventType":0,"TxHash":"0x463dcd43d99faeea3d3db3001ee9b464696ec9200d2955b84ffba08ff92df38a"},{"EventType":0,"TxHash":"0x390fb0013d788c35a9418f348e1b44c565d6f73b587e6c5d77f47ff858d8322a"},{"EventType":0,"TxHash":"0x9cc1335db2ad0cdd2e006c64da7e2b63adacab522a15050949b5cab827f9f2a9"},{"EventType":0,"TxHash":"0xb0e1317c18b03378e3699829339135b7afbf090a6aaf985cd825abf109c08066"}]', 0); - --- ---------------------------- --- Table structure for l1_tx_sender --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_tx_sender"; -CREATE TABLE "public"."l1_tx_sender" ( - "id" int8 NOT NULL DEFAULT nextval('l1_tx_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "tx_status" int8, - "tx_type" 
int2, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l1_tx_sender --- ---------------------------- - --- ---------------------------- --- Table structure for l2_asset_info --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_asset_info"; -CREATE TABLE "public"."l2_asset_info" ( - "id" int8 NOT NULL DEFAULT nextval('l2_asset_info_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "asset_address" text COLLATE "pg_catalog"."default", - "asset_name" text COLLATE "pg_catalog"."default", - "asset_symbol" text COLLATE "pg_catalog"."default", - "decimals" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_asset_info --- ---------------------------- -INSERT INTO "public"."l2_asset_info" VALUES (1, '2022-06-08 08:24:27.334984+00', '2022-06-08 08:24:27.334984+00', NULL, 0, '0x00', 'BNB', 'BNB', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (2, '2022-06-08 08:24:56.735319+00', '2022-06-08 08:24:56.735319+00', NULL, 1, '0x3E72bC3842c47d5B63B634F0c7f2E5a56Ad94124', 'LEG', 'LEG', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (3, '2022-06-08 08:24:56.735319+00', '2022-06-08 08:24:56.735319+00', NULL, 2, '0x6403c9a361Df1276c1568EAB2141aceD24F53eF6', 'REY', 'REY', 18, 0); - --- ---------------------------- --- Table structure for l2_block_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_block_event_monitor"; -CREATE TABLE "public"."l2_block_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_block_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_event_type" int2, - "l1_block_height" int8, - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_block_event_monitor --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft"; -CREATE TABLE "public"."l2_nft" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft --- ---------------------------- -INSERT INTO "public"."l2_nft" VALUES (1, '2022-06-08 08:25:58.056967+00', '2022-06-09 07:09:06.168045+00', NULL, 0, 0, 0, '0', '0', '0', 0, 0); -INSERT INTO "public"."l2_nft" VALUES (2, '2022-06-09 05:59:35.3716+00', '2022-06-09 07:09:06.299699+00', NULL, 1, 0, 0, '0', '0', '0', 0, 0); - --- ---------------------------- --- Table structure for l2_nft_collection --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_collection"; -CREATE TABLE "public"."l2_nft_collection" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_collection_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "name" text COLLATE "pg_catalog"."default", - "introduction" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - 
--- ---------------------------- --- Records of l2_nft_collection --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_exchange --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange"; -CREATE TABLE "public"."l2_nft_exchange" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of l2_nft_exchange --- ---------------------------- -INSERT INTO "public"."l2_nft_exchange" VALUES (1, '2022-06-09 05:59:50.16996+00', '2022-06-09 05:59:50.16996+00', NULL, 3, 2, 1, 0, '10000'); - --- ---------------------------- --- Table structure for l2_nft_exchange_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange_history"; -CREATE TABLE "public"."l2_nft_exchange_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_exchange_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_history"; -CREATE TABLE "public"."l2_nft_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8, - "status" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_history --- ---------------------------- -INSERT INTO "public"."l2_nft_history" VALUES (1, '2022-06-09 07:09:06.149881+00', '2022-06-09 07:09:06.149881+00', NULL, 0, 0, 2, 'abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b', '0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1', '0', 0, 0, 0, 13); -INSERT INTO "public"."l2_nft_history" VALUES (2, '2022-06-09 07:09:06.170047+00', '2022-06-09 07:09:06.170047+00', NULL, 0, 0, 0, '0', '0', '0', 0, 0, 0, 15); -INSERT INTO "public"."l2_nft_history" VALUES (3, '2022-06-09 07:09:06.251098+00', '2022-06-09 07:09:06.251098+00', NULL, 1, 2, 3, '066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1', '0', '0', 0, 1, 0, 22); -INSERT INTO "public"."l2_nft_history" VALUES (4, '2022-06-09 07:09:06.26284+00', '2022-06-09 07:09:06.26284+00', NULL, 1, 2, 2, '066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1', '0', '0', 0, 1, 0, 23); -INSERT INTO "public"."l2_nft_history" VALUES (5, '2022-06-09 07:09:06.276395+00', '2022-06-09 07:09:06.276395+00', NULL, 1, 2, 3, '066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1', '0', '0', 0, 1, 0, 24); -INSERT INTO "public"."l2_nft_history" VALUES (6, '2022-06-09 07:09:06.300854+00', '2022-06-09 07:09:06.300854+00', 
NULL, 1, 0, 0, '0', '0', '0', 0, 0, 0, 26); - --- ---------------------------- --- Table structure for l2_nft_withdraw_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_withdraw_history"; -CREATE TABLE "public"."l2_nft_withdraw_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_withdraw_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft_withdraw_history --- ---------------------------- -INSERT INTO "public"."l2_nft_withdraw_history" VALUES (1, '2022-06-09 07:09:06.167039+00', '2022-06-09 07:09:06.167039+00', NULL, 0, 0, 2, 'abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b', '0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1', '0', 0, 0); -INSERT INTO "public"."l2_nft_withdraw_history" VALUES (2, '2022-06-09 07:09:06.298804+00', '2022-06-09 07:09:06.298804+00', NULL, 1, 2, 3, '066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1', '0', '0', 0, 1); - --- ---------------------------- --- Table structure for l2_tx_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_tx_event_monitor"; -CREATE TABLE "public"."l2_tx_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_tx_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l1_block_height" int8, - "sender_address" text COLLATE "pg_catalog"."default", - "request_id" int8, - "tx_type" int8, - "pubdata" text COLLATE "pg_catalog"."default", - "expiration_block" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_tx_event_monitor --- ---------------------------- -INSERT INTO "public"."l2_tx_event_monitor" VALUES (1, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.058004+00', NULL, '0x168dace8281eee3d90f81db58754161166af5c6c25b9db744fce8551a0c87af6', 399780, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 0, 1, '01000000007472656173757279000000000000000000000000000000000000000000000000167c5363088a40a4839912a872f43164270740c7e986ec55397b2d583317ab4a2005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc', 440100, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (2, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.059215+00', NULL, '0xc6e7f209ac872a25879b76f125b842f99556cff8451e7ec4af1300ef6502b4f3', 399782, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 1, 1, '010000000067617300000000000000000000000000000000000000000000000000000000000a48e9892a45a04d0c5b0f235a3aeb07b92137ba71a59b9c457774bafde959832c24415b75651673b0d7bbf145ac8d7cb744ba6926963d1d014836336df1317a134f4726b89983a8e7babbf6973e7ee16311e24328edf987bb0fbe7a494ec91e', 440102, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (3, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.059769+00', NULL, '0x2ababebda27a5441798f88b10b9b75ca29095b35c3965ee07aa8098067314e5d', 399785, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 2, 1, 
'01000000007368657200000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45235fdbbbf5ef1665f3422211702126433c909487c456e594ef3a56910810396a05dde55c8adfb6689ead7f5610726afd5fd6ea35a3516dc68e57546146f7b6b0', 440105, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (4, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.060276+00', NULL, '0x1a1f35d1e50f4b9aa9abd110679e6aba0586f620a4c56a52308cd847b07490ed', 399788, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 3, 1, '0100000000676176696e0000000000000000000000000000000000000000000000000000001c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb0649fef47f6cf3dfb767cf5599eea11677bb6495956ec4cf75707d3aca7c06ed0e07b60bf3a2bf5e1a355793498de43e4d8dac50b892528f9664a03ceacc0005', 440108, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (5, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.060504+00', NULL, '0x4d5c3b3d15b48a17cd3920535581119be48a3ec8fb44eb621b1d587e14ca5a77', 399796, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 4, 4, '0400000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad4500000000000000000000016345785d8a0000', 440116, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (6, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.061004+00', NULL, '0x9ddacae8cde4948d91826f1badbce9136c50d2be70363b7f2d38b0bfc0d5ff5c', 399798, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 5, 4, '04000000001c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb00000000000000000000016345785d8a0000', 440118, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (7, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.061504+00', NULL, '0x418cdeec66b95e6d9d0a9460d3c44479c15951101143a86035f2cd800f5fab98', 399805, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 6, 4, '0400000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45000100000000000000056bc75e2d63100000', 440125, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (8, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.062503+00', NULL, '0x8b6f1bf70751be929f0f77d8b1ea204b045881f01494ae3a7d7d73657812d38b', 399808, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 7, 4, '0400000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45000200000000000000056bc75e2d63100000', 440128, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (9, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.06257+00', NULL, '0xa5c17af78376a85c9f213ea488adefbee9966d263e9047f8c31dea5408d227e7', 399815, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 8, 2, '02000000000002001e000000000005', 440135, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (10, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.063316+00', NULL, '0xe5d077d6f88dafa65e9e0058dab2e852e8302dc7032d36e005dad37418cbe439', 399817, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 9, 2, '02000100000001001e000000000005', 440137, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (11, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.063777+00', NULL, '0xbad7534ff00fac01c27cf86f3c1e0039ed8d9bcecd918a0ac1e9c1a56652bc6d', 399820, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 10, 2, '02000200010002001e000000000005', 440140, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (12, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.064004+00', NULL, '0x463dcd43d99faeea3d3db3001ee9b464696ec9200d2955b84ffba08ff92df38a', 399827, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 11, 3, '030001003200000000000a', 
440147, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (13, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.0647+00', NULL, '0x390fb0013d788c35a9418f348e1b44c565d6f73b587e6c5d77f47ff858d8322a', 399837, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 12, 5, '05000000000000000000b7ad4a7e9459d0c1541db2eececeacc7dba803e1000000000000abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b0000000000000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad450000', 440157, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (14, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.065229+00', NULL, '0x9cc1335db2ad0cdd2e006c64da7e2b63adacab522a15050949b5cab827f9f2a9', 399844, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 13, 17, '1100000000000100000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45', 440164, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (15, '2022-06-08 08:25:32.572194+00', '2022-06-08 08:25:58.06572+00', NULL, '0xb0e1317c18b03378e3699829339135b7afbf090a6aaf985cd825abf109c08066', 399851, '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 14, 18, '1200000000000000000000000000000000000000000000000000000000000000000000000000214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 440171, 2); - --- ---------------------------- --- Table structure for liquidity --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity"; -CREATE TABLE "public"."liquidity" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8 -) -; - --- ---------------------------- --- Records of liquidity --- ---------------------------- -INSERT INTO "public"."liquidity" VALUES (3, '2022-06-08 08:25:58.055259+00', '2022-06-09 07:09:06.130441+00', NULL, 2, 1, '0', 2, '0', '0', '0', 30, 0, 5); -INSERT INTO "public"."liquidity" VALUES (2, '2022-06-08 08:25:58.055259+00', '2022-06-09 07:09:06.137001+00', NULL, 1, 0, '0', 1, '0', '0', '0', 50, 0, 10); -INSERT INTO "public"."liquidity" VALUES (1, '2022-06-08 08:25:58.055259+00', '2022-06-09 07:09:06.230752+00', NULL, 0, 0, '99802', 2, '100000', '99900', '9980200000', 30, 0, 5); - --- ---------------------------- --- Table structure for liquidity_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity_history"; -CREATE TABLE "public"."liquidity_history" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8, - "l2_block_height" int8 
-) -; - --- ---------------------------- --- Records of liquidity_history --- ---------------------------- -INSERT INTO "public"."liquidity_history" VALUES (1, '2022-06-09 07:09:06.115785+00', '2022-06-09 07:09:06.115785+00', NULL, 0, 0, '0', 2, '0', '0', '0', 30, 0, 5, 9); -INSERT INTO "public"."liquidity_history" VALUES (2, '2022-06-09 07:09:06.124009+00', '2022-06-09 07:09:06.124009+00', NULL, 1, 0, '0', 1, '0', '0', '0', 30, 0, 5, 10); -INSERT INTO "public"."liquidity_history" VALUES (3, '2022-06-09 07:09:06.131379+00', '2022-06-09 07:09:06.131379+00', NULL, 2, 1, '0', 2, '0', '0', '0', 30, 0, 5, 11); -INSERT INTO "public"."liquidity_history" VALUES (4, '2022-06-09 07:09:06.137996+00', '2022-06-09 07:09:06.137996+00', NULL, 1, 0, '0', 1, '0', '0', '0', 50, 0, 10, 12); -INSERT INTO "public"."liquidity_history" VALUES (5, '2022-06-09 07:09:06.208003+00', '2022-06-09 07:09:06.208003+00', NULL, 0, 0, '100000', 2, '100000', '100000', '10000000000', 30, 0, 5, 18); -INSERT INTO "public"."liquidity_history" VALUES (6, '2022-06-09 07:09:06.219124+00', '2022-06-09 07:09:06.219124+00', NULL, 0, 0, '99901', 2, '100100', '100000', '10000000000', 30, 0, 5, 19); -INSERT INTO "public"."liquidity_history" VALUES (7, '2022-06-09 07:09:06.231662+00', '2022-06-09 07:09:06.231662+00', NULL, 0, 0, '99802', 2, '100000', '99900', '9980200000', 30, 0, 5, 20); - --- ---------------------------- --- Table structure for mempool_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx"; -CREATE TABLE "public"."mempool_tx" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee_asset_id" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8, - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of mempool_tx --- ---------------------------- -INSERT INTO "public"."mempool_tx" VALUES (2, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.058617+00', NULL, '9f50d170-e704-11ec-b6f3-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":1,"AccountName":"gas.legend","AccountNameHash":"CkjpiSpFoE0MWw8jWjrrB7khN7pxpZucRXd0uv3pWYM=","PubKey":"1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93"}', '', '', 1, 0, 0, 2, 1); -INSERT INTO "public"."mempool_tx" VALUES (3, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.067524+00', NULL, '9f50f093-e704-11ec-b6f3-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":2,"AccountName":"sher.legend","AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","PubKey":"b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85"}', '', '', 2, 0, 0, 3, 1); -INSERT INTO "public"."mempool_tx" VALUES (4, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.074676+00', NULL, '9f510cb1-e704-11ec-b6f3-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 
'{"TxType":1,"AccountIndex":3,"AccountName":"gavin.legend","AccountNameHash":"HFTAnJj3renV7rpBJKx8kS5laZo/dvpl1x6vY1nZvOs=","PubKey":"0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e"}', '', '', 3, 0, 0, 4, 1); -INSERT INTO "public"."mempool_tx" VALUES (5, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.082495+00', NULL, '9f510cb1-e704-11ec-b6f4-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 2, 0, 0, 5, 1); -INSERT INTO "public"."mempool_tx" VALUES (6, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.090802+00', NULL, '9f510cb1-e704-11ec-b6f5-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":3,"AccountNameHash":"HFTAnJj3renV7rpBJKx8kS5laZo/dvpl1x6vY1nZvOs=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 3, 0, 0, 6, 1); -INSERT INTO "public"."mempool_tx" VALUES (7, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.097963+00', NULL, '9f510cb1-e704-11ec-b6f6-988fe0603efa', 4, 0, '0', -1, -1, 1, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, 7, 1); -INSERT INTO "public"."mempool_tx" VALUES (8, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.104587+00', NULL, '9f510cb1-e704-11ec-b6f7-988fe0603efa', 4, 0, '0', -1, -1, 2, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":2,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, 8, 1); -INSERT INTO "public"."mempool_tx" VALUES (9, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.112587+00', NULL, '9f510cb1-e704-11ec-b6f8-988fe0603efa', 2, 0, '0', -1, 0, 0, '0', '0', '{"TxType":2,"PairIndex":0,"AssetAId":0,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, 9, 1); -INSERT INTO "public"."mempool_tx" VALUES (10, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.121926+00', NULL, '9f510cb1-e704-11ec-b6f9-988fe0603efa', 2, 0, '0', -1, 1, 0, '0', '0', '{"TxType":2,"PairIndex":1,"AssetAId":0,"AssetBId":1,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, 10, 1); -INSERT INTO "public"."mempool_tx" VALUES (11, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.129367+00', NULL, '9f510cb1-e704-11ec-b6fa-988fe0603efa', 2, 0, '0', -1, 2, 0, '0', '0', '{"TxType":2,"PairIndex":2,"AssetAId":1,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, 11, 1); -INSERT INTO "public"."mempool_tx" VALUES (12, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.135924+00', NULL, '9f510cb1-e704-11ec-b6fb-988fe0603efa', 3, 0, '0', -1, 1, 0, '0', '0', '{"TxType":3,"PairIndex":1,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', '', '', -1, 0, 0, 12, 1); -INSERT INTO "public"."mempool_tx" VALUES (13, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.145891+00', NULL, '9f51d005-e704-11ec-b6fb-988fe0603efa', 5, 0, '0', 0, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 
'{"TxType":5,"AccountIndex":2,"NftIndex":0,"NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftContentHash":"q9G2rnlQf3tKMqhKtklbyf7mdFDtMW27p2us6KPFGXs=","NftL1TokenId":0,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CollectionId":0}', '', '', 2, 0, 0, 13, 1); -INSERT INTO "public"."mempool_tx" VALUES (14, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.155677+00', NULL, '9f51d005-e704-11ec-b6fc-988fe0603efa', 17, 0, '0', -1, -1, 1, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":17,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, 14, 1); -INSERT INTO "public"."mempool_tx" VALUES (15, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.163659+00', NULL, '9f51d005-e704-11ec-b6fd-988fe0603efa', 18, 0, '0', 0, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":18,"AccountIndex":2,"CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftIndex":0,"CollectionId":0,"NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CreatorAccountNameHash":"AA==","NftContentHash":"q9G2rnlQf3tKMqhKtklbyf7mdFDtMW27p2us6KPFGXs=","NftL1TokenId":0}', '', '', 2, 0, 0, 15, 1); -INSERT INTO "public"."mempool_tx" VALUES (17, '2022-06-09 05:59:00.037782+00', '2022-06-09 07:09:06.18983+00', NULL, '1d4024d1-a49a-4f6a-9221-b716fedbd4aa', 10, 2, '5000', -1, -1, 0, '10000000', '0x99AC8881834797ebC32f185ee27c2e96842e1a47', '{"FromAccountIndex":2,"AssetId":0,"AssetAmount":10000000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ToAddress":"0x99AC8881834797ebC32f185ee27c2e96842e1a47","ExpiredAt":1654761540020,"Nonce":2,"Sig":"YLVT6d4HuMWX7zPw3gcsmqs//dqE0xnfqTbSyLxx3pMDN7IvgmErKDXGiZ/XKC75wf2I03R2dXpmZ6bercDVHA=="}', '', '', 2, 2, 1654761540020, 17, 1); -INSERT INTO "public"."mempool_tx" VALUES (18, '2022-06-09 05:59:08.01764+00', '2022-06-09 07:09:06.202731+00', NULL, 'd9f93c69-3be2-483c-b746-d98022a61ecb', 8, 2, '5000', -1, 0, 0, '100000', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAAmount":100000,"AssetBId":2,"AssetBAmount":100000,"LpAmount":100000,"KLast":10000000000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761547992,"Nonce":3,"Sig":"7x6AUCZwD+fmcXWDs0WERCMR+rIBRSDDlzSV3vrjggQEdNd8uj6ghy3uzqrM2oeqa/9gP8vRzPFHYmUvmCKsTQ=="}', '', '', 2, 3, 1654761547992, 18, 1); -INSERT INTO "public"."mempool_tx" VALUES (1, '2022-06-08 08:25:58.050504+00', '2022-06-09 07:09:06.048101+00', NULL, '9f5005a9-e704-11ec-b6f3-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":0,"AccountName":"treasury.legend","AccountNameHash":"FnxTYwiKQKSDmRKocvQxZCcHQMfphuxVOXstWDMXq0o=","PubKey":"fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998"}', '', '', 0, 0, 0, 1, 1); -INSERT INTO "public"."mempool_tx" VALUES (16, '2022-06-09 05:58:53.463597+00', '2022-06-09 07:09:06.177934+00', NULL, '01de4078-304a-406d-9995-7c8550248f28', 6, 2, '5000', -1, -1, 0, '100000', '', 
'{"FromAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"1c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb","AssetId":0,"AssetAmount":100000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"Memo":"transfer","CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1654761533445,"Nonce":1,"Sig":"epyzhZA39/F3mHPAvv8dz8NgPTtPWlYqPs9tEyDNsQgA8A4bi4ruGJe6evoUJ9BdWR49SJ1SCaJ+on1y2QyEFg=="}', '', 'transfer', 2, 1, 1654761533445, 16, 1); -INSERT INTO "public"."mempool_tx" VALUES (19, '2022-06-09 05:59:15.071544+00', '2022-06-09 07:09:06.214826+00', NULL, '8d60898f-ef87-4726-9322-1bda3fd22c2b', 7, 0, '5000', -1, 0, 0, '100', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":2,"AssetAAmount":100,"AssetBId":0,"AssetBMinAmount":98,"AssetBAmountDelta":99,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1654761555052,"Nonce":4,"Sig":"qCqSqikwaTLE/4VDURQHRYK+9gykmtanhYBv/ByGSoMGBJq+8D7z2b9yc0D8M1zNmfeD5YvpCpJkElsjarviGw=="}', '', '', 2, 4, 1654761555052, 19, 1); -INSERT INTO "public"."mempool_tx" VALUES (20, '2022-06-09 05:59:22.05211+00', '2022-06-09 07:09:06.226605+00', NULL, '4282c649-d9be-49fc-bc34-48dd4bab5f15', 9, 2, '5000', -1, 0, 0, '100', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAMinAmount":98,"AssetBId":2,"AssetBMinAmount":99,"LpAmount":100,"AssetAAmountDelta":99,"AssetBAmountDelta":100,"KLast":9980200000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761562031,"Nonce":5,"Sig":"A9Qrmz5Uj7mDA3WfUGRhFoNuWFMtO+RwxwfkbkmglBEAXouZOVBhU4iguZYAEtwO6xFsPYrGFI92A0z21KFk6Q=="}', '', '', 2, 5, 1654761562031, 20, 1); -INSERT INTO "public"."mempool_tx" VALUES (21, '2022-06-09 05:59:29.308896+00', '2022-06-09 07:09:06.237175+00', NULL, '0c3c72c1-ff2d-4ba3-b026-93b7cec8e6a1', 11, 2, '5000', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"CollectionId":1,"Name":"Zecrey Collection","Introduction":"Wonderful zecrey!","GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761569287,"Nonce":6,"Sig":"HrXIp5Vauk06jV3Jj4Lke/+XYp3ThhQsVAx8QksB1aYFVYeEZOf5nmIML+U3TSatTwyLCGMmomQSjGMxVtouxw=="}', '', '', 2, 6, 1654761569287, 21, 1); -INSERT INTO "public"."mempool_tx" VALUES (22, '2022-06-09 05:59:35.369268+00', '2022-06-09 07:09:06.247097+00', NULL, '69b6e9bb-0f8b-4b20-af35-35c6945489aa', 12, 2, '5000', 1, -1, 0, '0', '', '{"CreatorAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"1c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb","NftIndex":1,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftCollectionId":1,"CreatorTreasuryRate":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761575344,"Nonce":7,"Sig":"KAC/ZwgC7PJo4KDqBpVI048lfSjzli9qLXp3I4CFBJIBZY/c301Cm46AgoxkfaC02p8M5W263VWSEOsW+YATRg=="}', '', '', 2, 7, 1654761575344, 22, 1); -INSERT INTO "public"."mempool_tx" VALUES (23, '2022-06-09 05:59:43.911324+00', '2022-06-09 07:09:06.257815+00', NULL, 'a512199e-4146-407c-9b13-0039c8796650', 13, 0, '5000', 1, -1, 0, '0', '', '{"FromAccountIndex":3,"ToAccountIndex":2,"ToAccountNameHash":"214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45","NftIndex":1,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1654761583894,"Nonce":1,"Sig":"G/YUzJOh4sp8ZF27Mz4s5hvLBC1P63uZbIVjMzHjqRkB0wTz30LsTSC5VIcxILqv7a/dCw4qd4Y3LYOtgLPW1w=="}', '', '', 3, 1, 
1654761583894, 23, 1); -INSERT INTO "public"."mempool_tx" VALUES (24, '2022-06-09 05:59:50.167539+00', '2022-06-09 07:09:06.271098+00', NULL, '79245e26-0ec4-486e-9556-2fd477928380', 14, 0, '5000', 1, -1, 0, '10000', '', '{"AccountIndex":2,"BuyOffer":{"Type":0,"OfferId":0,"AccountIndex":3,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1654754390138,"ExpiredAt":1654761590138,"TreasuryRate":200,"Sig":"CrwNdL+oHhdWBgJ0j+O/IY5Ca5qnBw6kDkPyUWD4wywApriICAXoTooPa//9vP9QRDPEQsHu9C2vvfeNaXeUGA=="},"SellOffer":{"Type":1,"OfferId":0,"AccountIndex":2,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1654754390138,"ExpiredAt":1654761590138,"TreasuryRate":200,"Sig":"1k2/LHCg9jCQ2+S9qYW8hWRLFryR7xQU+32zmjlEAAICKm6Tlks2bqoUXLBiOe5VNwUIMJ5gwJxTKOlJbOExbA=="},"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CreatorAmount":0,"TreasuryAmount":200,"Nonce":8,"ExpiredAt":1654761590138,"Sig":"Y/HMbBEAcLfdg5+eqAo/Gz+Nq8ZHdmLbm+SRUBZAB5cASst6Eo7UiL6O7+2IGNa7lBij3RgRvPn8bcBbxEO+Ww=="}', '', '', 2, 8, 1654761590138, 24, 1); -INSERT INTO "public"."mempool_tx" VALUES (25, '2022-06-09 06:00:01.732295+00', '2022-06-09 07:09:06.283098+00', NULL, 'fa2c5d73-deab-494a-a5c1-64938d1430aa', 15, 2, '5000', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"OfferId":1,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761601706,"Nonce":9,"Sig":"XQyUjK2wFu2opRmOhCnmDtCsVFeyj0MDofWLqyQCBBQCRi4zpBIxphGhuSMkoDO1WiFWkxVaRXwINrpOKjfMug=="}', '', '', 2, 9, 1654761601706, 25, 1); -INSERT INTO "public"."mempool_tx" VALUES (26, '2022-06-09 06:00:11.597823+00', '2022-06-09 07:09:06.294273+00', NULL, '5c3021a0-15bc-42b3-a966-11b0afdb0c73', 16, 0, '5000', 1, -1, 0, '0', '', '{"AccountIndex":3,"CreatorAccountIndex":2,"CreatorAccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CreatorTreasuryRate":0,"NftIndex":1,"NftContentHash":"Bmpl0+Q5etBfsuf1DqwWBkenSGws7bvqxkYkyL7qIvE=","NftL1Address":"0","NftL1TokenId":0,"CollectionId":1,"ToAddress":"0xd5Aa3B56a2E2139DB315CdFE3b34149c8ed09171","GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1654761611582,"Nonce":2,"Sig":"sPJWFi9pTtfv6Z9zFI/QlQ2M9APJdOrWuPuq8wVnZ5kDbJkAuE2MoDA7o07rv5g/nz/cjloY2us88w1dkg+5sQ=="}', '', '', 3, 2, 1654761611582, 26, 1); - --- ---------------------------- --- Table structure for mempool_tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx_detail"; -CREATE TABLE "public"."mempool_tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8 -) -; - --- ---------------------------- --- Records of mempool_tx_detail --- ---------------------------- -INSERT INTO "public"."mempool_tx_detail" VALUES (2, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 6, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (3, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 7, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (4, '2022-06-08 
08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 8, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (5, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 9, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (6, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 10, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (7, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 11, 2, 2, -1, '', '{"PairIndex":2,"AssetAId":1,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (8, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 12, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (9, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 13, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (11, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 14, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":null}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (12, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 15, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (13, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 15, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (14, '2022-06-09 05:58:53.464815+00', '2022-06-09 05:58:53.464815+00', NULL, 16, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (15, '2022-06-09 05:58:53.464815+00', '2022-06-09 05:58:53.464815+00', NULL, 16, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (16, '2022-06-09 05:58:53.464815+00', '2022-06-09 05:58:53.464815+00', NULL, 16, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (18, '2022-06-09 05:59:00.039002+00', '2022-06-09 05:59:00.039002+00', NULL, 17, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-10000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (19, '2022-06-09 05:59:00.039002+00', '2022-06-09 05:59:00.039002+00', NULL, 17, 2, 1, 2, 'sher.legend', 
'{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (20, '2022-06-09 05:59:00.039002+00', '2022-06-09 05:59:00.039002+00', NULL, 17, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (21, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (22, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (23, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (24, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":100000,"OfferCanceledOrFinalized":0}', 3, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (28, '2022-06-09 05:59:15.072544+00', '2022-06-09 05:59:15.072544+00', NULL, 19, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (29, '2022-06-09 05:59:15.072544+00', '2022-06-09 05:59:15.072544+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (30, '2022-06-09 05:59:15.072544+00', '2022-06-09 05:59:15.072544+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (31, '2022-06-09 05:59:15.072544+00', '2022-06-09 05:59:15.072544+00', NULL, 19, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":100,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 3, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (32, '2022-06-09 05:59:15.072544+00', '2022-06-09 05:59:15.072544+00', NULL, 19, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (33, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (34, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (35, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (36, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":-100,"OfferCanceledOrFinalized":0}', 3, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (37, '2022-06-09 05:59:22.053572+00', '2022-06-09 
05:59:22.053572+00', NULL, 20, 0, 1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (1, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 5, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (10, '2022-06-08 08:25:58.053003+00', '2022-06-08 08:25:58.053003+00', NULL, 13, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":2,"NftContentHash":"abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b","NftL1TokenId":"0","NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","CreatorTreasuryRate":0,"CollectionId":0}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (17, '2022-06-09 05:58:53.464815+00', '2022-06-09 05:58:53.464815+00', NULL, 16, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (25, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":100000,"AssetBId":2,"AssetB":100000,"LpAmount":100000,"KLast":10000000000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 4, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (26, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 0, 1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (27, '2022-06-09 05:59:08.018849+00', '2022-06-09 05:59:08.018849+00', NULL, 18, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (38, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":-100,"LpAmount":-100,"KLast":9980200000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 5, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (39, '2022-06-09 05:59:22.053572+00', '2022-06-09 05:59:22.053572+00', NULL, 20, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (41, '2022-06-09 05:59:29.310534+00', '2022-06-09 05:59:29.310534+00', NULL, 21, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (43, '2022-06-09 05:59:35.370489+00', '2022-06-09 05:59:35.370489+00', NULL, 22, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (44, '2022-06-09 05:59:35.370489+00', '2022-06-09 05:59:35.370489+00', NULL, 22, 2, 1, 3, 'gavin.legend', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (46, '2022-06-09 05:59:35.370489+00', '2022-06-09 05:59:35.370489+00', NULL, 22, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (47, '2022-06-09 05:59:43.912534+00', '2022-06-09 05:59:43.912534+00', NULL, 23, 0, 1, 3, 'gavin.legend', 
'{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (48, '2022-06-09 05:59:43.912534+00', '2022-06-09 05:59:43.912534+00', NULL, 23, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (49, '2022-06-09 05:59:43.912534+00', '2022-06-09 05:59:43.912534+00', NULL, 23, 1, 3, 2, 'sher.legend', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":2,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (50, '2022-06-09 05:59:43.912534+00', '2022-06-09 05:59:43.912534+00', NULL, 23, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (51, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (52, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":-10000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (53, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (54, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":9800,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (55, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 4, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (60, '2022-06-09 06:00:01.734295+00', '2022-06-09 06:00:01.734295+00', NULL, 25, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (61, '2022-06-09 06:00:01.734295+00', '2022-06-09 06:00:01.734295+00', NULL, 25, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":3}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (62, '2022-06-09 06:00:01.734295+00', '2022-06-09 06:00:01.734295+00', NULL, 25, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (63, '2022-06-09 06:00:11.599038+00', '2022-06-09 06:00:11.599038+00', NULL, 26, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (64, '2022-06-09 06:00:11.599038+00', '2022-06-09 06:00:11.599038+00', NULL, 26, 1, 3, -1, '', '{"NftIndex":1,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (65, '2022-06-09 06:00:11.599038+00', '2022-06-09 06:00:11.599038+00', NULL, 26, 0, 1, 2, 'sher.legend', 
'{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (66, '2022-06-09 06:00:11.599038+00', '2022-06-09 06:00:11.599038+00', NULL, 26, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (40, '2022-06-09 05:59:29.310534+00', '2022-06-09 05:59:29.310534+00', NULL, 21, 0, 4, 2, 'sher.legend', '1', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (42, '2022-06-09 05:59:29.310534+00', '2022-06-09 05:59:29.310534+00', NULL, 21, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (45, '2022-06-09 05:59:35.370489+00', '2022-06-09 05:59:35.370489+00', NULL, 22, 1, 3, 3, 'gavin.legend', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (56, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 3); -INSERT INTO "public"."mempool_tx_detail" VALUES (57, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 1, 3, -1, '', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 6, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (58, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":200,"LpAmount":0,"OfferCanceledOrFinalized":0}', 7, 4); -INSERT INTO "public"."mempool_tx_detail" VALUES (59, '2022-06-09 05:59:50.16896+00', '2022-06-09 05:59:50.16896+00', NULL, 24, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 8, 4); - --- ---------------------------- --- Table structure for offer --- ---------------------------- -DROP TABLE IF EXISTS "public"."offer"; -CREATE TABLE "public"."offer" ( - "id" int8 NOT NULL DEFAULT nextval('offer_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "offer_type" int8, - "offer_id" int8, - "account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default", - "listed_at" int8, - "expired_at" int8, - "treasury_rate" int8, - "sig" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of offer --- ---------------------------- - --- ---------------------------- --- Table structure for proof_sender --- ---------------------------- -DROP TABLE IF EXISTS "public"."proof_sender"; -CREATE TABLE "public"."proof_sender" ( - "id" int8 NOT NULL DEFAULT nextval('proof_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "proof_info" text COLLATE "pg_catalog"."default", - "block_number" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of proof_sender --- ---------------------------- -INSERT INTO "public"."proof_sender" VALUES (1, '2022-06-09 07:23:24.586408+00', '2022-06-09 07:23:24.586408+00', NULL, 
'{"A":[5357907446709718487941986733309225406048435776139431945762925093327186667256,17496725062937861203898650287725858704516594988593501934567880265799491463177],"B":[[14720521433584626624325963016940251896438503185693768206360980248724179708363,18375239315756568022095442647985200911749918229485560905531441891961308708856],[16363043350587558836741133002397756467217077564923596019135116682493418534006,5596519350005502017924021801867967131722550642201664380726442341779855698916]],"C":[1850693862752086122306022103343831655605374862562030081903491489803751696327,5443157994089927105172260006038197925773595247560597214530383901744905578760],"Inputs":[9450703979270269782239153154655868962158804821639950878322637986092829620047,15043264495212376832665268192414242291394558777525090122806455607283976407362,11963247688191561196873225082343613795306480307795283451691052361457074285021]}', 1, 0); -INSERT INTO "public"."proof_sender" VALUES (2, '2022-06-09 07:23:44.643141+00', '2022-06-09 07:23:44.643141+00', NULL, '{"A":[20813507163771042917946551071764721159056224597776408025052785981323338481154,10921825533225065035430794056222068611765401207937070688882451768008661226777],"B":[[15486517432908393408144952664765811446772407320369480943565640740700026795462,17674942133842931647379190043665571853160939366208023700909488416882045291640],[16199502416665520960377122180660905576906512074109147977536118367276226939751,12064251941308843097075711141155291440613675702020429411506723259048251240700]],"C":[16681275058732350549969434485232133225813187897612271968303104552461518982258,20050623136967140394695099894279884077307717504336685541180071165681973942195],"Inputs":[15043264495212376832665268192414242291394558777525090122806455607283976407362,12297177442334280409244260380119123763383333089941937037498066619533324895858,1945871703106592192445527630015428020428207196346331000444516874376214077870]}', 2, 0); -INSERT INTO "public"."proof_sender" VALUES (3, '2022-06-09 07:23:54.527389+00', '2022-06-09 07:23:54.527389+00', NULL, '{"A":[7559736663398274886813521875027328296080906357658551100428005329404128126355,4573291067759265048831985704695636350001464683771328909544384358380876318135],"B":[[19410483811700283141714932375622551750479122510836369528558895173524035900853,6795018017361741386612848268111959380245189784456180212424711313656215623940],[15202464355920779518305434970816636317493593488258678323070164540041171347404,14447803888624990866915689832929528497877374236470736218388819527564579233453]],"C":[17627580096248132224088972877089435327314530498914390280315000907016139621845,4249624274470976318203955964841436803380468133755247436840408256085294282023],"Inputs":[12297177442334280409244260380119123763383333089941937037498066619533324895858,11118933918917677552118113339774603267482948853529286163697696035860800912124,4141452066739870661498648949772962314122049597784115302113989740758522105342]}', 3, 0); -INSERT INTO "public"."proof_sender" VALUES (4, '2022-06-09 07:24:04.538222+00', '2022-06-09 07:24:04.538222+00', NULL, 
'{"A":[9110103407963974868366728464799245078858067689910507226822459220207506272499,21719474617242243481844770115660891794725263698349330338195525371390808817327],"B":[[21389161538241386313330361588428069577956957114833598623812545981430584522601,12971960139169430699066909962695468630217732692054862567443912236352393104033],[20509884633768299055224189396400437941908017883338859122321435747087159532373,15819007506379581663541742336647097354994209799108208341565219065761245952556]],"C":[19873323619447981526452307419853499863685799395091206487134024403814099312748,6119875941814807106675237921726989378323319043345040438007083815484556674579],"Inputs":[11118933918917677552118113339774603267482948853529286163697696035860800912124,3934520836078457484774962454656757098464038511289928405982572228040277672631,16357933347269012595297401294074469152155600812034184911699282662045711091062]}', 4, 0); -INSERT INTO "public"."proof_sender" VALUES (5, '2022-06-09 07:24:14.518489+00', '2022-06-09 07:24:14.518489+00', NULL, '{"A":[15533918622537399015518237774264923161077934783910189699836003437656124636309,5069299624683444574823466007839504469631626285727350111939705844716777430979],"B":[[3744919964186335507197012330680948825493812403368725511327036779440140104980,7492296653040452746904231660511773202247564846231760948945037281248294183181],[8965723521881953179017783714000090715251224020206188942678134208190811516624,7797531568507494197968822873157266008002521381654473396471317662493892575082]],"C":[6265171476735348143148882338652859440161392921916648421009933142497839856567,10921850148220087867261903715064576494902109547006503271526089466913338341096],"Inputs":[3934520836078457484774962454656757098464038511289928405982572228040277672631,16025607879873774270539617119489323551891610600919089473988379724010068742191,12961477835462357069095357285869702333916507490943518131419162928549401849131]}', 5, 0); -INSERT INTO "public"."proof_sender" VALUES (6, '2022-06-09 07:24:24.51569+00', '2022-06-09 07:24:24.51569+00', NULL, '{"A":[12775481537775540353198771706037410309403565868973758418831274624796609709031,12563220958406810787093974273390792396340258681839358919355603843534017576407],"B":[[466766940151359509587006629080093685334850865423711940572706407903099898485,8113475061863033439057383092505483824758758712632932303197768364205513767414],[8619352988342772426438347244630686188715154751181596645552301889409243178922,4871484385926229301046871359466334901001342722779358695691716172665669796423]],"C":[14671499948976426331719441470204431306391140342047201077417051680238410706345,20905717683481457094817392586169937372173832875383492533339292915438773370549],"Inputs":[16025607879873774270539617119489323551891610600919089473988379724010068742191,1182007653870860980188901032313239440165930770789334793344276798172967050151,10900582511275367572049526641256039768650071979656307171225251307845332413143]}', 6, 0); -INSERT INTO "public"."proof_sender" VALUES (7, '2022-06-09 07:24:34.61646+00', '2022-06-09 07:24:34.61646+00', NULL, 
'{"A":[19736957023120166357001005889444490685321584374642961893365372621327616694990,19392065716103988721648074939960091280107501528042542745581432276521263510412],"B":[[571492896427163448890807461782650204657343510215215625757504010828886836382,8001850526536950477450814216227647275767248232511233069549157099510638871718],[15399770455289783494841448938457475291997723025011314246777739774075988801854,6953533655578395070391182841774083704495115922140550198572691463822431464216]],"C":[12293068457310743600605512778002935015544221667513259377039012799901122256184,20161071113916839973112348786702810815193408038299265761391507217557745590211],"Inputs":[1182007653870860980188901032313239440165930770789334793344276798172967050151,17094011329777467206340521509783369268317290252369917384669054004578361939180,15017229726478937929116658928635635052098058456225427220340952831123077394838]}', 7, 0); -INSERT INTO "public"."proof_sender" VALUES (8, '2022-06-09 07:24:44.505085+00', '2022-06-09 07:24:44.505085+00', NULL, '{"A":[5980924307279874344680032233475410884184538889970060347530687502031738791447,21287811521893823900630817973519994854181255263477570592182999332351498328517],"B":[[529751275497961151962680821055254987754056664988591178253218172601907431388,5726880856402803987695595811737231271996887951427266623691512124984038042124],[7811601596705273184505367807438448214366325755942922992393197559446165768496,17344914781957304781850965204935074254591824007899403656719967166506658016449]],"C":[15814752059536845242306798590880089606594520429855000946339129876488798000392,17775757703161992670597128874999787610476786657426988862225264557367667309266],"Inputs":[17094011329777467206340521509783369268317290252369917384669054004578361939180,10689577469853096804426857094028199763247558905267837019735861787144318922388,16873122977431782423632070671545326080911017857820594834326133253531214104424]}', 8, 0); -INSERT INTO "public"."proof_sender" VALUES (9, '2022-06-09 07:24:54.49822+00', '2022-06-09 07:24:54.49822+00', NULL, '{"A":[17298168395729437977041039664220119339465364533281654677311938416691559070801,4271068355456674107335747387035117575005876702521403663596060266745310486890],"B":[[16270513231831194845343763638164526208133394145718804726859532012475995303773,17414153493171324446566093008114435376532165611209940832002789465557349791412],[785565747771497805408072558414309406441868066607847606639392630449005797289,7662969918737420327668916565397411820649711028656723319111596087480064056168]],"C":[11739174259408060548826038043849794049356355754716408122867437867125992732497,8893477733239534739317139667111081321007948604776245868588416718554518008063],"Inputs":[10689577469853096804426857094028199763247558905267837019735861787144318922388,6948952673628372168875450143833765144025527013713566967806168545755060380858,2381018844473592730594362683078647128924834795096405048545540899272736150108]}', 9, 0); -INSERT INTO "public"."proof_sender" VALUES (10, '2022-06-09 07:25:04.533155+00', '2022-06-09 07:25:04.533155+00', NULL, 
'{"A":[15521017184051992261896355549794298246534652628121152266063070688366730452162,13961508868962569273505124249185255794900260440581779157951474985385748174470],"B":[[17065657997897318674948795907310757449530732001037734380186702211180214299746,15677243625116285317847613474005417091647205554397557018353889845576538878865],[19390619060562936877588984907791381849976304337491405690373860956341962708934,5563341927983312698473895099760917317701486515061397527318804189948823286630]],"C":[16597160865679091783441187860804896114304069725353186870292748219295416554721,2394595445592421482288547744316437281444854255474176523427122295346372419127],"Inputs":[6948952673628372168875450143833765144025527013713566967806168545755060380858,4193345583120754934140829490350309912111035558299556830065423384594352669901,20271788291865604819762498640632971866147076040033239944791480616232093581018]}', 10, 0); -INSERT INTO "public"."proof_sender" VALUES (11, '2022-06-09 07:25:14.491108+00', '2022-06-09 07:25:14.491108+00', NULL, '{"A":[10690123035117347435583218686434724670922995232403377702074029183849931986601,11614324004353826540263410033076139666145259771313223397148950306079058192104],"B":[[2000851832757796106490606948503810002054532222084721787129197252803428227728,2979971793942668255925708494679646258534614415906908614190155356821720342782],[2311482297729564514731645320034205088354037661620700235056820462166900664425,17847514665886669859493612822919360619813712229216796895445963611376311242076]],"C":[3904059735216286784462585874468716885727990103665582715792194935387028735974,14307128667942802919098261629321726611748388900225190312728863743939670107572],"Inputs":[4193345583120754934140829490350309912111035558299556830065423384594352669901,10152064816703520911570552026983147930837787570518811408541967275193207832861,8586895846168170365040413310156165056878705110488484396408283666332873867086]}', 11, 0); -INSERT INTO "public"."proof_sender" VALUES (12, '2022-06-09 07:25:24.663412+00', '2022-06-09 07:25:24.663412+00', NULL, '{"A":[10159975002397651137232468947306723784663524120049685253062346380189043876645,11406588679264760619545135428596927996095766051967155205361292168064406762956],"B":[[21485535930335108169767673721139457748766431533328656866258396985007395593322,11483195628712409573411933921150045560230569903877254153789567973343214209489],[8377597436709839824938622408128856671090541846011514103291544481895408030905,12358524440356972678169391602672492180796154765740788494658455382769151527068]],"C":[16165101008492526683951381872986995534701186382159210474925499479976069214227,15432847146843294548990405680486435645749480783451120247215124966198786454914],"Inputs":[10152064816703520911570552026983147930837787570518811408541967275193207832861,4041848711751034178697814175798083696278503514951292272975039336585530128677,7813894203082824048002111901200503984007958306255276001456868296408697822876]}', 12, 0); -INSERT INTO "public"."proof_sender" VALUES (13, '2022-06-09 07:25:34.508794+00', '2022-06-09 07:25:34.508794+00', NULL, 
'{"A":[10261566542587671522158701555391897278151906206660783292567297598068776634404,14431967531693461723360728030346792766815705294739312195077940320409189664739],"B":[[15098950899127777846090369607864746915518927985834483464449391535208274309950,20643275506250556064271603846631212093790052706946095891782513042968936043553],[4514615009399575525815224706513983786443715249372435337001216796291604905574,20662716222622625024266833655160998686051845829662553482552997846267247812557]],"C":[14384476812401774943946787938840653353027237830566400231649510365669251514828,20074829254704778631937473514626755917209730062309287218455118244563346860026],"Inputs":[4041848711751034178697814175798083696278503514951292272975039336585530128677,16271226640539965147245273017495976697549012047434316200568775079391086367406,21140244431992199370190780936908877210928676351903003902465868132982527969590]}', 13, 0); -INSERT INTO "public"."proof_sender" VALUES (14, '2022-06-09 07:25:44.50934+00', '2022-06-09 07:25:44.50934+00', NULL, '{"A":[2210158445496352120899045035190168685146098850684767849659993025219098074105,10713476324707801711837733445879892899723197674209918769939423961418422183906],"B":[[810160846127514362971089065671508266995730849990827791074389192241995082479,17124714650811796689345365984747471876548722425821929813372000519204243123207],[11501093053962789887708697857007171022988320622370497060069208049036375154072,9988260441357861754140739934979132018062785935350555268885500845009532139197]],"C":[14073647615142636673801731120126325000171724717440383555614408416935944109915,19995541106659746044602934413206860303224000396734660878547845845329391686974],"Inputs":[16271226640539965147245273017495976697549012047434316200568775079391086367406,10639295657989775574086409957947829148199615559969022728688638593216982037730,14244661216982820488005781571188745917102390506429749184784599384196934352272]}', 14, 0); -INSERT INTO "public"."proof_sender" VALUES (15, '2022-06-09 07:25:54.499472+00', '2022-06-09 07:25:54.499472+00', NULL, '{"A":[17571712050418197449385769643123403538568730885831817467140695867950742576096,3090941610870382185102534174225411338274127812102697732643424632237105650634],"B":[[3369613931290973248116149070356946993596232435753033083434632596692438477339,18366942516192635738122891200923533303260759803093492328031426562456172291703],[16896288962244998074529166931355136611523654181929170060647399444920873404819,7119856041363768320082243762237331758563029093817707992004535650778247565643]],"C":[1840248247603955387301578228061851895876391391526584490102386947110025383239,1893584611121116190202568066637880678400976517808952365283486707485518827515],"Inputs":[10639295657989775574086409957947829148199615559969022728688638593216982037730,18544100231407746896690739092020187337464474124217572106403296132870486599844,7849209998090739933837234226634748060565161797241883646562575696184552731303]}', 15, 0); -INSERT INTO "public"."proof_sender" VALUES (16, '2022-06-09 07:26:04.528534+00', '2022-06-09 07:26:04.528534+00', NULL, 
'{"A":[2126836860157825719241372547050490994950916536601448821227821491730516016811,13089882072162797154796234962607920289216749510915300142391196912148875925630],"B":[[13659561965717366527017806818607480486672342922075516437810500485021637136675,2496538948713319851218392543472407434180517047990867205408257741031808477045],[15094687796192020436386343827450550386224173671744781873726481980329008843262,867732462846363625180283057195445738889659048593905680925877170955622438867]],"C":[9704168160272942752392608817731304970934678699143790116321686366666394590413,4122674166104500008511969464450669713016489254905378284799298012930487000827],"Inputs":[18544100231407746896690739092020187337464474124217572106403296132870486599844,5586088040550485664848597881510203165901247035169583122754084497094449251134,3625936899631428556066012573882141829798170239079403930433757352539709158032]}', 16, 0); -INSERT INTO "public"."proof_sender" VALUES (17, '2022-06-09 07:26:24.48907+00', '2022-06-09 07:26:24.48907+00', NULL, '{"A":[8021825240282321891602795145832694810174683334757724685843445828289183743411,9797806148552150218332209743807309164594090867572113559113072043118468089958],"B":[[13863111205755049631187565537917882894965629334513264946003524866530026042856,11209558927815688530042307249100566886043360494201459472397024212479453599141],[1458525839525311687666868325725678981703071909829436207947447624093353197195,14371955880321935916965723312134766118885105402963244880434574586878625571850]],"C":[5142659326844849799389371635098315968708084820094983091627390207520198145902,10975790742628854149436194753636462470319330706494819659909605694454074916861],"Inputs":[5586088040550485664848597881510203165901247035169583122754084497094449251134,20471331031958273670946341121221367175878128316941189710493194485442977281812,21503538493464361595391979114337605806935242050381727476274743294853677383658]}', 17, 0); -INSERT INTO "public"."proof_sender" VALUES (18, '2022-06-09 07:26:34.515311+00', '2022-06-09 07:26:34.515311+00', NULL, '{"A":[524446151924457502629782649096180450887763800184045018702064673837856878801,8645493695716812053638687555484455361153079383536366362474845175164849280430],"B":[[13214392869132535758338359438094175432538398792575925524242672252312289975848,17446421255848264570787336455552175374432005980647957336096459498263244110182],[8956035534231479808117633904629365467408603959678606431087745192681877660792,2096584104701727630376880684056131272718140728642981244842879176486428113166]],"C":[8356912430786065703682252072677821589280512372740410081300042583681741620653,17092389843809185642541566923425735468618940902636325750934483545031198522812],"Inputs":[20471331031958273670946341121221367175878128316941189710493194485442977281812,8173166197544277304186478691258384071873880483015113628785610445658683442517,14844503571774079792674155527073126612056383359258218655941264775591156150707]}', 18, 0); -INSERT INTO "public"."proof_sender" VALUES (19, '2022-06-09 07:26:44.506942+00', '2022-06-09 07:26:44.506942+00', NULL, 
'{"A":[16614803823762227265602542762562641617849651049582737327882059707161533543297,1989647283631880081800578290111135047905000283185640783386107664754726608866],"B":[[20517483818085564818237112031668130358941958900214193134365496704834545814495,19992039921266405329973970711495463926957582252208064666206277052770185140352],[20827926094190320228662724871873283173549730499119631069740628945187691395199,18605260650007427798312238830058849173731293343524854304739735661339975496477]],"C":[7135967049520761871891773095894762069023434730315873135828697605034781859080,10353166813353644128713769387067055957556937429813052188920135431420516271602],"Inputs":[8173166197544277304186478691258384071873880483015113628785610445658683442517,13442140803681527408895133679865362700517560901608420346744235869865161259829,6333711030315459658295063134508763993660996977628803990603795678616206379198]}', 19, 0); -INSERT INTO "public"."proof_sender" VALUES (20, '2022-06-09 07:26:54.490118+00', '2022-06-09 07:26:54.490118+00', NULL, '{"A":[6132310064061812917597796263388370286946492790233537399322889464402543655586,19501022468640836101914191464949093401739722695796254822615620304574394511496],"B":[[10564932849935093467524018260797185368528626335439755795496815498065708499361,12181575049670227291538072836469390186022690793637974386390835441070364460172],[17270157245668942413517738043529004891926380047387686288612710002861761976772,5020610668548115993849743727814694147202074122182742214407662069230058251452]],"C":[20111335642937765370660225974752385554013986571008863223283829441596809046862,17277203220361190408934792092857832556440285354519614185589057268311960712867],"Inputs":[13442140803681527408895133679865362700517560901608420346744235869865161259829,21047623044075927563049268030940086099536328266902531926068000182657135421142,780423291219507366819846711946964749186412840151552921123168758150621974449]}', 20, 0); -INSERT INTO "public"."proof_sender" VALUES (21, '2022-06-09 07:27:04.491762+00', '2022-06-09 07:27:04.491762+00', NULL, '{"A":[16204880395453769651183471645582504007136579741357249653408735404807902775575,11276361857025370970975160193182912988905786419776451433719369209273371611236],"B":[[1200179618287056260178250055792404941220317454194816388154861357457628691214,6143279746262085118120942236486502237997359991984043994710272891856626082483],[14580391975815040214423623849462454542400218822168596850723208790989682217681,19558548856321681450690854261761759117678761694861749630042902389894356441783]],"C":[18873465828122548482914840778283129050244062092889099188257531303887505408914,19901832500179328790914736663559185779975220202535263730181407432683217093314],"Inputs":[21047623044075927563049268030940086099536328266902531926068000182657135421142,9057099176725074620829751149253261324171759863292310461253640687639650844381,6696999952182686226580887420154672517510273548902301225211986252493422045748]}', 21, 0); -INSERT INTO "public"."proof_sender" VALUES (22, '2022-06-09 07:27:14.521571+00', '2022-06-09 07:27:14.521571+00', NULL, 
'{"A":[14562964723383369250614892264240850983218951872823483654716940872994934168222,18714665548629189509729781458902633894127532066976957075778926500601573130917],"B":[[10460335902891958301152056783471155320860960591164890155102139766864344954914,9702800026805007716668661710187751346083240230687670446136239080903120554346],[4675988947523719050292265611283630343218199784368931232743768167455679434561,7844147298526505729646700381208644021845085587734624963556951982625113881763]],"C":[18252077589966946687951281102899931594068265217916094749109049683522583514675,10549164743029520638807373574510828809421021652485838728206134173091971953915],"Inputs":[9057099176725074620829751149253261324171759863292310461253640687639650844381,8382471479713636303903675749137703248387104876156021657409718717386550288255,13792338193516504167605049995473338407598939251269101016710219761823104199464]}', 22, 0); -INSERT INTO "public"."proof_sender" VALUES (23, '2022-06-09 07:27:24.533483+00', '2022-06-09 07:27:24.533483+00', NULL, '{"A":[2926703581978961895068066663533236177576491453879253105032048888122012070666,7565050302237490214998916833549598732924889515843145342497594450511354526710],"B":[[20983553324602106415744502450751001321320010849139907626154686689590803824187,20952076097787923848866419557958010591451541910355446375784730028915647228249],[18566066011684678568885649123674957074249585518641554932716632277292855153842,6470374219420229210187114229231342604112241790502587950558902833071586584090]],"C":[18869937697941340802318991706403654013215397448685130150843326018726535552502,8399006894026466651718959278957840411670575275113677591162226392583051491877],"Inputs":[8382471479713636303903675749137703248387104876156021657409718717386550288255,11665027831340046981101462667750854285054296068534204360488168379139618721774,4408301714000765206938897260593154811260687353421837370346725156225419671552]}', 23, 0); -INSERT INTO "public"."proof_sender" VALUES (24, '2022-06-09 07:27:34.520581+00', '2022-06-09 07:27:34.520581+00', NULL, '{"A":[8613180370901318718535658826393657157718755207167640216722179502090122844596,7971365462724084651970567060770767223533461273856821699255777023502432582047],"B":[[11516845629720987684566185779404053777946172153484679065223422801568720688765,1054292701819868932735155683802379429757984484931516139527833884187119639764],[17222905044158934511657582945725149487047980273853254990095469965574557553059,17477985489841557074413411764295171684047570166779175602095250545750914557380]],"C":[15777708239550131531845517976436796320872537263437302331163260884736381117735,14019692207264213807601540972115716111720730295561640445545636361972264817784],"Inputs":[11665027831340046981101462667750854285054296068534204360488168379139618721774,8818957056799975707280962631258144156165571563895234601332200658646258521673,3643790213631949638864011697999856248793190006166832594111498243928238663307]}', 24, 0); -INSERT INTO "public"."proof_sender" VALUES (25, '2022-06-09 07:27:44.500311+00', '2022-06-09 07:27:44.500311+00', NULL, 
'{"A":[15907645387255906742293391976489550924924599442618279118513345514194139207749,14299252587697908068611513008159920094383486195374171300518689111245539354090],"B":[[1799064402919018012127201967533560600332495390927724231009113377096689031408,7411343843117199858564464170634859204350865750826298776609685467388249520344],[9779149591180700989360511827703341472063602823885947050689112328670212205416,9342713518381803847579955466085361551677908426489131536216915959174949180491]],"C":[16563916302308965019361984399780929675350645386842313591102501581696612381506,9966615749352168923007071815883740598284539694814305279346358061010472882966],"Inputs":[8818957056799975707280962631258144156165571563895234601332200658646258521673,9956559373054189521626153440752156380238465255941779671710647272705605843262,8240640732309855160099724147307196178791653968869763265416528684501512078130]}', 25, 0); -INSERT INTO "public"."proof_sender" VALUES (26, '2022-06-09 07:27:54.547263+00', '2022-06-09 07:27:54.547263+00', NULL, '{"A":[13278734818578729076788456409748139707332537534106790890065762058518357911317,732301948154512214339264190683228344624000693108116330381339967333728024523],"B":[[8062897158964664459698142749075207797521593608674749601607549387249478838369,6406497615395643218977142812598202091354173200192125929125964380656165127531],[13866921034158733047477149440298064321847924367044897483057198282082356920207,8558319630986355638688323043606199717274600482790105366148830850597179876078]],"C":[20413444276586054130791024748995999792808863624570698781301686567290319680246,556709541424462817359791800349120421572936831535638901907151249589940357489],"Inputs":[9956559373054189521626153440752156380238465255941779671710647272705605843262,17889387022434688957087371144479155250645584381568910126928831640401314877701,113415291948729229580264397657414532692480125788587862562577637314771872980]}', 26, 0); - --- ---------------------------- --- Table structure for sys_config --- ---------------------------- -DROP TABLE IF EXISTS "public"."sys_config"; -CREATE TABLE "public"."sys_config" ( - "id" int8 NOT NULL DEFAULT nextval('sys_config_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "name" text COLLATE "pg_catalog"."default", - "value" text COLLATE "pg_catalog"."default", - "value_type" text COLLATE "pg_catalog"."default", - "comment" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of sys_config --- ---------------------------- -INSERT INTO "public"."sys_config" VALUES (1, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'SysGasFee', '1', 'float', 'based on ETH'); -INSERT INTO "public"."sys_config" VALUES (2, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'MaxAssetId', '9', 'int', 'max number of asset id'); -INSERT INTO "public"."sys_config" VALUES (3, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'TreasuryAccountIndex', '0', 'int', 'treasury index'); -INSERT INTO "public"."sys_config" VALUES (4, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'GasAccountIndex', '1', 'int', 'gas index'); -INSERT INTO "public"."sys_config" VALUES (5, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'ZecreyLegendContract', '0x39c6354FdB9009E15B4006205E5Aa4C08c558c35', 'string', 'Zecrey contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (6, '2022-06-08 08:24:27.338662+00', '2022-06-08 
08:24:27.338662+00', NULL, 'GovernanceContract', '0x5b7adDf0882aB683E5aC0BD880830eb0947B2BD1', 'string', 'Governance contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (7, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'BscTestNetworkRpc', 'http://tf-dex-preview-validator-nlb-6fd109ac8b9d390a.elb.ap-northeast-1.amazonaws.com:8545', 'string', 'BSC network rpc'); -INSERT INTO "public"."sys_config" VALUES (8, '2022-06-08 08:24:27.338662+00', '2022-06-08 08:24:27.338662+00', NULL, 'Local_Test_Network_RPC', 'http://127.0.0.1:8545/', 'string', 'Local network rpc'); -INSERT INTO "public"."sys_config" VALUES (9, '2022-06-08 08:24:56.738464+00', '2022-06-08 08:24:56.738464+00', NULL, 'AssetGovernanceContract', '0x4C7B3D1c2aafcE6Ca3a7c35c25fC717178565DE2', 'string', 'asset governance contract'); -INSERT INTO "public"."sys_config" VALUES (10, '2022-06-08 08:24:56.738464+00', '2022-06-08 08:24:56.738464+00', NULL, 'Validators', '{"0x9A973e0b7dB1935Ffb59Ff35272332e6feE00182":{"Address":"0x9A973e0b7dB1935Ffb59Ff35272332e6feE00182","IsActive":true}}', 'map[string]*ValidatorInfo', 'validator info'); -INSERT INTO "public"."sys_config" VALUES (11, '2022-06-08 08:24:56.738464+00', '2022-06-08 08:24:56.738464+00', NULL, 'Governor', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', 'string', 'governor'); - --- ---------------------------- --- Table structure for tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx"; -CREATE TABLE "public"."tx" ( - "id" int8 NOT NULL DEFAULT nextval('tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "block_height" int8, - "block_id" int8, - "state_root" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8 -) -; - --- ---------------------------- --- Records of tx --- ---------------------------- -INSERT INTO "public"."tx" VALUES (1, '2022-06-09 07:09:06.044514+00', '2022-06-09 07:09:06.044514+00', NULL, '9f5005a9-e704-11ec-b6f3-988fe0603efa', 1, '0', 0, 1, 1, 2, '21422f9bebac15af8ddc504da0dbb88020c1a4de7e7b6722fe00acb0ed968942', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":0,"AccountName":"treasury.legend","AccountNameHash":"FnxTYwiKQKSDmRKocvQxZCcHQMfphuxVOXstWDMXq0o=","PubKey":"fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998"}', '', '', 0, 0, 0); -INSERT INTO "public"."tx" VALUES (2, '2022-06-09 07:09:06.057049+00', '2022-06-09 07:09:06.057049+00', NULL, '9f50d170-e704-11ec-b6f3-988fe0603efa', 1, '0', 0, 1, 2, 3, '1b2ff4ae0d507a971fb267849af6a28000b1d483865c5a610cc47db6f196c672', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":1,"AccountName":"gas.legend","AccountNameHash":"CkjpiSpFoE0MWw8jWjrrB7khN7pxpZucRXd0uv3pWYM=","PubKey":"1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93"}', '', '', 1, 0, 0); -INSERT INTO "public"."tx" VALUES (3, '2022-06-09 07:09:06.066023+00', '2022-06-09 07:09:06.066023+00', NULL, 
'9f50f093-e704-11ec-b6f3-988fe0603efa', 1, '0', 0, 1, 3, 4, '189517f4cfb59471e3539dae36b8f53cb1264d407daf6afbf86132917f1cbafc', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":2,"AccountName":"sher.legend","AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","PubKey":"b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85"}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (4, '2022-06-09 07:09:06.073642+00', '2022-06-09 07:09:06.073642+00', NULL, '9f510cb1-e704-11ec-b6f3-988fe0603efa', 1, '0', 0, 1, 4, 5, '08b2dc20da16235e692de317d6134578159532d4f081827bd29a5fc783fcc2b7', -1, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":1,"AccountIndex":3,"AccountName":"gavin.legend","AccountNameHash":"HFTAnJj3renV7rpBJKx8kS5laZo/dvpl1x6vY1nZvOs=","PubKey":"0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e"}', '', '', 3, 0, 0); -INSERT INTO "public"."tx" VALUES (5, '2022-06-09 07:09:06.079888+00', '2022-06-09 07:09:06.079888+00', NULL, '9f510cb1-e704-11ec-b6f4-988fe0603efa', 4, '0', 0, 1, 5, 6, '236e2c312a52cfbe96fc14a0693ea0f26d59fae774b35d44ddcf7737d965902f', -1, -1, 0, '100000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (6, '2022-06-09 07:09:06.088848+00', '2022-06-09 07:09:06.088848+00', NULL, '9f510cb1-e704-11ec-b6f5-988fe0603efa', 4, '0', 0, 1, 6, 7, '029cfe1c99565d3722f32b6bdb4ee5740d4f4c78bf318968c366c9c7e82d9ba7', -1, -1, 0, '100000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":3,"AccountNameHash":"HFTAnJj3renV7rpBJKx8kS5laZo/dvpl1x6vY1nZvOs=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 3, 0, 0); -INSERT INTO "public"."tx" VALUES (7, '2022-06-09 07:09:06.096384+00', '2022-06-09 07:09:06.096384+00', NULL, '9f510cb1-e704-11ec-b6f6-988fe0603efa', 4, '0', 0, 1, 7, 8, '25cade17a4affef4114a06b8ae6e8e18651a8c4aa0aa01e1c20abce23ad614ec', -1, -1, 1, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (8, '2022-06-09 07:09:06.103059+00', '2022-06-09 07:09:06.103059+00', NULL, '9f510cb1-e704-11ec-b6f7-988fe0603efa', 4, '0', 0, 1, 8, 9, '17a21620fe89a6ef610ceea7b2c6230dba84731020a11bd081b46ba23c1cae94', -1, -1, 2, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":2,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (9, '2022-06-09 07:09:06.111132+00', '2022-06-09 07:09:06.111132+00', NULL, '9f510cb1-e704-11ec-b6f8-988fe0603efa', 2, '0', 0, 1, 9, 10, '0f5cf7c3fa8452ccb12d87b99952cfde059999f3767ddbc032994d94f3fe24ba', -1, 0, 0, '0', '0', '{"TxType":2,"PairIndex":0,"AssetAId":0,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0); -INSERT INTO "public"."tx" VALUES (10, '2022-06-09 07:09:06.120087+00', '2022-06-09 07:09:06.120087+00', NULL, '9f510cb1-e704-11ec-b6f9-988fe0603efa', 2, '0', 0, 1, 10, 11, '0945597849e7df9b43bfade724068c4d5a9d6039da208e6b829feb530ce784cd', -1, 1, 0, '0', '0', 
'{"TxType":2,"PairIndex":1,"AssetAId":0,"AssetBId":1,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0); -INSERT INTO "public"."tx" VALUES (11, '2022-06-09 07:09:06.127598+00', '2022-06-09 07:09:06.127598+00', NULL, '9f510cb1-e704-11ec-b6fa-988fe0603efa', 2, '0', 0, 1, 11, 12, '1671dd749a5a522f18908e28512d1c6c10034740923bbe9bab5664585b87411d', -1, 2, 0, '0', '0', '{"TxType":2,"PairIndex":2,"AssetAId":1,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0); -INSERT INTO "public"."tx" VALUES (12, '2022-06-09 07:09:06.134315+00', '2022-06-09 07:09:06.134315+00', NULL, '9f510cb1-e704-11ec-b6fb-988fe0603efa', 3, '0', 0, 1, 12, 13, '08ef9af5048b3df61fe3bd025a8db3f47b591a0136281cb3325e7e89930f3925', -1, 1, 0, '0', '0', '{"TxType":3,"PairIndex":1,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', '', '', -1, 0, 0); -INSERT INTO "public"."tx" VALUES (13, '2022-06-09 07:09:06.143621+00', '2022-06-09 07:09:06.143621+00', NULL, '9f51d005-e704-11ec-b6fb-988fe0603efa', 5, '0', 0, 1, 13, 14, '23f9301b57dbde40b067fc04f2bb2e5241b58739845efa223de352a8a14dd2ae', 0, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":5,"AccountIndex":2,"NftIndex":0,"NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftContentHash":"q9G2rnlQf3tKMqhKtklbyf7mdFDtMW27p2us6KPFGXs=","NftL1TokenId":0,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CollectionId":0}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (14, '2022-06-09 07:09:06.153872+00', '2022-06-09 07:09:06.153872+00', NULL, '9f51d005-e704-11ec-b6fc-988fe0603efa', 17, '0', 0, 1, 14, 15, '1785a0c0ef9c282c5dddde78ad80b9689d34cda4a59ed35fcf4f00966ff034e2', -1, -1, 1, '100000000000000000000', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":17,"AccountIndex":2,"AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (15, '2022-06-09 07:09:06.161637+00', '2022-06-09 07:09:06.161637+00', NULL, '9f51d005-e704-11ec-b6fd-988fe0603efa', 18, '0', 0, 1, 15, 16, '28ff96ba5f7e023a7ed9d446cb412fc6965a6ed68d1439b357bb4014ec57a8a4', 0, -1, 0, '0', '0x56744Dc80a3a520F0cCABf083AC874a4bf6433F3', '{"TxType":18,"AccountIndex":2,"CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftIndex":0,"CollectionId":0,"NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","AccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CreatorAccountNameHash":"AA==","NftContentHash":"q9G2rnlQf3tKMqhKtklbyf7mdFDtMW27p2us6KPFGXs=","NftL1TokenId":0}', '', '', 2, 0, 0); -INSERT INTO "public"."tx" VALUES (16, '2022-06-09 07:09:06.175353+00', '2022-06-09 07:09:06.175353+00', NULL, '01de4078-304a-406d-9995-7c8550248f28', 6, '5000', 2, 1, 16, 17, '0c599d212ed3641e0b6df735e8b04dd627accfdafbbfa38c173af5f38efb433e', -1, -1, 0, '100000', '', '{"FromAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"1c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb","AssetId":0,"AssetAmount":100000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"Memo":"transfer","CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1654761533445,"Nonce":1,"Sig":"epyzhZA39/F3mHPAvv8dz8NgPTtPWlYqPs9tEyDNsQgA8A4bi4ruGJe6evoUJ9BdWR49SJ1SCaJ+on1y2QyEFg=="}', '', 'transfer', 2, 1, 1654761533445); -INSERT INTO "public"."tx" VALUES (17, '2022-06-09 07:09:06.187051+00', '2022-06-09 07:09:06.187051+00', 
NULL, '1d4024d1-a49a-4f6a-9221-b716fedbd4aa', 10, '5000', 2, 1, 17, 18, '2d425cddc3d5aaec5ed0dd1465e7038e7fbbcc679e4a4c6c620742134ef93714', -1, -1, 0, '10000000', '0x99AC8881834797ebC32f185ee27c2e96842e1a47', '{"FromAccountIndex":2,"AssetId":0,"AssetAmount":10000000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ToAddress":"0x99AC8881834797ebC32f185ee27c2e96842e1a47","ExpiredAt":1654761540020,"Nonce":2,"Sig":"YLVT6d4HuMWX7zPw3gcsmqs//dqE0xnfqTbSyLxx3pMDN7IvgmErKDXGiZ/XKC75wf2I03R2dXpmZ6bercDVHA=="}', '', '', 2, 2, 1654761540020); -INSERT INTO "public"."tx" VALUES (18, '2022-06-09 07:09:06.200171+00', '2022-06-09 07:09:06.200171+00', NULL, 'd9f93c69-3be2-483c-b746-d98022a61ecb', 8, '5000', 2, 1, 18, 19, '1211d91f4e22bd2f1aa38daaec68431b35fd37f8a272d147ebe7ba3e73a58555', -1, 0, 0, '100000', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAAmount":100000,"AssetBId":2,"AssetBAmount":100000,"LpAmount":100000,"KLast":10000000000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761547992,"Nonce":3,"Sig":"7x6AUCZwD+fmcXWDs0WERCMR+rIBRSDDlzSV3vrjggQEdNd8uj6ghy3uzqrM2oeqa/9gP8vRzPFHYmUvmCKsTQ=="}', '', '', 2, 3, 1654761547992); -INSERT INTO "public"."tx" VALUES (19, '2022-06-09 07:09:06.212437+00', '2022-06-09 07:09:06.212437+00', NULL, '8d60898f-ef87-4726-9322-1bda3fd22c2b', 7, '5000', 0, 1, 19, 20, '1db7fb69796667194858edf7aea403110c42cddc0907b3953181e1184907fb35', -1, 0, 0, '100', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":2,"AssetAAmount":100,"AssetBId":0,"AssetBMinAmount":98,"AssetBAmountDelta":99,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1654761555052,"Nonce":4,"Sig":"qCqSqikwaTLE/4VDURQHRYK+9gykmtanhYBv/ByGSoMGBJq+8D7z2b9yc0D8M1zNmfeD5YvpCpJkElsjarviGw=="}', '', '', 2, 4, 1654761555052); -INSERT INTO "public"."tx" VALUES (20, '2022-06-09 07:09:06.22453+00', '2022-06-09 07:09:06.22453+00', NULL, '4282c649-d9be-49fc-bc34-48dd4bab5f15', 9, '5000', 2, 1, 20, 21, '2e888850863cf0c2dffa40c8a0c162749f1f93da6ddf225030a01648cdfc26d6', -1, 0, 0, '100', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAMinAmount":98,"AssetBId":2,"AssetBMinAmount":99,"LpAmount":100,"AssetAAmountDelta":99,"AssetBAmountDelta":100,"KLast":9980200000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761562031,"Nonce":5,"Sig":"A9Qrmz5Uj7mDA3WfUGRhFoNuWFMtO+RwxwfkbkmglBEAXouZOVBhU4iguZYAEtwO6xFsPYrGFI92A0z21KFk6Q=="}', '', '', 2, 5, 1654761562031); -INSERT INTO "public"."tx" VALUES (21, '2022-06-09 07:09:06.235545+00', '2022-06-09 07:09:06.235545+00', NULL, '0c3c72c1-ff2d-4ba3-b026-93b7cec8e6a1', 11, '5000', 2, 1, 21, 22, '140622efbca882cddada16ee07f9cc8718b69998a5d8d7922fa7591f2f533edd', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"CollectionId":1,"Name":"Zecrey Collection","Introduction":"Wonderful zecrey!","GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761569287,"Nonce":6,"Sig":"HrXIp5Vauk06jV3Jj4Lke/+XYp3ThhQsVAx8QksB1aYFVYeEZOf5nmIML+U3TSatTwyLCGMmomQSjGMxVtouxw=="}', '', '', 2, 6, 1654761569287); -INSERT INTO "public"."tx" VALUES (22, '2022-06-09 07:09:06.245099+00', '2022-06-09 07:09:06.245099+00', NULL, '69b6e9bb-0f8b-4b20-af35-35c6945489aa', 12, '5000', 2, 1, 22, 23, '12884f8bb4852d02ad1f654daa7a2fc230c539e5a1d3dcd83a474eed139e1f7f', 1, -1, 0, '0', '', 
'{"CreatorAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"1c54c09c98f7ade9d5eeba4124ac7c912e65699a3f76fa65d71eaf6359d9bceb","NftIndex":1,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftCollectionId":1,"CreatorTreasuryRate":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761575344,"Nonce":7,"Sig":"KAC/ZwgC7PJo4KDqBpVI048lfSjzli9qLXp3I4CFBJIBZY/c301Cm46AgoxkfaC02p8M5W263VWSEOsW+YATRg=="}', '', '', 2, 7, 1654761575344); -INSERT INTO "public"."tx" VALUES (23, '2022-06-09 07:09:06.256098+00', '2022-06-09 07:09:06.256098+00', NULL, 'a512199e-4146-407c-9b13-0039c8796650', 13, '5000', 0, 1, 23, 24, '19ca2bf9cca9b55f61c3f2d352ab486ac7e529670b6af93171054cb8d82f4fee', 1, -1, 0, '0', '', '{"FromAccountIndex":3,"ToAccountIndex":2,"ToAccountNameHash":"214a2d7af2022dfaee49dadb8992d3d7c225d8ae36109b531c28406dd69aad45","NftIndex":1,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1654761583894,"Nonce":1,"Sig":"G/YUzJOh4sp8ZF27Mz4s5hvLBC1P63uZbIVjMzHjqRkB0wTz30LsTSC5VIcxILqv7a/dCw4qd4Y3LYOtgLPW1w=="}', '', '', 3, 1, 1654761583894); -INSERT INTO "public"."tx" VALUES (24, '2022-06-09 07:09:06.269189+00', '2022-06-09 07:09:06.269189+00', NULL, '79245e26-0ec4-486e-9556-2fd477928380', 14, '5000', 0, 1, 24, 25, '137f5a5193ca65babef27c7f5be3ebc0eee3fd5c4de748d4cc0e736b80a99649', 1, -1, 0, '10000', '', '{"AccountIndex":2,"BuyOffer":{"Type":0,"OfferId":0,"AccountIndex":3,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1654754390138,"ExpiredAt":1654761590138,"TreasuryRate":200,"Sig":"CrwNdL+oHhdWBgJ0j+O/IY5Ca5qnBw6kDkPyUWD4wywApriICAXoTooPa//9vP9QRDPEQsHu9C2vvfeNaXeUGA=="},"SellOffer":{"Type":1,"OfferId":0,"AccountIndex":2,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1654754390138,"ExpiredAt":1654761590138,"TreasuryRate":200,"Sig":"1k2/LHCg9jCQ2+S9qYW8hWRLFryR7xQU+32zmjlEAAICKm6Tlks2bqoUXLBiOe5VNwUIMJ5gwJxTKOlJbOExbA=="},"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CreatorAmount":0,"TreasuryAmount":200,"Nonce":8,"ExpiredAt":1654761590138,"Sig":"Y/HMbBEAcLfdg5+eqAo/Gz+Nq8ZHdmLbm+SRUBZAB5cASst6Eo7UiL6O7+2IGNa7lBij3RgRvPn8bcBbxEO+Ww=="}', '', '', 2, 8, 1654761590138); -INSERT INTO "public"."tx" VALUES (25, '2022-06-09 07:09:06.281436+00', '2022-06-09 07:09:06.281436+00', NULL, 'fa2c5d73-deab-494a-a5c1-64938d1430aa', 15, '5000', 2, 1, 25, 26, '16033680a98409353095c6679b48d1fe06a03ec709b55e448a4c4a56e229393e', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"OfferId":1,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1654761601706,"Nonce":9,"Sig":"XQyUjK2wFu2opRmOhCnmDtCsVFeyj0MDofWLqyQCBBQCRi4zpBIxphGhuSMkoDO1WiFWkxVaRXwINrpOKjfMug=="}', '', '', 2, 9, 1654761601706); -INSERT INTO "public"."tx" VALUES (26, '2022-06-09 07:09:06.292598+00', '2022-06-09 07:09:06.292598+00', NULL, '5c3021a0-15bc-42b3-a966-11b0afdb0c73', 16, '5000', 0, 1, 26, 27, '278d08c3c1a50ed6e932abdfde1555b7843c43de10c1fded32f7cfc2987c9105', 1, -1, 0, '0', '', 
'{"AccountIndex":3,"CreatorAccountIndex":2,"CreatorAccountNameHash":"IUotevICLfruSdrbiZLT18Il2K42EJtTHChAbdaarUU=","CreatorTreasuryRate":0,"NftIndex":1,"NftContentHash":"Bmpl0+Q5etBfsuf1DqwWBkenSGws7bvqxkYkyL7qIvE=","NftL1Address":"0","NftL1TokenId":0,"CollectionId":1,"ToAddress":"0xd5Aa3B56a2E2139DB315CdFE3b34149c8ed09171","GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1654761611582,"Nonce":2,"Sig":"sPJWFi9pTtfv6Z9zFI/QlQ2M9APJdOrWuPuq8wVnZ5kDbJkAuE2MoDA7o07rv5g/nz/cjloY2us88w1dkg+5sQ=="}', '', '', 3, 2, 1654761611582); - --- ---------------------------- --- Table structure for tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx_detail"; -CREATE TABLE "public"."tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8, - "nonce" int8, - "collection_nonce" int8 -) -; - --- ---------------------------- --- Records of tx_detail --- ---------------------------- -INSERT INTO "public"."tx_detail" VALUES (1, '2022-06-09 07:09:06.08039+00', '2022-06-09 07:09:06.08039+00', NULL, 5, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (2, '2022-06-09 07:09:06.089081+00', '2022-06-09 07:09:06.089081+00', NULL, 6, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (3, '2022-06-09 07:09:06.096937+00', '2022-06-09 07:09:06.096937+00', NULL, 7, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (4, '2022-06-09 07:09:06.103587+00', '2022-06-09 07:09:06.103587+00', NULL, 8, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (5, '2022-06-09 07:09:06.111654+00', '2022-06-09 07:09:06.111654+00', NULL, 9, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":0,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":0,"TreasuryAccountIndex":0,"TreasuryRate":0}', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (6, '2022-06-09 07:09:06.120737+00', '2022-06-09 07:09:06.120737+00', NULL, 10, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":0,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":0,"TreasuryAccountIndex":0,"TreasuryRate":0}', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (7, '2022-06-09 07:09:06.128237+00', '2022-06-09 
07:09:06.128237+00', NULL, 11, 2, 2, -1, '', '{"PairIndex":2,"AssetAId":0,"AssetA":0,"AssetBId":0,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":0,"TreasuryAccountIndex":0,"TreasuryRate":0}', '{"PairIndex":2,"AssetAId":1,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (8, '2022-06-09 07:09:06.134875+00', '2022-06-09 07:09:06.134875+00', NULL, 12, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', 0, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (9, '2022-06-09 07:09:06.144201+00', '2022-06-09 07:09:06.144201+00', NULL, 13, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (10, '2022-06-09 07:09:06.144201+00', '2022-06-09 07:09:06.144201+00', NULL, 13, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":2,"NftContentHash":"abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b","NftL1TokenId":"0","NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","CreatorTreasuryRate":0,"CollectionId":0}', 0, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (11, '2022-06-09 07:09:06.154429+00', '2022-06-09 07:09:06.154429+00', NULL, 14, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":1,"Balance":-100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (12, '2022-06-09 07:09:06.162169+00', '2022-06-09 07:09:06.162169+00', NULL, 15, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (13, '2022-06-09 07:09:06.162169+00', '2022-06-09 07:09:06.162169+00', NULL, 15, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":2,"NftContentHash":"abd1b6ae79507f7b4a32a84ab6495bc9fee67450ed316dbba76bace8a3c5197b","NftL1TokenId":"0","NftL1Address":"0xB7aD4A7E9459D0C1541Db2eEceceAcc7dBa803e1","CreatorTreasuryRate":0,"CollectionId":0}', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (14, '2022-06-09 07:09:06.176047+00', '2022-06-09 07:09:06.176047+00', NULL, 16, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (15, '2022-06-09 07:09:06.176047+00', '2022-06-09 07:09:06.176047+00', NULL, 16, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 
'{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (16, '2022-06-09 07:09:06.176047+00', '2022-06-09 07:09:06.176047+00', NULL, 16, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (17, '2022-06-09 07:09:06.176047+00', '2022-06-09 07:09:06.176047+00', NULL, 16, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (18, '2022-06-09 07:09:06.187885+00', '2022-06-09 07:09:06.187885+00', NULL, 17, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999999900000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-10000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 1, 0); -INSERT INTO "public"."tx_detail" VALUES (19, '2022-06-09 07:09:06.187885+00', '2022-06-09 07:09:06.187885+00', NULL, 17, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999995000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 1, 0); -INSERT INTO "public"."tx_detail" VALUES (20, '2022-06-09 07:09:06.187885+00', '2022-06-09 07:09:06.187885+00', NULL, 17, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (21, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989900000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 2, 0); -INSERT INTO "public"."tx_detail" VALUES (22, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999990000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 2, 0); -INSERT INTO "public"."tx_detail" VALUES (23, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999890000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0, 2, 0); -INSERT INTO "public"."tx_detail" VALUES (24, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989800000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":100000,"OfferCanceledOrFinalized":0}', 3, 0, 2, 0); -INSERT INTO "public"."tx_detail" VALUES (25, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '{"PairIndex":0,"AssetAId":0,"AssetA":100000,"AssetBId":2,"AssetB":100000,"LpAmount":100000,"KLast":10000000000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 4, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (26, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 0, 
1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (27, '2022-06-09 07:09:06.200912+00', '2022-06-09 07:09:06.200912+00', NULL, 18, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":10000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (28, '2022-06-09 07:09:06.212936+00', '2022-06-09 07:09:06.212936+00', NULL, 19, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999885000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 3, 0); -INSERT INTO "public"."tx_detail" VALUES (29, '2022-06-09 07:09:06.212936+00', '2022-06-09 07:09:06.212936+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989800000,"LpAmount":100000,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 3, 0); -INSERT INTO "public"."tx_detail" VALUES (30, '2022-06-09 07:09:06.212936+00', '2022-06-09 07:09:06.212936+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989800099,"LpAmount":100000,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0, 3, 0); -INSERT INTO "public"."tx_detail" VALUES (31, '2022-06-09 07:09:06.212936+00', '2022-06-09 07:09:06.212936+00', NULL, 19, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":100000,"AssetBId":2,"AssetB":100000,"LpAmount":100000,"KLast":10000000000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":100,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 3, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (32, '2022-06-09 07:09:06.212936+00', '2022-06-09 07:09:06.212936+00', NULL, 19, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (33, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989795099,"LpAmount":100000,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 4, 0); -INSERT INTO "public"."tx_detail" VALUES (34, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999884900,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 4, 0); -INSERT INTO "public"."tx_detail" VALUES (35, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999885000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0, 4, 0); -INSERT INTO "public"."tx_detail" VALUES (36, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989795198,"LpAmount":100000,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":-100,"OfferCanceledOrFinalized":0}', 3, 0, 4, 0); -INSERT INTO "public"."tx_detail" 
VALUES (37, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 0, 1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (38, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":99901,"AssetBId":2,"AssetB":100100,"LpAmount":100000,"KLast":10000000000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":-100,"LpAmount":-100,"KLast":9980200000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 5, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (39, '2022-06-09 07:09:06.22503+00', '2022-06-09 07:09:06.22503+00', NULL, 20, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":15000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (41, '2022-06-09 07:09:06.236063+00', '2022-06-09 07:09:06.236063+00', NULL, 21, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999880000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0, 5, 0); -INSERT INTO "public"."tx_detail" VALUES (42, '2022-06-09 07:09:06.236063+00', '2022-06-09 07:09:06.236063+00', NULL, 21, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":20000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (43, '2022-06-09 07:09:06.246098+00', '2022-06-09 07:09:06.246098+00', NULL, 22, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999875000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 6, 1); -INSERT INTO "public"."tx_detail" VALUES (44, '2022-06-09 07:09:06.246098+00', '2022-06-09 07:09:06.246098+00', NULL, 22, 2, 1, 3, 'gavin.legend', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (45, '2022-06-09 07:09:06.246098+00', '2022-06-09 07:09:06.246098+00', NULL, 22, 1, 3, 3, 'gavin.legend', '{"NftIndex":1,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (46, '2022-06-09 07:09:06.246098+00', '2022-06-09 07:09:06.246098+00', NULL, 22, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":25000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (47, '2022-06-09 07:09:06.256597+00', '2022-06-09 07:09:06.256597+00', NULL, 23, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (48, '2022-06-09 
07:09:06.256597+00', '2022-06-09 07:09:06.256597+00', NULL, 23, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (40, '2022-06-09 07:09:06.236063+00', '2022-06-09 07:09:06.236063+00', NULL, 21, 0, 4, 2, 'sher.legend', '0', '1', 0, 0, 5, 0); -INSERT INTO "public"."tx_detail" VALUES (49, '2022-06-09 07:09:06.256597+00', '2022-06-09 07:09:06.256597+00', NULL, 23, 1, 3, 2, 'sher.legend', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":2,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (50, '2022-06-09 07:09:06.256597+00', '2022-06-09 07:09:06.256597+00', NULL, 23, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (51, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989795198,"LpAmount":99900,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (52, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000095000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":-10000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1, 1, 0); -INSERT INTO "public"."tx_detail" VALUES (53, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000085000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 2, 1, 1, 0); -INSERT INTO "public"."tx_detail" VALUES (54, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989790198,"LpAmount":99900,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":9800,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (55, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 4, 2, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (56, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":1}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 3, 7, 1); -INSERT INTO "public"."tx_detail" VALUES (57, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 1, 3, -1, '', 
'{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":2,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 6, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (58, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":10000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":200,"LpAmount":0,"OfferCanceledOrFinalized":0}', 7, 4, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (59, '2022-06-09 07:09:06.269598+00', '2022-06-09 07:09:06.269598+00', NULL, 24, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":10200,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 8, 4, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (60, '2022-06-09 07:09:06.282098+00', '2022-06-09 07:09:06.282098+00', NULL, 25, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":99999999999999870000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 8, 1); -INSERT INTO "public"."tx_detail" VALUES (61, '2022-06-09 07:09:06.282098+00', '2022-06-09 07:09:06.282098+00', NULL, 25, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":1}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":3}', 1, 0, 8, 1); -INSERT INTO "public"."tx_detail" VALUES (62, '2022-06-09 07:09:06.282098+00', '2022-06-09 07:09:06.282098+00', NULL, 25, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":30000,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (63, '2022-06-09 07:09:06.293268+00', '2022-06-09 07:09:06.293268+00', NULL, 26, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000085000,"LpAmount":0,"OfferCanceledOrFinalized":1}', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0, 1, 0); -INSERT INTO "public"."tx_detail" VALUES (64, '2022-06-09 07:09:06.293268+00', '2022-06-09 07:09:06.293268+00', NULL, 26, 1, 3, -1, '', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"066a65d3e4397ad05fb2e7f50eac160647a7486c2cedbbeac64624c8beea22f1","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', '{"NftIndex":1,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1, 0, 0); -INSERT INTO "public"."tx_detail" VALUES (65, '2022-06-09 07:09:06.293268+00', '2022-06-09 07:09:06.293268+00', NULL, 26, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99999999989799998,"LpAmount":99900,"OfferCanceledOrFinalized":3}', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1, 9, 1); -INSERT INTO "public"."tx_detail" VALUES (66, '2022-06-09 07:09:06.293268+00', '2022-06-09 07:09:06.293268+00', NULL, 26, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":15200,"LpAmount":0,"OfferCanceledOrFinalized":0}', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2, 0, 0); - --- 
---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_history_id_seq" -OWNED BY "public"."account_history"."id"; -SELECT setval('"public"."account_history_id_seq"', 40, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_id_seq" -OWNED BY "public"."account"."id"; -SELECT setval('"public"."account_id_seq"', 4, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_for_commit_id_seq" -OWNED BY "public"."block_for_commit"."id"; -SELECT setval('"public"."block_for_commit_id_seq"', 26, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_id_seq" -OWNED BY "public"."block"."id"; -SELECT setval('"public"."block_id_seq"', 27, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."fail_tx_id_seq" -OWNED BY "public"."fail_tx"."id"; -SELECT setval('"public"."fail_tx_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_amount_id_seq" -OWNED BY "public"."l1_amount"."id"; -SELECT setval('"public"."l1_amount_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_block_monitor_id_seq" -OWNED BY "public"."l1_block_monitor"."id"; -SELECT setval('"public"."l1_block_monitor_id_seq"', 4, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_tx_sender_id_seq" -OWNED BY "public"."l1_tx_sender"."id"; -SELECT setval('"public"."l1_tx_sender_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_asset_info_id_seq" -OWNED BY "public"."l2_asset_info"."id"; -SELECT setval('"public"."l2_asset_info_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_block_event_monitor_id_seq" -OWNED BY "public"."l2_block_event_monitor"."id"; -SELECT setval('"public"."l2_block_event_monitor_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_collection_id_seq" -OWNED BY "public"."l2_nft_collection"."id"; -SELECT setval('"public"."l2_nft_collection_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_history_id_seq" -OWNED BY "public"."l2_nft_exchange_history"."id"; -SELECT setval('"public"."l2_nft_exchange_history_id_seq"', 2, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_id_seq" -OWNED BY "public"."l2_nft_exchange"."id"; -SELECT setval('"public"."l2_nft_exchange_id_seq"', 1, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_history_id_seq" -OWNED BY "public"."l2_nft_history"."id"; -SELECT setval('"public"."l2_nft_history_id_seq"', 6, true); - --- ---------------------------- --- Alter sequences owned by --- 
---------------------------- -ALTER SEQUENCE "public"."l2_nft_id_seq" -OWNED BY "public"."l2_nft"."id"; -SELECT setval('"public"."l2_nft_id_seq"', 2, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -OWNED BY "public"."l2_nft_withdraw_history"."id"; -SELECT setval('"public"."l2_nft_withdraw_history_id_seq"', 2, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_tx_event_monitor_id_seq" -OWNED BY "public"."l2_tx_event_monitor"."id"; -SELECT setval('"public"."l2_tx_event_monitor_id_seq"', 15, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_history_id_seq" -OWNED BY "public"."liquidity_history"."id"; -SELECT setval('"public"."liquidity_history_id_seq"', 7, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_id_seq" -OWNED BY "public"."liquidity"."id"; -SELECT setval('"public"."liquidity_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_detail_id_seq" -OWNED BY "public"."mempool_tx_detail"."id"; -SELECT setval('"public"."mempool_tx_detail_id_seq"', 66, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_id_seq" -OWNED BY "public"."mempool_tx"."id"; -SELECT setval('"public"."mempool_tx_id_seq"', 26, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."offer_id_seq" -OWNED BY "public"."offer"."id"; -SELECT setval('"public"."offer_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."proof_sender_id_seq" -OWNED BY "public"."proof_sender"."id"; -SELECT setval('"public"."proof_sender_id_seq"', 26, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."sys_config_id_seq" -OWNED BY "public"."sys_config"."id"; -SELECT setval('"public"."sys_config_id_seq"', 11, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_detail_id_seq" -OWNED BY "public"."tx_detail"."id"; -SELECT setval('"public"."tx_detail_id_seq"', 66, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_id_seq" -OWNED BY "public"."tx"."id"; -SELECT setval('"public"."tx_id_seq"', 26, true); - --- ---------------------------- --- Indexes structure for table account --- ---------------------------- -CREATE UNIQUE INDEX "idx_account_account_index" ON "public"."account" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name" ON "public"."account" USING btree ( - "account_name" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name_hash" ON "public"."account" USING btree ( - "account_name_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_deleted_at" ON "public"."account" USING btree ( - "deleted_at" 
"pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_public_key" ON "public"."account" USING btree ( - "public_key" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account --- ---------------------------- -ALTER TABLE "public"."account" ADD CONSTRAINT "account_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table account_history --- ---------------------------- -CREATE INDEX "idx_account_history_account_index" ON "public"."account_history" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_history_deleted_at" ON "public"."account_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account_history --- ---------------------------- -ALTER TABLE "public"."account_history" ADD CONSTRAINT "account_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block --- ---------------------------- -CREATE INDEX "idx_block_deleted_at" ON "public"."block" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block --- ---------------------------- -ALTER TABLE "public"."block" ADD CONSTRAINT "block_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block_for_commit --- ---------------------------- -CREATE INDEX "idx_block_for_commit_deleted_at" ON "public"."block_for_commit" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block_for_commit --- ---------------------------- -ALTER TABLE "public"."block_for_commit" ADD CONSTRAINT "block_for_commit_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table fail_tx --- ---------------------------- -CREATE INDEX "idx_fail_tx_deleted_at" ON "public"."fail_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_fail_tx_tx_hash" ON "public"."fail_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table fail_tx --- ---------------------------- -ALTER TABLE "public"."fail_tx" ADD CONSTRAINT "fail_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_amount --- ---------------------------- -CREATE INDEX "idx_l1_amount_asset_id" ON "public"."l1_amount" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_block_height" ON "public"."l1_amount" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_deleted_at" ON "public"."l1_amount" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_amount --- ---------------------------- -ALTER TABLE "public"."l1_amount" ADD CONSTRAINT "l1_amount_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_block_monitor --- ---------------------------- -CREATE INDEX "idx_l1_block_monitor_deleted_at" ON "public"."l1_block_monitor" USING btree ( - "deleted_at" 
"pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_block_monitor --- ---------------------------- -ALTER TABLE "public"."l1_block_monitor" ADD CONSTRAINT "l1_block_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_tx_sender --- ---------------------------- -CREATE INDEX "idx_l1_tx_sender_deleted_at" ON "public"."l1_tx_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_tx_sender --- ---------------------------- -ALTER TABLE "public"."l1_tx_sender" ADD CONSTRAINT "l1_tx_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_asset_info --- ---------------------------- -CREATE UNIQUE INDEX "idx_l2_asset_info_asset_id" ON "public"."l2_asset_info" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_asset_info_deleted_at" ON "public"."l2_asset_info" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_asset_info --- ---------------------------- -ALTER TABLE "public"."l2_asset_info" ADD CONSTRAINT "l2_asset_info_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_block_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_block_event_monitor_block_event_type" ON "public"."l2_block_event_monitor" USING btree ( - "block_event_type" "pg_catalog"."int2_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_deleted_at" ON "public"."l2_block_event_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_l2_block_height" ON "public"."l2_block_event_monitor" USING btree ( - "l2_block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_block_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_block_event_monitor" ADD CONSTRAINT "l2_block_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft --- ---------------------------- -CREATE INDEX "idx_l2_nft_deleted_at" ON "public"."l2_nft" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_nft_index" ON "public"."l2_nft" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft --- ---------------------------- -ALTER TABLE "public"."l2_nft" ADD CONSTRAINT "l2_nft_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_collection --- ---------------------------- -CREATE INDEX "idx_l2_nft_collection_deleted_at" ON "public"."l2_nft_collection" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_collection --- ---------------------------- -ALTER TABLE "public"."l2_nft_collection" ADD CONSTRAINT "l2_nft_collection_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_deleted_at" ON "public"."l2_nft_exchange" USING btree ( - "deleted_at" 
"pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange" ADD CONSTRAINT "l2_nft_exchange_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_history_deleted_at" ON "public"."l2_nft_exchange_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange_history" ADD CONSTRAINT "l2_nft_exchange_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_history_deleted_at" ON "public"."l2_nft_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_history" ADD CONSTRAINT "l2_nft_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_withdraw_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_withdraw_history_deleted_at" ON "public"."l2_nft_withdraw_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_withdraw_history_nft_index" ON "public"."l2_nft_withdraw_history" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_withdraw_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_withdraw_history" ADD CONSTRAINT "l2_nft_withdraw_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_tx_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_tx_event_monitor_deleted_at" ON "public"."l2_tx_event_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_tx_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_tx_event_monitor" ADD CONSTRAINT "l2_tx_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity --- ---------------------------- -CREATE INDEX "idx_liquidity_deleted_at" ON "public"."liquidity" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity --- ---------------------------- -ALTER TABLE "public"."liquidity" ADD CONSTRAINT "liquidity_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity_history --- ---------------------------- -CREATE INDEX "idx_liquidity_history_deleted_at" ON "public"."liquidity_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity_history --- ---------------------------- -ALTER TABLE "public"."liquidity_history" ADD CONSTRAINT "liquidity_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx --- 
---------------------------- -CREATE INDEX "idx_mempool_tx_deleted_at" ON "public"."mempool_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_status" ON "public"."mempool_tx" USING btree ( - "status" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_mempool_tx_tx_hash" ON "public"."mempool_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table mempool_tx --- ---------------------------- -ALTER TABLE "public"."mempool_tx" ADD CONSTRAINT "mempool_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx_detail --- ---------------------------- -CREATE INDEX "idx_mempool_tx_detail_account_index" ON "public"."mempool_tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_deleted_at" ON "public"."mempool_tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_tx_id" ON "public"."mempool_tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table mempool_tx_detail --- ---------------------------- -ALTER TABLE "public"."mempool_tx_detail" ADD CONSTRAINT "mempool_tx_detail_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table offer --- ---------------------------- -CREATE INDEX "idx_offer_deleted_at" ON "public"."offer" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table offer --- ---------------------------- -ALTER TABLE "public"."offer" ADD CONSTRAINT "offer_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table proof_sender --- ---------------------------- -CREATE INDEX "idx_proof_sender_block_number" ON "public"."proof_sender" USING btree ( - "block_number" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_proof_sender_deleted_at" ON "public"."proof_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table proof_sender --- ---------------------------- -ALTER TABLE "public"."proof_sender" ADD CONSTRAINT "proof_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table sys_config --- ---------------------------- -CREATE INDEX "idx_sys_config_deleted_at" ON "public"."sys_config" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table sys_config --- ---------------------------- -ALTER TABLE "public"."sys_config" ADD CONSTRAINT "sys_config_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx --- ---------------------------- -CREATE INDEX "idx_tx_block_height" ON "public"."tx" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_block_id" ON "public"."tx" USING btree ( - "block_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_deleted_at" ON "public"."tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_tx_tx_hash" ON "public"."tx" USING btree ( - "tx_hash" 
COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx --- ---------------------------- -ALTER TABLE "public"."tx" ADD CONSTRAINT "tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx_detail --- ---------------------------- -CREATE INDEX "idx_tx_detail_account_index" ON "public"."tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_deleted_at" ON "public"."tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_tx_id" ON "public"."tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx_detail --- ---------------------------- -ALTER TABLE "public"."tx_detail" ADD CONSTRAINT "tx_detail_pkey" PRIMARY KEY ("id"); diff --git a/common/proverUtil/atomicMatch.go b/common/proverUtil/atomicMatch.go deleted file mode 100644 index e8e75cd76..000000000 --- a/common/proverUtil/atomicMatch.go +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructAtomicMatchCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeAtomicMatch { - logx.Errorf("[ConstructAtomicMatchCryptoTx] invalid tx type") - return nil, errors.New("[ConstructAtomicMatchCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] invalid params") - return nil, errors.New("[ConstructAtomicMatchCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseAtomicMatchTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoAtomicMatchTx(txInfo) - if err != nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.AtomicMatchTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructAtomicMatchCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoAtomicMatchTx(txInfo *commonTx.AtomicMatchTxInfo) (info *CryptoAtomicMatchTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - packedAmount, err := util.ToPackedAmount(txInfo.BuyOffer.AssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedCreatorAmount, err := util.ToPackedAmount(txInfo.CreatorAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedTreasuryAmount, err := util.ToPackedAmount(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - buySig := new(eddsa.Signature) - _, err = buySig.SetBytes(txInfo.BuyOffer.Sig) - if err != nil { - return nil, err - } - sellSig := 
new(eddsa.Signature) - _, err = sellSig.SetBytes(txInfo.SellOffer.Sig) - if err != nil { - return nil, err - } - info = &CryptoAtomicMatchTx{ - AccountIndex: txInfo.AccountIndex, - BuyOffer: &CryptoOfferTx{ - Type: txInfo.BuyOffer.Type, - OfferId: txInfo.BuyOffer.OfferId, - AccountIndex: txInfo.BuyOffer.AccountIndex, - NftIndex: txInfo.BuyOffer.NftIndex, - AssetId: txInfo.BuyOffer.AssetId, - AssetAmount: packedAmount, - ListedAt: txInfo.BuyOffer.ListedAt, - ExpiredAt: txInfo.BuyOffer.ExpiredAt, - TreasuryRate: txInfo.BuyOffer.TreasuryRate, - Sig: buySig, - }, - SellOffer: &CryptoOfferTx{ - Type: txInfo.SellOffer.Type, - OfferId: txInfo.SellOffer.OfferId, - AccountIndex: txInfo.SellOffer.AccountIndex, - NftIndex: txInfo.SellOffer.NftIndex, - AssetId: txInfo.SellOffer.AssetId, - AssetAmount: packedAmount, - ListedAt: txInfo.SellOffer.ListedAt, - ExpiredAt: txInfo.SellOffer.ExpiredAt, - TreasuryRate: txInfo.SellOffer.TreasuryRate, - Sig: sellSig, - }, - CreatorAmount: packedCreatorAmount, - TreasuryAmount: packedTreasuryAmount, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - } - return info, nil -} diff --git a/common/proverUtil/atomicMatch_test.go b/common/proverUtil/atomicMatch_test.go deleted file mode 100644 index 02934f776..000000000 --- a/common/proverUtil/atomicMatch_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructAtomicMatchCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(24) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(23) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructAtomicMatchCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/beforeCommitter-l1-keccak256.sql b/common/proverUtil/beforeCommitter-l1-keccak256.sql deleted file mode 100644 index 20cd27d15..000000000 --- a/common/proverUtil/beforeCommitter-l1-keccak256.sql +++ /dev/null @@ -1,1616 +0,0 @@ -/* - Navicat Premium Data Transfer - - Source Server : local_docker - Source Server Type : PostgreSQL - Source Server Version : 140003 - Source Host : localhost:5432 - Source Catalog : zecreyLegend - Source Schema : public - - Target Server Type : PostgreSQL - Target Server Version : 140003 - File Encoding : 65001 - - Date: 16/06/2022 11:03:16 -*/ - - --- ---------------------------- --- Sequence structure for account_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_history_id_seq"; -CREATE SEQUENCE "public"."account_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for account_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_id_seq"; -CREATE SEQUENCE "public"."account_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for asset_info_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS 
"public"."asset_info_id_seq"; -CREATE SEQUENCE "public"."asset_info_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_for_commit_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_for_commit_id_seq"; -CREATE SEQUENCE "public"."block_for_commit_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_id_seq"; -CREATE SEQUENCE "public"."block_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for fail_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."fail_tx_id_seq"; -CREATE SEQUENCE "public"."fail_tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_amount_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_amount_id_seq"; -CREATE SEQUENCE "public"."l1_amount_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_block_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_block_monitor_id_seq"; -CREATE SEQUENCE "public"."l1_block_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_tx_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_tx_sender_id_seq"; -CREATE SEQUENCE "public"."l1_tx_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_asset_info_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_asset_info_id_seq"; -CREATE SEQUENCE "public"."l2_asset_info_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_block_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_block_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_block_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_collection_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_collection_id_seq"; -CREATE SEQUENCE "public"."l2_nft_collection_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_history_id_seq --- 
---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_id_seq"; -CREATE SEQUENCE "public"."l2_nft_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_withdraw_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_withdraw_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_tx_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_tx_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_tx_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_history_id_seq"; -CREATE SEQUENCE "public"."liquidity_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_id_seq"; -CREATE SEQUENCE "public"."liquidity_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for mempool_tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_detail_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for mempool_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for offer_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."offer_id_seq"; -CREATE SEQUENCE "public"."offer_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for proof_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."proof_sender_id_seq"; -CREATE SEQUENCE "public"."proof_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for sys_config_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."sys_config_id_seq"; -CREATE SEQUENCE "public"."sys_config_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_detail_id_seq"; -CREATE SEQUENCE "public"."tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_id_seq --- 
---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_id_seq"; -CREATE SEQUENCE "public"."tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Table structure for account --- ---------------------------- -DROP TABLE IF EXISTS "public"."account"; -CREATE TABLE "public"."account" ( - "id" int8 NOT NULL DEFAULT nextval('account_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "public_key" text COLLATE "pg_catalog"."default", - "account_name_hash" text COLLATE "pg_catalog"."default", - "l1_address" text COLLATE "pg_catalog"."default", - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of account --- ---------------------------- -INSERT INTO "public"."account" VALUES (1, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 0, 'treasury.legend', 'fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998', 'c0d201aace9a2c17ce7066dc6ffefaf7930f1317c4c95d0661b164a1c584d676', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (2, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 1, 'gas.legend', '1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93', '68fbd17e77eec501c677ccc31c260f30ee8ed049c893900e084ba8b7f7569ce6', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (3, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 2, 'sher.legend', 'b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85', '04b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (4, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 3, 'gavin.legend', '0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e', 'f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a3', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); - --- ---------------------------- --- Table structure for account_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."account_history"; -CREATE TABLE "public"."account_history" ( - "id" int8 NOT NULL DEFAULT nextval('account_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of account_history --- ---------------------------- - --- ---------------------------- --- Table structure for asset_info --- ---------------------------- -DROP TABLE IF EXISTS "public"."asset_info"; -CREATE TABLE "public"."asset_info" ( - "id" int8 NOT NULL DEFAULT nextval('asset_info_id_seq'::regclass), - 
"created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "asset_name" text COLLATE "pg_catalog"."default", - "asset_symbol" text COLLATE "pg_catalog"."default", - "l1_address" text COLLATE "pg_catalog"."default", - "decimals" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of asset_info --- ---------------------------- -INSERT INTO "public"."asset_info" VALUES (1, '2022-06-16 03:01:29.40839+00', '2022-06-16 03:01:29.40839+00', NULL, 0, 'BNB', 'BNB', '0x00', 18, 0); -INSERT INTO "public"."asset_info" VALUES (2, '2022-06-16 03:02:04.096602+00', '2022-06-16 03:02:04.096602+00', NULL, 1, 'LEG', 'LEG', '0xDFF05aF25a5A56A3c7afFcB269235caE21eE53d8', 18, 0); -INSERT INTO "public"."asset_info" VALUES (3, '2022-06-16 03:02:04.096602+00', '2022-06-16 03:02:04.096602+00', NULL, 2, 'REY', 'REY', '0xE2Bd0916DFC2f5B9e05a4936982B67013Fbd338F', 18, 0); - --- ---------------------------- --- Table structure for block --- ---------------------------- -DROP TABLE IF EXISTS "public"."block"; -CREATE TABLE "public"."block" ( - "id" int8 NOT NULL DEFAULT nextval('block_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_commitment" text COLLATE "pg_catalog"."default", - "block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "priority_operations" int8, - "pending_on_chain_operations_hash" text COLLATE "pg_catalog"."default", - "pending_on_chain_operations_pub_data" text COLLATE "pg_catalog"."default", - "committed_tx_hash" text COLLATE "pg_catalog"."default", - "committed_at" int8, - "verified_tx_hash" text COLLATE "pg_catalog"."default", - "verified_at" int8, - "block_status" int8 -) -; - --- ---------------------------- --- Records of block --- ---------------------------- -INSERT INTO "public"."block" VALUES (1, '2022-06-16 03:01:29.413451+00', '2022-06-16 03:01:29.413451+00', NULL, '0000000000000000000000000000000000000000000000000000000000000000', 0, '14e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 3); - --- ---------------------------- --- Table structure for block_for_commit --- ---------------------------- -DROP TABLE IF EXISTS "public"."block_for_commit"; -CREATE TABLE "public"."block_for_commit" ( - "id" int8 NOT NULL DEFAULT nextval('block_for_commit_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "public_data" text COLLATE "pg_catalog"."default", - "timestamp" int8, - "public_data_offsets" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of block_for_commit --- ---------------------------- - --- ---------------------------- --- Table structure for fail_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."fail_tx"; -CREATE TABLE "public"."fail_tx" ( - "id" int8 NOT NULL DEFAULT nextval('fail_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "asset_a_id" int8, - "asset_b_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text 
COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of fail_tx --- ---------------------------- - --- ---------------------------- --- Table structure for l1_amount --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_amount"; -CREATE TABLE "public"."l1_amount" ( - "id" int8 NOT NULL DEFAULT nextval('l1_amount_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "block_height" int8, - "total_amount" int8 -) -; - --- ---------------------------- --- Records of l1_amount --- ---------------------------- - --- ---------------------------- --- Table structure for l1_block_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_block_monitor"; -CREATE TABLE "public"."l1_block_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l1_block_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_block_height" int8, - "block_info" text COLLATE "pg_catalog"."default", - "monitor_type" int8 -) -; - --- ---------------------------- --- Records of l1_block_monitor --- ---------------------------- -INSERT INTO "public"."l1_block_monitor" VALUES (1, '2022-06-16 03:02:04.09326+00', '2022-06-16 03:02:04.09326+00', NULL, 628, '[{"EventType":6,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":7,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":5,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":4,"TxHash":"0x4b64139d0696ef64b76f44ebd5b367bb6e887363a4bee19ca4d549ba7a2d7d4f"},{"EventType":4,"TxHash":"0xedc14dd231ebb62ec501ad9b74b42e4c23257c0c175cfd5501a2ef9455cdad2f"}]', 1); -INSERT INTO "public"."l1_block_monitor" VALUES (2, '2022-06-16 03:02:24.114168+00', '2022-06-16 03:02:24.114168+00', NULL, 628, '[{"EventType":0,"TxHash":"0xb47eac43aeaa52c33ae80b1fe7433e221b6c8003a7c4ba54984eff42ac0770ef"},{"EventType":0,"TxHash":"0xebba40069a895f43d4f1cae8a114c4f416d8354a63fec0b845ee3ae296f1783b"},{"EventType":0,"TxHash":"0x4640fb00a59749a95cc08823d4ca95c8936ddd89503633b7076678df2da5df1e"},{"EventType":0,"TxHash":"0xea08984a16136275a6849daa793c20dc272a37fd1266e3502923950509516e92"},{"EventType":0,"TxHash":"0x7184bd484e97ae61dcdddeb3282470a191dee273383369c275875144ed7022fb"},{"EventType":0,"TxHash":"0xdb8dd7b95f720396d1fb19a6789989983bfa3deab0c6b68af03ed0f34ed65e95"},{"EventType":0,"TxHash":"0x3f31a9fa97d4ce93e9858331e8ecce6699043b5c22b384f1b362a58e8d8a51f0"},{"EventType":0,"TxHash":"0xd50ede290f499c12156ff59dad8dfef00082f980ecf53a3ab8aa6596c926ad4e"},{"EventType":0,"TxHash":"0x33a5529ba1899f41b52d26ee4167808876f28bcdd4b1961919dd70432f64bb1f"},{"EventType":0,"TxHash":"0x4f62a0d4fb2a17e225f39cf832b101aa5daadfa2895e1b19f9fa53f8f21ffea7"},{"EventType":0,"TxHash":"0xed3c1beb710e7be01f88e5eee08a4e6d40e74af08e854a761a6e0a721f90d687"},{"EventType":0,"TxHash":"0xa97822618abccde3fa3fc38753bb720751337e0a2e86a82f9a600380cbf12e2b"},{"EventType":0,"TxHash":"0xf2cd0648ddeacb4234e725966e36d642ab47be5e011c83602ab9d5f84e5de62a"},{"EventType":0,"TxHash":"0xc0cdeaa451c6678d9368858ae55a76288c383c199871daabd0165e6c8a3e1237"},{"EventType":0,"TxHash":"0xcd9f5635ee8a285f545afa70d30cc9448a782ea6f72ac2baa4c6eef1ba2278e5"}]', 0); - --- ---------------------------- --- Table structure for l1_tx_sender --- 
---------------------------- -DROP TABLE IF EXISTS "public"."l1_tx_sender"; -CREATE TABLE "public"."l1_tx_sender" ( - "id" int8 NOT NULL DEFAULT nextval('l1_tx_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "tx_status" int8, - "tx_type" int2, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l1_tx_sender --- ---------------------------- - --- ---------------------------- --- Table structure for l2_asset_info --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_asset_info"; -CREATE TABLE "public"."l2_asset_info" ( - "id" int8 NOT NULL DEFAULT nextval('l2_asset_info_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "asset_address" text COLLATE "pg_catalog"."default", - "asset_name" text COLLATE "pg_catalog"."default", - "asset_symbol" text COLLATE "pg_catalog"."default", - "decimals" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_asset_info --- ---------------------------- -INSERT INTO "public"."l2_asset_info" VALUES (1, '2022-06-14 06:43:24.621929+00', '2022-06-14 06:43:24.621929+00', NULL, 0, '0x00', 'BNB', 'BNB', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (2, '2022-06-14 06:44:37.378403+00', '2022-06-14 06:44:37.378403+00', NULL, 1, '0x6b8bdbAACf09C562409Eb5f811A619D5c1A38c9D', 'LEG', 'LEG', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (3, '2022-06-14 06:44:37.378403+00', '2022-06-14 06:44:37.378403+00', NULL, 2, '0xdDD0811dAD9d7Ef6518e0275c2e52BD9B837b6cD', 'REY', 'REY', 18, 0); - --- ---------------------------- --- Table structure for l2_block_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_block_event_monitor"; -CREATE TABLE "public"."l2_block_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_block_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_event_type" int2, - "l1_block_height" int8, - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_block_event_monitor --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft"; -CREATE TABLE "public"."l2_nft" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft --- ---------------------------- -INSERT INTO "public"."l2_nft" VALUES (1, '2022-06-16 03:02:50.046873+00', '2022-06-16 03:02:50.046873+00', NULL, 0, 0, 2, '8fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478', '0x464ed8Ce7076Abaf743F760468230B9d71fB7D90', '0', 0, 0); - --- ---------------------------- --- Table structure for l2_nft_collection --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_collection"; -CREATE TABLE 
"public"."l2_nft_collection" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_collection_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "name" text COLLATE "pg_catalog"."default", - "introduction" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of l2_nft_collection --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_exchange --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange"; -CREATE TABLE "public"."l2_nft_exchange" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of l2_nft_exchange --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_exchange_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange_history"; -CREATE TABLE "public"."l2_nft_exchange_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_exchange_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_history"; -CREATE TABLE "public"."l2_nft_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8, - "status" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_withdraw_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_withdraw_history"; -CREATE TABLE "public"."l2_nft_withdraw_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_withdraw_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft_withdraw_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_tx_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_tx_event_monitor"; -CREATE TABLE 
"public"."l2_tx_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_tx_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l1_block_height" int8, - "sender_address" text COLLATE "pg_catalog"."default", - "request_id" int8, - "tx_type" int8, - "pubdata" text COLLATE "pg_catalog"."default", - "expiration_block" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_tx_event_monitor --- ---------------------------- -INSERT INTO "public"."l2_tx_event_monitor" VALUES (1, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.047909+00', NULL, '0xb47eac43aeaa52c33ae80b1fe7433e221b6c8003a7c4ba54984eff42ac0770ef', 605, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 1, '01000000007472656173757279000000000000000000000000000000000000000000000000c0d201aace9a2c17ce7066dc6ffefaf7930f1317c4c95d0661b164a1c584d6762005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc', 40925, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (2, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.049562+00', NULL, '0xebba40069a895f43d4f1cae8a114c4f416d8354a63fec0b845ee3ae296f1783b', 606, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 1, 1, '0100000001676173000000000000000000000000000000000000000000000000000000000068fbd17e77eec501c677ccc31c260f30ee8ed049c893900e084ba8b7f7569ce62c24415b75651673b0d7bbf145ac8d7cb744ba6926963d1d014836336df1317a134f4726b89983a8e7babbf6973e7ee16311e24328edf987bb0fbe7a494ec91e', 40926, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (3, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.05065+00', NULL, '0x4640fb00a59749a95cc08823d4ca95c8936ddd89503633b7076678df2da5df1e', 607, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 2, 1, '0100000002736865720000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f235fdbbbf5ef1665f3422211702126433c909487c456e594ef3a56910810396a05dde55c8adfb6689ead7f5610726afd5fd6ea35a3516dc68e57546146f7b6b0', 40927, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (4, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.051304+00', NULL, '0xea08984a16136275a6849daa793c20dc272a37fd1266e3502923950509516e92', 608, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 3, 1, '0100000003676176696e000000000000000000000000000000000000000000000000000000f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a30649fef47f6cf3dfb767cf5599eea11677bb6495956ec4cf75707d3aca7c06ed0e07b60bf3a2bf5e1a355793498de43e4d8dac50b892528f9664a03ceacc0005', 40928, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (5, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.051809+00', NULL, '0x7184bd484e97ae61dcdddeb3282470a191dee273383369c275875144ed7022fb', 610, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 4, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f00000000000000000000016345785d8a0000', 40930, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (6, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.052336+00', NULL, '0xdb8dd7b95f720396d1fb19a6789989983bfa3deab0c6b68af03ed0f34ed65e95', 611, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 5, 4, '0400000000f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a300000000000000000000016345785d8a0000', 40931, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (7, '2022-06-16 
03:02:24.117031+00', '2022-06-16 03:02:50.053406+00', NULL, '0x3f31a9fa97d4ce93e9858331e8ecce6699043b5c22b384f1b362a58e8d8a51f0', 614, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 6, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000100000000000000056bc75e2d63100000', 40934, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (8, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.053931+00', NULL, '0xd50ede290f499c12156ff59dad8dfef00082f980ecf53a3ab8aa6596c926ad4e', 615, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 7, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000200000000000000056bc75e2d63100000', 40935, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (9, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.054595+00', NULL, '0x33a5529ba1899f41b52d26ee4167808876f28bcdd4b1961919dd70432f64bb1f', 617, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 8, 2, '02000000000002001e000000000005', 40937, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (10, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.055102+00', NULL, '0x4f62a0d4fb2a17e225f39cf832b101aa5daadfa2895e1b19f9fa53f8f21ffea7', 618, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 9, 2, '02000100000001001e000000000005', 40938, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (11, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.056161+00', NULL, '0xed3c1beb710e7be01f88e5eee08a4e6d40e74af08e854a761a6e0a721f90d687', 619, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 10, 2, '02000200010002001e000000000005', 40939, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (12, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.056817+00', NULL, '0xa97822618abccde3fa3fc38753bb720751337e0a2e86a82f9a600380cbf12e2b', 621, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 11, 3, '030001003200000000000a', 40941, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (13, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.057379+00', NULL, '0xf2cd0648ddeacb4234e725966e36d642ab47be5e011c83602ab9d5f84e5de62a', 624, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 12, 5, '05000000000000000000464ed8ce7076abaf743f760468230b9d71fb7d900000000000008fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478000000000000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f0000', 40944, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (14, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.057914+00', NULL, '0xc0cdeaa451c6678d9368858ae55a76288c383c199871daabd0165e6c8a3e1237', 626, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 13, 17, '110000000000010000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f', 40946, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (15, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.058445+00', NULL, '0xcd9f5635ee8a285f545afa70d30cc9448a782ea6f72ac2baa4c6eef1ba2278e5', 628, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 14, 18, '120000000000000000000000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 40948, 2); - --- ---------------------------- --- Table structure for liquidity --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity"; 
-CREATE TABLE "public"."liquidity" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8 -) -; - --- ---------------------------- --- Records of liquidity --- ---------------------------- -INSERT INTO "public"."liquidity" VALUES (1, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 0, 0, '0', 2, '0', '0', '0', 30, 0, 5); -INSERT INTO "public"."liquidity" VALUES (2, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 1, 0, '0', 1, '0', '0', '0', 50, 0, 10); -INSERT INTO "public"."liquidity" VALUES (3, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 2, 1, '0', 2, '0', '0', '0', 30, 0, 5); - --- ---------------------------- --- Table structure for liquidity_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity_history"; -CREATE TABLE "public"."liquidity_history" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of liquidity_history --- ---------------------------- - --- ---------------------------- --- Table structure for mempool_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx"; -CREATE TABLE "public"."mempool_tx" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee_asset_id" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8, - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of mempool_tx --- ---------------------------- -INSERT INTO "public"."mempool_tx" VALUES (1, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce76403c-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":0,"AccountName":"treasury.legend","AccountNameHash":"wNIBqs6aLBfOcGbcb/7695MPExfEyV0GYbFkocWE1nY=","PubKey":"fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998"}', '', '', 0, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (2, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 
'ce771ece-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":1,"AccountName":"gas.legend","AccountNameHash":"aPvRfnfuxQHGd8zDHCYPMO6O0EnIk5AOCEuot/dWnOY=","PubKey":"1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93"}', '', '', 1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (3, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce7736d6-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":2,"AccountName":"sher.legend","AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","PubKey":"b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85"}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (4, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":3,"AccountName":"gavin.legend","AccountNameHash":"9KZJFrMtD0ZzaZct0Vb30r2FnAoQijs5WiUPGU9GgKM=","PubKey":"0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e"}', '', '', 3, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (5, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b11-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (6, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b12-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":3,"AccountNameHash":"9KZJFrMtD0ZzaZct0Vb30r2FnAoQijs5WiUPGU9GgKM=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 3, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (7, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b13-988fe0603efa', 4, 0, '0', -1, -1, 1, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (8, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b14-988fe0603efa', 4, 0, '0', -1, -1, 2, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":2,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (9, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b15-988fe0603efa', 2, 0, '0', -1, 0, 0, '0', '0', '{"TxType":2,"PairIndex":0,"AssetAId":0,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (10, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b16-988fe0603efa', 2, 0, '0', -1, 1, 0, '0', '0', 
'{"TxType":2,"PairIndex":1,"AssetAId":0,"AssetBId":1,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (11, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b17-988fe0603efa', 2, 0, '0', -1, 2, 0, '0', '0', '{"TxType":2,"PairIndex":2,"AssetAId":1,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (12, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b18-988fe0603efa', 3, 0, '0', -1, 1, 0, '0', '0', '{"TxType":3,"PairIndex":1,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (13, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b18-988fe0603efa', 5, 0, '0', 0, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":5,"AccountIndex":2,"NftIndex":0,"NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftContentHash":"j6MFmnxo2t3N+cA7HNHm0DQrfEqQ7WEDcsaBv+p+5Hg=","NftL1TokenId":0,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","CollectionId":0}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (14, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b19-988fe0603efa', 17, 0, '0', -1, -1, 1, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":17,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (15, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b1a-988fe0603efa', 18, 0, '0', 0, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":18,"AccountIndex":2,"CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftIndex":0,"CollectionId":0,"NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","CreatorAccountNameHash":"AA==","NftContentHash":"j6MFmnxo2t3N+cA7HNHm0DQrfEqQ7WEDcsaBv+p+5Hg=","NftL1TokenId":0}', '', '', 2, 0, 0, -1, 0); - --- ---------------------------- --- Table structure for mempool_tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx_detail"; -CREATE TABLE "public"."mempool_tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8 -) -; - --- ---------------------------- --- Records of mempool_tx_detail --- ---------------------------- -INSERT INTO "public"."mempool_tx_detail" VALUES (1, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 5, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (2, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 6, 0, 1, 3, 'gavin.legend', 
'{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (3, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 7, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (4, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 8, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (5, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 9, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (6, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 10, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (7, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 11, 2, 2, -1, '', '{"PairIndex":2,"AssetAId":1,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (8, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 12, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (9, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 13, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (10, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 13, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":2,"NftContentHash":"8fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478","NftL1TokenId":"0","NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","CreatorTreasuryRate":0,"CollectionId":0}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (11, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 14, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (12, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 15, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (13, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 15, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1); - --- ---------------------------- --- Table structure for offer --- ---------------------------- -DROP TABLE IF EXISTS "public"."offer"; -CREATE TABLE "public"."offer" ( - "id" int8 NOT NULL DEFAULT nextval('offer_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" 
timestamptz(6), - "offer_type" int8, - "offer_id" int8, - "account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default", - "listed_at" int8, - "expired_at" int8, - "treasury_rate" int8, - "sig" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of offer --- ---------------------------- - --- ---------------------------- --- Table structure for proof_sender --- ---------------------------- -DROP TABLE IF EXISTS "public"."proof_sender"; -CREATE TABLE "public"."proof_sender" ( - "id" int8 NOT NULL DEFAULT nextval('proof_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "proof_info" text COLLATE "pg_catalog"."default", - "block_number" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of proof_sender --- ---------------------------- - --- ---------------------------- --- Table structure for sys_config --- ---------------------------- -DROP TABLE IF EXISTS "public"."sys_config"; -CREATE TABLE "public"."sys_config" ( - "id" int8 NOT NULL DEFAULT nextval('sys_config_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "name" text COLLATE "pg_catalog"."default", - "value" text COLLATE "pg_catalog"."default", - "value_type" text COLLATE "pg_catalog"."default", - "comment" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of sys_config --- ---------------------------- -INSERT INTO "public"."sys_config" VALUES (1, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'SysGasFee', '1', 'float', 'based on ETH'); -INSERT INTO "public"."sys_config" VALUES (2, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'MaxAssetId', '9', 'int', 'max number of asset id'); -INSERT INTO "public"."sys_config" VALUES (3, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'TreasuryAccountIndex', '0', 'int', 'treasury index'); -INSERT INTO "public"."sys_config" VALUES (4, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'GasAccountIndex', '1', 'int', 'gas index'); -INSERT INTO "public"."sys_config" VALUES (5, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'ZecreyLegendContract', '0x045A98016DF9C1790caD1be1c4d69ba1fd2aB9d9', 'string', 'Zecrey contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (6, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'GovernanceContract', '0x45E486062b952225c97621567fCdD29eCE730B87', 'string', 'Governance contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (7, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'BscTestNetworkRpc', 'http://tf-dex-preview-validator-nlb-6fd109ac8b9d390a.elb.ap-northeast-1.amazonaws.com:8545', 'string', 'BSC network rpc'); -INSERT INTO "public"."sys_config" VALUES (8, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'Local_Test_Network_RPC', 'http://127.0.0.1:8545/', 'string', 'Local network rpc'); -INSERT INTO "public"."sys_config" VALUES (9, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'AssetGovernanceContract', '0x74ad9cd2e0656C49B3DB427a9aF8AC704C71DBbC', 'string', 'asset governance contract'); -INSERT INTO "public"."sys_config" VALUES (10, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'Validators', 
'{"0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57":{"Address":"0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57","IsActive":true}}', 'map[string]*ValidatorInfo', 'validator info'); -INSERT INTO "public"."sys_config" VALUES (11, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'Governor', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 'string', 'governor'); - --- ---------------------------- --- Table structure for tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx"; -CREATE TABLE "public"."tx" ( - "id" int8 NOT NULL DEFAULT nextval('tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "block_height" int8, - "block_id" int8, - "state_root" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8 -) -; - --- ---------------------------- --- Records of tx --- ---------------------------- - --- ---------------------------- --- Table structure for tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx_detail"; -CREATE TABLE "public"."tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8, - "nonce" int8, - "collection_nonce" int8 -) -; - --- ---------------------------- --- Records of tx_detail --- ---------------------------- - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_history_id_seq" -OWNED BY "public"."account_history"."id"; -SELECT setval('"public"."account_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_id_seq" -OWNED BY "public"."account"."id"; -SELECT setval('"public"."account_id_seq"', 4, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."asset_info_id_seq" -OWNED BY "public"."asset_info"."id"; -SELECT setval('"public"."asset_info_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_for_commit_id_seq" -OWNED BY "public"."block_for_commit"."id"; -SELECT setval('"public"."block_for_commit_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_id_seq" -OWNED BY "public"."block"."id"; -SELECT setval('"public"."block_id_seq"', 1, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."fail_tx_id_seq" -OWNED BY "public"."fail_tx"."id"; -SELECT 
setval('"public"."fail_tx_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_amount_id_seq" -OWNED BY "public"."l1_amount"."id"; -SELECT setval('"public"."l1_amount_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_block_monitor_id_seq" -OWNED BY "public"."l1_block_monitor"."id"; -SELECT setval('"public"."l1_block_monitor_id_seq"', 2, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_tx_sender_id_seq" -OWNED BY "public"."l1_tx_sender"."id"; -SELECT setval('"public"."l1_tx_sender_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_asset_info_id_seq" -OWNED BY "public"."l2_asset_info"."id"; -SELECT setval('"public"."l2_asset_info_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_block_event_monitor_id_seq" -OWNED BY "public"."l2_block_event_monitor"."id"; -SELECT setval('"public"."l2_block_event_monitor_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_collection_id_seq" -OWNED BY "public"."l2_nft_collection"."id"; -SELECT setval('"public"."l2_nft_collection_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_history_id_seq" -OWNED BY "public"."l2_nft_exchange_history"."id"; -SELECT setval('"public"."l2_nft_exchange_history_id_seq"', 2, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_id_seq" -OWNED BY "public"."l2_nft_exchange"."id"; -SELECT setval('"public"."l2_nft_exchange_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_history_id_seq" -OWNED BY "public"."l2_nft_history"."id"; -SELECT setval('"public"."l2_nft_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_id_seq" -OWNED BY "public"."l2_nft"."id"; -SELECT setval('"public"."l2_nft_id_seq"', 1, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -OWNED BY "public"."l2_nft_withdraw_history"."id"; -SELECT setval('"public"."l2_nft_withdraw_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_tx_event_monitor_id_seq" -OWNED BY "public"."l2_tx_event_monitor"."id"; -SELECT setval('"public"."l2_tx_event_monitor_id_seq"', 15, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_history_id_seq" -OWNED BY "public"."liquidity_history"."id"; -SELECT setval('"public"."liquidity_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_id_seq" -OWNED BY "public"."liquidity"."id"; -SELECT 
setval('"public"."liquidity_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_detail_id_seq" -OWNED BY "public"."mempool_tx_detail"."id"; -SELECT setval('"public"."mempool_tx_detail_id_seq"', 13, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_id_seq" -OWNED BY "public"."mempool_tx"."id"; -SELECT setval('"public"."mempool_tx_id_seq"', 15, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."offer_id_seq" -OWNED BY "public"."offer"."id"; -SELECT setval('"public"."offer_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."proof_sender_id_seq" -OWNED BY "public"."proof_sender"."id"; -SELECT setval('"public"."proof_sender_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."sys_config_id_seq" -OWNED BY "public"."sys_config"."id"; -SELECT setval('"public"."sys_config_id_seq"', 11, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_detail_id_seq" -OWNED BY "public"."tx_detail"."id"; -SELECT setval('"public"."tx_detail_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_id_seq" -OWNED BY "public"."tx"."id"; -SELECT setval('"public"."tx_id_seq"', 1, false); - --- ---------------------------- --- Indexes structure for table account --- ---------------------------- -CREATE UNIQUE INDEX "idx_account_account_index" ON "public"."account" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name" ON "public"."account" USING btree ( - "account_name" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name_hash" ON "public"."account" USING btree ( - "account_name_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_deleted_at" ON "public"."account" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_public_key" ON "public"."account" USING btree ( - "public_key" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account --- ---------------------------- -ALTER TABLE "public"."account" ADD CONSTRAINT "account_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table account_history --- ---------------------------- -CREATE INDEX "idx_account_history_account_index" ON "public"."account_history" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_history_deleted_at" ON "public"."account_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account_history --- ---------------------------- -ALTER TABLE "public"."account_history" ADD CONSTRAINT "account_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table asset_info --- 
---------------------------- -CREATE UNIQUE INDEX "idx_asset_info_asset_id" ON "public"."asset_info" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_asset_info_deleted_at" ON "public"."asset_info" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table asset_info --- ---------------------------- -ALTER TABLE "public"."asset_info" ADD CONSTRAINT "asset_info_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block --- ---------------------------- -CREATE INDEX "idx_block_deleted_at" ON "public"."block" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block --- ---------------------------- -ALTER TABLE "public"."block" ADD CONSTRAINT "block_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block_for_commit --- ---------------------------- -CREATE INDEX "idx_block_for_commit_deleted_at" ON "public"."block_for_commit" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block_for_commit --- ---------------------------- -ALTER TABLE "public"."block_for_commit" ADD CONSTRAINT "block_for_commit_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table fail_tx --- ---------------------------- -CREATE INDEX "idx_fail_tx_deleted_at" ON "public"."fail_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_fail_tx_tx_hash" ON "public"."fail_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table fail_tx --- ---------------------------- -ALTER TABLE "public"."fail_tx" ADD CONSTRAINT "fail_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_amount --- ---------------------------- -CREATE INDEX "idx_l1_amount_asset_id" ON "public"."l1_amount" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_block_height" ON "public"."l1_amount" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_deleted_at" ON "public"."l1_amount" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_amount --- ---------------------------- -ALTER TABLE "public"."l1_amount" ADD CONSTRAINT "l1_amount_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_block_monitor --- ---------------------------- -CREATE INDEX "idx_l1_block_monitor_deleted_at" ON "public"."l1_block_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_block_monitor --- ---------------------------- -ALTER TABLE "public"."l1_block_monitor" ADD CONSTRAINT "l1_block_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_tx_sender --- ---------------------------- -CREATE INDEX "idx_l1_tx_sender_deleted_at" ON "public"."l1_tx_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- 
---------------------------- --- Primary Key structure for table l1_tx_sender --- ---------------------------- -ALTER TABLE "public"."l1_tx_sender" ADD CONSTRAINT "l1_tx_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_asset_info --- ---------------------------- -CREATE UNIQUE INDEX "idx_l2_asset_info_asset_id" ON "public"."l2_asset_info" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_asset_info_deleted_at" ON "public"."l2_asset_info" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_asset_info --- ---------------------------- -ALTER TABLE "public"."l2_asset_info" ADD CONSTRAINT "l2_asset_info_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_block_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_block_event_monitor_block_event_type" ON "public"."l2_block_event_monitor" USING btree ( - "block_event_type" "pg_catalog"."int2_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_deleted_at" ON "public"."l2_block_event_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_l2_block_height" ON "public"."l2_block_event_monitor" USING btree ( - "l2_block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_block_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_block_event_monitor" ADD CONSTRAINT "l2_block_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft --- ---------------------------- -CREATE INDEX "idx_l2_nft_deleted_at" ON "public"."l2_nft" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_nft_index" ON "public"."l2_nft" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft --- ---------------------------- -ALTER TABLE "public"."l2_nft" ADD CONSTRAINT "l2_nft_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_collection --- ---------------------------- -CREATE INDEX "idx_l2_nft_collection_deleted_at" ON "public"."l2_nft_collection" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_collection --- ---------------------------- -ALTER TABLE "public"."l2_nft_collection" ADD CONSTRAINT "l2_nft_collection_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_deleted_at" ON "public"."l2_nft_exchange" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange" ADD CONSTRAINT "l2_nft_exchange_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_history_deleted_at" ON "public"."l2_nft_exchange_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC 
NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange_history" ADD CONSTRAINT "l2_nft_exchange_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_history_deleted_at" ON "public"."l2_nft_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_history" ADD CONSTRAINT "l2_nft_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_withdraw_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_withdraw_history_deleted_at" ON "public"."l2_nft_withdraw_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_withdraw_history_nft_index" ON "public"."l2_nft_withdraw_history" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_withdraw_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_withdraw_history" ADD CONSTRAINT "l2_nft_withdraw_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_tx_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_tx_event_monitor_deleted_at" ON "public"."l2_tx_event_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_tx_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_tx_event_monitor" ADD CONSTRAINT "l2_tx_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity --- ---------------------------- -CREATE INDEX "idx_liquidity_deleted_at" ON "public"."liquidity" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity --- ---------------------------- -ALTER TABLE "public"."liquidity" ADD CONSTRAINT "liquidity_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity_history --- ---------------------------- -CREATE INDEX "idx_liquidity_history_deleted_at" ON "public"."liquidity_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity_history --- ---------------------------- -ALTER TABLE "public"."liquidity_history" ADD CONSTRAINT "liquidity_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx --- ---------------------------- -CREATE INDEX "idx_mempool_tx_deleted_at" ON "public"."mempool_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_status" ON "public"."mempool_tx" USING btree ( - "status" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_mempool_tx_tx_hash" ON "public"."mempool_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table 
mempool_tx --- ---------------------------- -ALTER TABLE "public"."mempool_tx" ADD CONSTRAINT "mempool_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx_detail --- ---------------------------- -CREATE INDEX "idx_mempool_tx_detail_account_index" ON "public"."mempool_tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_deleted_at" ON "public"."mempool_tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_tx_id" ON "public"."mempool_tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table mempool_tx_detail --- ---------------------------- -ALTER TABLE "public"."mempool_tx_detail" ADD CONSTRAINT "mempool_tx_detail_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table offer --- ---------------------------- -CREATE INDEX "idx_offer_deleted_at" ON "public"."offer" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table offer --- ---------------------------- -ALTER TABLE "public"."offer" ADD CONSTRAINT "offer_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table proof_sender --- ---------------------------- -CREATE INDEX "idx_proof_sender_block_number" ON "public"."proof_sender" USING btree ( - "block_number" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_proof_sender_deleted_at" ON "public"."proof_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table proof_sender --- ---------------------------- -ALTER TABLE "public"."proof_sender" ADD CONSTRAINT "proof_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table sys_config --- ---------------------------- -CREATE INDEX "idx_sys_config_deleted_at" ON "public"."sys_config" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table sys_config --- ---------------------------- -ALTER TABLE "public"."sys_config" ADD CONSTRAINT "sys_config_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx --- ---------------------------- -CREATE INDEX "idx_tx_block_height" ON "public"."tx" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_block_id" ON "public"."tx" USING btree ( - "block_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_deleted_at" ON "public"."tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_tx_tx_hash" ON "public"."tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx --- ---------------------------- -ALTER TABLE "public"."tx" ADD CONSTRAINT "tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx_detail --- ---------------------------- -CREATE INDEX "idx_tx_detail_account_index" ON "public"."tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_deleted_at" ON 
"public"."tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_tx_id" ON "public"."tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx_detail --- ---------------------------- -ALTER TABLE "public"."tx_detail" ADD CONSTRAINT "tx_detail_pkey" PRIMARY KEY ("id"); diff --git a/common/proverUtil/beforeCommitter-l2-keccak256.sql b/common/proverUtil/beforeCommitter-l2-keccak256.sql deleted file mode 100644 index a7284792c..000000000 --- a/common/proverUtil/beforeCommitter-l2-keccak256.sql +++ /dev/null @@ -1,1682 +0,0 @@ -/* - Navicat Premium Data Transfer - - Source Server : local_docker - Source Server Type : PostgreSQL - Source Server Version : 140003 - Source Host : localhost:5432 - Source Catalog : zecreyLegend - Source Schema : public - - Target Server Type : PostgreSQL - Target Server Version : 140003 - File Encoding : 65001 - - Date: 16/06/2022 11:05:10 -*/ - - --- ---------------------------- --- Sequence structure for account_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_history_id_seq"; -CREATE SEQUENCE "public"."account_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for account_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."account_id_seq"; -CREATE SEQUENCE "public"."account_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for asset_info_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."asset_info_id_seq"; -CREATE SEQUENCE "public"."asset_info_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_for_commit_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_for_commit_id_seq"; -CREATE SEQUENCE "public"."block_for_commit_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for block_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."block_id_seq"; -CREATE SEQUENCE "public"."block_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for fail_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."fail_tx_id_seq"; -CREATE SEQUENCE "public"."fail_tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_amount_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_amount_id_seq"; -CREATE SEQUENCE "public"."l1_amount_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_block_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_block_monitor_id_seq"; -CREATE SEQUENCE "public"."l1_block_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l1_tx_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l1_tx_sender_id_seq"; -CREATE SEQUENCE 
"public"."l1_tx_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_asset_info_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_asset_info_id_seq"; -CREATE SEQUENCE "public"."l2_asset_info_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_block_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_block_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_block_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_collection_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_collection_id_seq"; -CREATE SEQUENCE "public"."l2_nft_collection_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_exchange_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_exchange_id_seq"; -CREATE SEQUENCE "public"."l2_nft_exchange_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_id_seq"; -CREATE SEQUENCE "public"."l2_nft_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_nft_withdraw_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_nft_withdraw_history_id_seq"; -CREATE SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for l2_tx_event_monitor_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."l2_tx_event_monitor_id_seq"; -CREATE SEQUENCE "public"."l2_tx_event_monitor_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_history_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_history_id_seq"; -CREATE SEQUENCE "public"."liquidity_history_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for liquidity_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."liquidity_id_seq"; -CREATE SEQUENCE "public"."liquidity_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for 
mempool_tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_detail_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for mempool_tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."mempool_tx_id_seq"; -CREATE SEQUENCE "public"."mempool_tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for offer_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."offer_id_seq"; -CREATE SEQUENCE "public"."offer_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for proof_sender_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."proof_sender_id_seq"; -CREATE SEQUENCE "public"."proof_sender_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for sys_config_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."sys_config_id_seq"; -CREATE SEQUENCE "public"."sys_config_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_detail_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_detail_id_seq"; -CREATE SEQUENCE "public"."tx_detail_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Sequence structure for tx_id_seq --- ---------------------------- -DROP SEQUENCE IF EXISTS "public"."tx_id_seq"; -CREATE SEQUENCE "public"."tx_id_seq" -INCREMENT 1 -MINVALUE 1 -MAXVALUE 9223372036854775807 -START 1 -CACHE 1; - --- ---------------------------- --- Table structure for account --- ---------------------------- -DROP TABLE IF EXISTS "public"."account"; -CREATE TABLE "public"."account" ( - "id" int8 NOT NULL DEFAULT nextval('account_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "public_key" text COLLATE "pg_catalog"."default", - "account_name_hash" text COLLATE "pg_catalog"."default", - "l1_address" text COLLATE "pg_catalog"."default", - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of account --- ---------------------------- -INSERT INTO "public"."account" VALUES (1, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 0, 'treasury.legend', 'fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998', 'c0d201aace9a2c17ce7066dc6ffefaf7930f1317c4c95d0661b164a1c584d676', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (2, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 1, 'gas.legend', '1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93', '68fbd17e77eec501c677ccc31c260f30ee8ed049c893900e084ba8b7f7569ce6', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', 
'2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (3, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 2, 'sher.legend', 'b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85', '04b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); -INSERT INTO "public"."account" VALUES (4, '2022-06-16 03:02:50.036268+00', '2022-06-16 03:02:50.036268+00', NULL, 3, 'gavin.legend', '0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e', 'f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a3', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 0, '{}', '2c642dc4ac8b021154b4248c4ab4a0b0fbcfebc1557ecc218fc3a3c19ece7f47', 0); - --- ---------------------------- --- Table structure for account_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."account_history"; -CREATE TABLE "public"."account_history" ( - "id" int8 NOT NULL DEFAULT nextval('account_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "nonce" int8, - "collection_nonce" int8, - "asset_info" text COLLATE "pg_catalog"."default", - "asset_root" text COLLATE "pg_catalog"."default", - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of account_history --- ---------------------------- - --- ---------------------------- --- Table structure for asset_info --- ---------------------------- -DROP TABLE IF EXISTS "public"."asset_info"; -CREATE TABLE "public"."asset_info" ( - "id" int8 NOT NULL DEFAULT nextval('asset_info_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "asset_name" text COLLATE "pg_catalog"."default", - "asset_symbol" text COLLATE "pg_catalog"."default", - "l1_address" text COLLATE "pg_catalog"."default", - "decimals" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of asset_info --- ---------------------------- -INSERT INTO "public"."asset_info" VALUES (1, '2022-06-16 03:01:29.40839+00', '2022-06-16 03:01:29.40839+00', NULL, 0, 'BNB', 'BNB', '0x00', 18, 0); -INSERT INTO "public"."asset_info" VALUES (2, '2022-06-16 03:02:04.096602+00', '2022-06-16 03:02:04.096602+00', NULL, 1, 'LEG', 'LEG', '0xDFF05aF25a5A56A3c7afFcB269235caE21eE53d8', 18, 0); -INSERT INTO "public"."asset_info" VALUES (3, '2022-06-16 03:02:04.096602+00', '2022-06-16 03:02:04.096602+00', NULL, 2, 'REY', 'REY', '0xE2Bd0916DFC2f5B9e05a4936982B67013Fbd338F', 18, 0); - --- ---------------------------- --- Table structure for block --- ---------------------------- -DROP TABLE IF EXISTS "public"."block"; -CREATE TABLE "public"."block" ( - "id" int8 NOT NULL DEFAULT nextval('block_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_commitment" text COLLATE "pg_catalog"."default", - "block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "priority_operations" int8, - "pending_on_chain_operations_hash" text COLLATE "pg_catalog"."default", - "pending_on_chain_operations_pub_data" text COLLATE "pg_catalog"."default", - "committed_tx_hash" text COLLATE "pg_catalog"."default", - "committed_at" int8, - "verified_tx_hash" text COLLATE "pg_catalog"."default", - "verified_at" int8, - "block_status" int8 
-) -; - --- ---------------------------- --- Records of block --- ---------------------------- -INSERT INTO "public"."block" VALUES (1, '2022-06-16 03:01:29.413451+00', '2022-06-16 03:01:29.413451+00', NULL, '0000000000000000000000000000000000000000000000000000000000000000', 0, '14e4e8ad4848558d7200530337052e1ad30f5385b3c7187c80ad85f48547b74f', 0, 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', '', '', 0, '', 0, 3); - --- ---------------------------- --- Table structure for block_for_commit --- ---------------------------- -DROP TABLE IF EXISTS "public"."block_for_commit"; -CREATE TABLE "public"."block_for_commit" ( - "id" int8 NOT NULL DEFAULT nextval('block_for_commit_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_height" int8, - "state_root" text COLLATE "pg_catalog"."default", - "public_data" text COLLATE "pg_catalog"."default", - "timestamp" int8, - "public_data_offsets" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of block_for_commit --- ---------------------------- - --- ---------------------------- --- Table structure for fail_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."fail_tx"; -CREATE TABLE "public"."fail_tx" ( - "id" int8 NOT NULL DEFAULT nextval('fail_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "asset_a_id" int8, - "asset_b_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of fail_tx --- ---------------------------- - --- ---------------------------- --- Table structure for l1_amount --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_amount"; -CREATE TABLE "public"."l1_amount" ( - "id" int8 NOT NULL DEFAULT nextval('l1_amount_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "block_height" int8, - "total_amount" int8 -) -; - --- ---------------------------- --- Records of l1_amount --- ---------------------------- - --- ---------------------------- --- Table structure for l1_block_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_block_monitor"; -CREATE TABLE "public"."l1_block_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l1_block_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_block_height" int8, - "block_info" text COLLATE "pg_catalog"."default", - "monitor_type" int8 -) -; - --- ---------------------------- --- Records of l1_block_monitor --- ---------------------------- -INSERT INTO "public"."l1_block_monitor" VALUES (1, '2022-06-16 03:02:04.09326+00', '2022-06-16 03:02:04.09326+00', NULL, 628, 
'[{"EventType":6,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":7,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":5,"TxHash":"0xb38b074f0ee26bac5564982245882b917f58866cebfb9625e9d15359fb6c2054"},{"EventType":4,"TxHash":"0x4b64139d0696ef64b76f44ebd5b367bb6e887363a4bee19ca4d549ba7a2d7d4f"},{"EventType":4,"TxHash":"0xedc14dd231ebb62ec501ad9b74b42e4c23257c0c175cfd5501a2ef9455cdad2f"}]', 1); -INSERT INTO "public"."l1_block_monitor" VALUES (2, '2022-06-16 03:02:24.114168+00', '2022-06-16 03:02:24.114168+00', NULL, 628, '[{"EventType":0,"TxHash":"0xb47eac43aeaa52c33ae80b1fe7433e221b6c8003a7c4ba54984eff42ac0770ef"},{"EventType":0,"TxHash":"0xebba40069a895f43d4f1cae8a114c4f416d8354a63fec0b845ee3ae296f1783b"},{"EventType":0,"TxHash":"0x4640fb00a59749a95cc08823d4ca95c8936ddd89503633b7076678df2da5df1e"},{"EventType":0,"TxHash":"0xea08984a16136275a6849daa793c20dc272a37fd1266e3502923950509516e92"},{"EventType":0,"TxHash":"0x7184bd484e97ae61dcdddeb3282470a191dee273383369c275875144ed7022fb"},{"EventType":0,"TxHash":"0xdb8dd7b95f720396d1fb19a6789989983bfa3deab0c6b68af03ed0f34ed65e95"},{"EventType":0,"TxHash":"0x3f31a9fa97d4ce93e9858331e8ecce6699043b5c22b384f1b362a58e8d8a51f0"},{"EventType":0,"TxHash":"0xd50ede290f499c12156ff59dad8dfef00082f980ecf53a3ab8aa6596c926ad4e"},{"EventType":0,"TxHash":"0x33a5529ba1899f41b52d26ee4167808876f28bcdd4b1961919dd70432f64bb1f"},{"EventType":0,"TxHash":"0x4f62a0d4fb2a17e225f39cf832b101aa5daadfa2895e1b19f9fa53f8f21ffea7"},{"EventType":0,"TxHash":"0xed3c1beb710e7be01f88e5eee08a4e6d40e74af08e854a761a6e0a721f90d687"},{"EventType":0,"TxHash":"0xa97822618abccde3fa3fc38753bb720751337e0a2e86a82f9a600380cbf12e2b"},{"EventType":0,"TxHash":"0xf2cd0648ddeacb4234e725966e36d642ab47be5e011c83602ab9d5f84e5de62a"},{"EventType":0,"TxHash":"0xc0cdeaa451c6678d9368858ae55a76288c383c199871daabd0165e6c8a3e1237"},{"EventType":0,"TxHash":"0xcd9f5635ee8a285f545afa70d30cc9448a782ea6f72ac2baa4c6eef1ba2278e5"}]', 0); - --- ---------------------------- --- Table structure for l1_tx_sender --- ---------------------------- -DROP TABLE IF EXISTS "public"."l1_tx_sender"; -CREATE TABLE "public"."l1_tx_sender" ( - "id" int8 NOT NULL DEFAULT nextval('l1_tx_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "tx_status" int8, - "tx_type" int2, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l1_tx_sender --- ---------------------------- - --- ---------------------------- --- Table structure for l2_asset_info --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_asset_info"; -CREATE TABLE "public"."l2_asset_info" ( - "id" int8 NOT NULL DEFAULT nextval('l2_asset_info_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "asset_id" int8, - "asset_address" text COLLATE "pg_catalog"."default", - "asset_name" text COLLATE "pg_catalog"."default", - "asset_symbol" text COLLATE "pg_catalog"."default", - "decimals" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_asset_info --- ---------------------------- -INSERT INTO "public"."l2_asset_info" VALUES (1, '2022-06-14 06:43:24.621929+00', '2022-06-14 06:43:24.621929+00', NULL, 0, '0x00', 'BNB', 'BNB', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (2, '2022-06-14 06:44:37.378403+00', '2022-06-14 06:44:37.378403+00', NULL, 
1, '0x6b8bdbAACf09C562409Eb5f811A619D5c1A38c9D', 'LEG', 'LEG', 18, 0); -INSERT INTO "public"."l2_asset_info" VALUES (3, '2022-06-14 06:44:37.378403+00', '2022-06-14 06:44:37.378403+00', NULL, 2, '0xdDD0811dAD9d7Ef6518e0275c2e52BD9B837b6cD', 'REY', 'REY', 18, 0); - --- ---------------------------- --- Table structure for l2_block_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_block_event_monitor"; -CREATE TABLE "public"."l2_block_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_block_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "block_event_type" int2, - "l1_block_height" int8, - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_block_event_monitor --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft"; -CREATE TABLE "public"."l2_nft" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft --- ---------------------------- -INSERT INTO "public"."l2_nft" VALUES (1, '2022-06-16 03:02:50.046873+00', '2022-06-16 03:02:50.046873+00', NULL, 0, 0, 2, '8fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478', '0x464ed8Ce7076Abaf743F760468230B9d71fB7D90', '0', 0, 0); -INSERT INTO "public"."l2_nft" VALUES (2, '2022-06-16 03:04:31.359403+00', '2022-06-16 03:04:31.359403+00', NULL, 1, 2, 3, '1fc88e6712229d3314dfd7c5a93f012189b39767628e74a7326113d0b003087d', '0', '0', 0, 1); - --- ---------------------------- --- Table structure for l2_nft_collection --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_collection"; -CREATE TABLE "public"."l2_nft_collection" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_collection_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "account_index" int8, - "name" text COLLATE "pg_catalog"."default", - "introduction" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of l2_nft_collection --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_exchange --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange"; -CREATE TABLE "public"."l2_nft_exchange" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of l2_nft_exchange --- ---------------------------- -INSERT INTO "public"."l2_nft_exchange" VALUES (1, '2022-06-16 03:04:46.33008+00', '2022-06-16 03:04:46.33008+00', NULL, 3, 2, 1, 0, '10000'); - --- ---------------------------- --- Table 
structure for l2_nft_exchange_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_exchange_history"; -CREATE TABLE "public"."l2_nft_exchange_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_exchange_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "buyer_account_index" int8, - "owner_account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_exchange_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_history"; -CREATE TABLE "public"."l2_nft_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8, - "status" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of l2_nft_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_nft_withdraw_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_nft_withdraw_history"; -CREATE TABLE "public"."l2_nft_withdraw_history" ( - "id" int8 NOT NULL DEFAULT nextval('l2_nft_withdraw_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "nft_index" int8, - "creator_account_index" int8, - "owner_account_index" int8, - "nft_content_hash" text COLLATE "pg_catalog"."default", - "nft_l1_address" text COLLATE "pg_catalog"."default", - "nft_l1_token_id" text COLLATE "pg_catalog"."default", - "creator_treasury_rate" int8, - "collection_id" int8 -) -; - --- ---------------------------- --- Records of l2_nft_withdraw_history --- ---------------------------- - --- ---------------------------- --- Table structure for l2_tx_event_monitor --- ---------------------------- -DROP TABLE IF EXISTS "public"."l2_tx_event_monitor"; -CREATE TABLE "public"."l2_tx_event_monitor" ( - "id" int8 NOT NULL DEFAULT nextval('l2_tx_event_monitor_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "l1_tx_hash" text COLLATE "pg_catalog"."default", - "l1_block_height" int8, - "sender_address" text COLLATE "pg_catalog"."default", - "request_id" int8, - "tx_type" int8, - "pubdata" text COLLATE "pg_catalog"."default", - "expiration_block" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of l2_tx_event_monitor --- ---------------------------- -INSERT INTO "public"."l2_tx_event_monitor" VALUES (1, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.047909+00', NULL, '0xb47eac43aeaa52c33ae80b1fe7433e221b6c8003a7c4ba54984eff42ac0770ef', 605, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 0, 1, '01000000007472656173757279000000000000000000000000000000000000000000000000c0d201aace9a2c17ce7066dc6ffefaf7930f1317c4c95d0661b164a1c584d6762005db7af2bdcfae1fa8d28833ae2f1995e9a8e0825377cff121db64b0db21b718a96ca582a72b16f464330c89ab73277cb96e42df105ebf5c9ac5330d47b8fc', 40925, 2); 
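Every table in this deleted dump follows the same id pattern: a dedicated sequence plus an "id" column declared NOT NULL DEFAULT nextval('<table>_id_seq'::regclass), with created_at / updated_at / deleted_at timestamps alongside (the triple reads like GORM's soft-delete convention, though that is only an inference from the column names). A minimal illustrative sketch of how that default behaves, reusing the account table and account_id_seq defined above; the inserted row values are hypothetical and are not part of the deleted file:

-- Omitting "id" lets account_id_seq assign the next value automatically.
INSERT INTO "public"."account"
    ("created_at", "updated_at", "account_index", "account_name", "nonce", "status")
VALUES (now(), now(), 4, 'example.legend', 0, 0);

-- The value the sequence just handed out in this session.
SELECT currval('account_id_seq');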
-INSERT INTO "public"."l2_tx_event_monitor" VALUES (2, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.049562+00', NULL, '0xebba40069a895f43d4f1cae8a114c4f416d8354a63fec0b845ee3ae296f1783b', 606, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 1, 1, '0100000001676173000000000000000000000000000000000000000000000000000000000068fbd17e77eec501c677ccc31c260f30ee8ed049c893900e084ba8b7f7569ce62c24415b75651673b0d7bbf145ac8d7cb744ba6926963d1d014836336df1317a134f4726b89983a8e7babbf6973e7ee16311e24328edf987bb0fbe7a494ec91e', 40926, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (3, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.05065+00', NULL, '0x4640fb00a59749a95cc08823d4ca95c8936ddd89503633b7076678df2da5df1e', 607, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 2, 1, '0100000002736865720000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f235fdbbbf5ef1665f3422211702126433c909487c456e594ef3a56910810396a05dde55c8adfb6689ead7f5610726afd5fd6ea35a3516dc68e57546146f7b6b0', 40927, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (4, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.051304+00', NULL, '0xea08984a16136275a6849daa793c20dc272a37fd1266e3502923950509516e92', 608, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 3, 1, '0100000003676176696e000000000000000000000000000000000000000000000000000000f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a30649fef47f6cf3dfb767cf5599eea11677bb6495956ec4cf75707d3aca7c06ed0e07b60bf3a2bf5e1a355793498de43e4d8dac50b892528f9664a03ceacc0005', 40928, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (5, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.051809+00', NULL, '0x7184bd484e97ae61dcdddeb3282470a191dee273383369c275875144ed7022fb', 610, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 4, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f00000000000000000000016345785d8a0000', 40930, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (6, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.052336+00', NULL, '0xdb8dd7b95f720396d1fb19a6789989983bfa3deab0c6b68af03ed0f34ed65e95', 611, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 5, 4, '0400000000f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a300000000000000000000016345785d8a0000', 40931, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (7, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.053406+00', NULL, '0x3f31a9fa97d4ce93e9858331e8ecce6699043b5c22b384f1b362a58e8d8a51f0', 614, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 6, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000100000000000000056bc75e2d63100000', 40934, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (8, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.053931+00', NULL, '0xd50ede290f499c12156ff59dad8dfef00082f980ecf53a3ab8aa6596c926ad4e', 615, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 7, 4, '040000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000200000000000000056bc75e2d63100000', 40935, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (9, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.054595+00', NULL, '0x33a5529ba1899f41b52d26ee4167808876f28bcdd4b1961919dd70432f64bb1f', 617, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 8, 2, '02000000000002001e000000000005', 40937, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (10, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.055102+00', NULL, 
'0x4f62a0d4fb2a17e225f39cf832b101aa5daadfa2895e1b19f9fa53f8f21ffea7', 618, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 9, 2, '02000100000001001e000000000005', 40938, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (11, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.056161+00', NULL, '0xed3c1beb710e7be01f88e5eee08a4e6d40e74af08e854a761a6e0a721f90d687', 619, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 10, 2, '02000200010002001e000000000005', 40939, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (12, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.056817+00', NULL, '0xa97822618abccde3fa3fc38753bb720751337e0a2e86a82f9a600380cbf12e2b', 621, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 11, 3, '030001003200000000000a', 40941, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (13, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.057379+00', NULL, '0xf2cd0648ddeacb4234e725966e36d642ab47be5e011c83602ab9d5f84e5de62a', 624, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 12, 5, '05000000000000000000464ed8ce7076abaf743f760468230b9d71fb7d900000000000008fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478000000000000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f0000', 40944, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (14, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.057914+00', NULL, '0xc0cdeaa451c6678d9368858ae55a76288c383c199871daabd0165e6c8a3e1237', 626, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 13, 17, '110000000000010000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f', 40946, 2); -INSERT INTO "public"."l2_tx_event_monitor" VALUES (15, '2022-06-16 03:02:24.117031+00', '2022-06-16 03:02:50.058445+00', NULL, '0xcd9f5635ee8a285f545afa70d30cc9448a782ea6f72ac2baa4c6eef1ba2278e5', 628, '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 14, 18, '120000000000000000000000000000000000000000000000000000000000000000000000000004b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 40948, 2); - --- ---------------------------- --- Table structure for liquidity --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity"; -CREATE TABLE "public"."liquidity" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8 -) -; - --- ---------------------------- --- Records of liquidity --- ---------------------------- -INSERT INTO "public"."liquidity" VALUES (1, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 0, 0, '0', 2, '0', '0', '0', 30, 0, 5); -INSERT INTO "public"."liquidity" VALUES (2, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 1, 0, '0', 1, '0', '0', '0', 50, 0, 10); -INSERT INTO "public"."liquidity" VALUES (3, '2022-06-16 03:02:50.044467+00', '2022-06-16 03:02:50.044467+00', NULL, 2, 1, '0', 2, '0', '0', '0', 30, 0, 5); - --- 
---------------------------- --- Table structure for liquidity_history --- ---------------------------- -DROP TABLE IF EXISTS "public"."liquidity_history"; -CREATE TABLE "public"."liquidity_history" ( - "id" int8 NOT NULL DEFAULT nextval('liquidity_history_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "pair_index" int8, - "asset_a_id" int8, - "asset_a" text COLLATE "pg_catalog"."default", - "asset_b_id" int8, - "asset_b" text COLLATE "pg_catalog"."default", - "lp_amount" text COLLATE "pg_catalog"."default", - "k_last" text COLLATE "pg_catalog"."default", - "fee_rate" int8, - "treasury_account_index" int8, - "treasury_rate" int8, - "l2_block_height" int8 -) -; - --- ---------------------------- --- Records of liquidity_history --- ---------------------------- - --- ---------------------------- --- Table structure for mempool_tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx"; -CREATE TABLE "public"."mempool_tx" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee_asset_id" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE "pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8, - "l2_block_height" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of mempool_tx --- ---------------------------- -INSERT INTO "public"."mempool_tx" VALUES (1, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce76403c-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":0,"AccountName":"treasury.legend","AccountNameHash":"wNIBqs6aLBfOcGbcb/7695MPExfEyV0GYbFkocWE1nY=","PubKey":"fcb8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998"}', '', '', 0, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (2, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce771ece-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":1,"AccountName":"gas.legend","AccountNameHash":"aPvRfnfuxQHGd8zDHCYPMO6O0EnIk5AOCEuot/dWnOY=","PubKey":"1ec94e497abe0fbb87f9ed2843e21163e17e3e97f6bbbae7a88399b826474f93"}', '', '', 1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (3, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce7736d6-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":1,"AccountIndex":2,"AccountName":"sher.legend","AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","PubKey":"b0b6f7466154578ec66d51a335ead65ffd6a7210567fad9e68b6df8a5ce5dd85"}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (4, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b10-988fe0603efa', 1, 0, '0', -1, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 
'{"TxType":1,"AccountIndex":3,"AccountName":"gavin.legend","AccountNameHash":"9KZJFrMtD0ZzaZct0Vb30r2FnAoQijs5WiUPGU9GgKM=","PubKey":"0500ccea3ca064968f5292b850ac8d4d3ee48d499357351a5ebfa2f30bb6070e"}', '', '', 3, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (5, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b11-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (6, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b12-988fe0603efa', 4, 0, '0', -1, -1, 0, '100000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":3,"AccountNameHash":"9KZJFrMtD0ZzaZct0Vb30r2FnAoQijs5WiUPGU9GgKM=","AssetId":0,"AssetAmount":100000000000000000}', '', '', 3, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (7, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b13-988fe0603efa', 4, 0, '0', -1, -1, 1, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (8, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b14-988fe0603efa', 4, 0, '0', -1, -1, 2, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":4,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":2,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (9, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b15-988fe0603efa', 2, 0, '0', -1, 0, 0, '0', '0', '{"TxType":2,"PairIndex":0,"AssetAId":0,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (10, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b16-988fe0603efa', 2, 0, '0', -1, 1, 0, '0', '0', '{"TxType":2,"PairIndex":1,"AssetAId":0,"AssetBId":1,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (11, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b17-988fe0603efa', 2, 0, '0', -1, 2, 0, '0', '0', '{"TxType":2,"PairIndex":2,"AssetAId":1,"AssetBId":2,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (12, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce774ff9-ed20-11ec-8b18-988fe0603efa', 3, 0, '0', -1, 1, 0, '0', '0', '{"TxType":3,"PairIndex":1,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', '', '', -1, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (13, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b18-988fe0603efa', 5, 0, '0', 0, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 
'{"TxType":5,"AccountIndex":2,"NftIndex":0,"NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftContentHash":"j6MFmnxo2t3N+cA7HNHm0DQrfEqQ7WEDcsaBv+p+5Hg=","NftL1TokenId":0,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","CollectionId":0}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (14, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b19-988fe0603efa', 17, 0, '0', -1, -1, 1, '100000000000000000000', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":17,"AccountIndex":2,"AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","AssetId":1,"AssetAmount":100000000000000000000}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (15, '2022-06-16 03:02:50.039037+00', '2022-06-16 03:02:50.039037+00', NULL, 'ce77cdf5-ed20-11ec-8b1a-988fe0603efa', 18, 0, '0', 0, -1, 0, '0', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', '{"TxType":18,"AccountIndex":2,"CreatorAccountIndex":0,"CreatorTreasuryRate":0,"NftIndex":0,"CollectionId":0,"NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","AccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","CreatorAccountNameHash":"AA==","NftContentHash":"j6MFmnxo2t3N+cA7HNHm0DQrfEqQ7WEDcsaBv+p+5Hg=","NftL1TokenId":0}', '', '', 2, 0, 0, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (16, '2022-06-16 03:03:35.430999+00', '2022-06-16 03:03:35.430999+00', NULL, 'a8900553-c58c-46a6-80ef-11d8d676ebf1', 6, 2, '5000', -1, -1, 0, '100000', '', '{"FromAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a3","AssetId":0,"AssetAmount":100000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"Memo":"transfer","CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1655355815404,"Nonce":1,"Sig":"KtUPLRrTacOmMynKNTA4zK4lrYxj/offgvMFW09O7IkDsACGHAg/HwjXfwVldHJ6id7lUt2KkxA9XqSheK/IVg=="}', '', 'transfer', 2, 1, 1655355815404, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (17, '2022-06-16 03:03:43.36864+00', '2022-06-16 03:03:43.36864+00', NULL, '0d178556-fc26-4119-afc9-63641ade9d00', 10, 2, '5000', -1, -1, 0, '10000000', '0x99AC8881834797ebC32f185ee27c2e96842e1a47', '{"FromAccountIndex":2,"AssetId":0,"AssetAmount":10000000,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ToAddress":"0x99AC8881834797ebC32f185ee27c2e96842e1a47","ExpiredAt":1655355823348,"Nonce":2,"Sig":"ZXetoL3NuMCyGb2qXyeT10GzXEwZKWYbHyFlJHy1KqMDINfoPymKN2OZceuSGoG7mwGKMWTkDKxvCtqWFlviGQ=="}', '', '', 2, 2, 1655355823348, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (18, '2022-06-16 03:03:50.344678+00', '2022-06-16 03:03:50.344678+00', NULL, '06c5b62f-36e9-4e1a-a7b9-c5d005135ca7', 8, 2, '5000', -1, 0, 0, '100000', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAAmount":100000,"AssetBId":2,"AssetBAmount":100000,"LpAmount":100000,"KLast":10000000000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1655355830311,"Nonce":3,"Sig":"ZH4H199j3phj03/LbkMZDq1Ke4KmzbIhE6+LN9sQWIoBPRsXkehzq/dVVAG06ALy+S8mBhd0Wk3RSGvB0pgbhQ=="}', '', '', 2, 3, 1655355830311, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (19, '2022-06-16 03:04:00.539552+00', '2022-06-16 03:04:00.539552+00', NULL, '8c4b9d9f-a08b-4b0c-8567-1d5e115beaed', 7, 0, '5000', -1, 0, 0, '100', '', 
'{"FromAccountIndex":2,"PairIndex":0,"AssetAId":2,"AssetAAmount":100,"AssetBId":0,"AssetBMinAmount":98,"AssetBAmountDelta":99,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1655355840514,"Nonce":4,"Sig":"lTYyacQAKuQ7gi5/nSaoeANfXG0pYxRcoQB/Y3nT6YEFgat7otzWJ6uN0Nf0lZSZWKqR5VzyDcMC/C4J1POXFw=="}', '', '', 2, 4, 1655355840514, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (20, '2022-06-16 03:04:08.952053+00', '2022-06-16 03:04:08.952053+00', NULL, 'd3fd9699-5176-46ab-9d02-f72b2e80d051', 9, 2, '5000', -1, 0, 0, '100', '', '{"FromAccountIndex":2,"PairIndex":0,"AssetAId":0,"AssetAMinAmount":98,"AssetBId":2,"AssetBMinAmount":99,"LpAmount":100,"AssetAAmountDelta":99,"AssetBAmountDelta":100,"KLast":9980200000,"TreasuryAmount":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1655355848921,"Nonce":5,"Sig":"ZMYD2OQyb51odjDLRAa7qOxvk/ZjvLNVjoYOaIPL3isAteMJXD6BfNlQAIVqwYUskyz1MN2OVi+Dod6J9LdpIw=="}', '', '', 2, 5, 1655355848921, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (21, '2022-06-16 03:04:20.519599+00', '2022-06-16 03:04:20.519599+00', NULL, 'dc98197d-485b-4da2-975e-e78d491ce4b9', 11, 2, '5000', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"CollectionId":1,"Name":"Zecrey Collection","Introduction":"Wonderful zecrey!","GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1655355860496,"Nonce":6,"Sig":"v7O5TtBHRxa0uBqELnsnzQj5y90DAz29h76B47ccbJMBZyupQDfjfCwBeD/L7e34wOnL9N4Z3Rb2+Gc9SF7tYQ=="}', '', '', 2, 6, 1655355860496, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (22, '2022-06-16 03:04:31.356251+00', '2022-06-16 03:04:31.356251+00', NULL, '000fdd5a-aee0-49dd-bcb7-123549404031', 12, 2, '5000', 1, -1, 0, '0', '', '{"CreatorAccountIndex":2,"ToAccountIndex":3,"ToAccountNameHash":"f4a64916b32d0f467369972dd156f7d2bd859c0a108a3b395a250f194f4680a3","NftIndex":1,"NftContentHash":"1fc88e6712229d3314dfd7c5a93f012189b39767628e74a7326113d0b003087d","NftCollectionId":1,"CreatorTreasuryRate":0,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1655355871326,"Nonce":7,"Sig":"J1F9haAXrw5A6KTaY+6Rg1GsQb1FJcJNHbYVfPnrCQcA3oftibaP+PTcX+J6tbBIr/OyusVOd3fZrhHjsKgNfA=="}', '', '', 2, 7, 1655355871326, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (23, '2022-06-16 03:04:38.436978+00', '2022-06-16 03:04:38.436978+00', NULL, '51e208b9-eea4-4f7f-b5c9-b85bee42df68', 13, 0, '5000', 1, -1, 0, '0', '', '{"FromAccountIndex":3,"ToAccountIndex":2,"ToAccountNameHash":"04b2dd1162802d057ed00dcb516ea627b207970520d1ad583f712cd6e954691f","NftIndex":1,"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CallData":"","CallDataHash":"Dd56AihX/sG4/6dmSpN6JQ065o81YGF1TTUx4mdBA9g=","ExpiredAt":1655355878420,"Nonce":1,"Sig":"Sq3f3Iat0hy+tALsajXe8aup2xtw5aDbQvLfYdjOP6EFTtDFck5y8jv8vQl1j6/gUYkMXQLyNsZbcI7h7qyNGA=="}', '', '', 3, 1, 1655355878420, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (24, '2022-06-16 03:04:46.327448+00', '2022-06-16 03:04:46.327448+00', NULL, '2901f0c1-a274-4ef7-a38a-3d629ab4fbe2', 14, 0, '5000', 1, -1, 0, '10000', '', 
'{"AccountIndex":2,"BuyOffer":{"Type":0,"OfferId":0,"AccountIndex":3,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1655348686296,"ExpiredAt":1655355886296,"TreasuryRate":200,"Sig":"KyuYNkngeab29jv6BanHeIzlB4u0bTUAwZzoVwS7+a8ED/wI/U4aBHHCEy8P0h1tHScO9c0o5t2n70wJxQiOcg=="},"SellOffer":{"Type":1,"OfferId":0,"AccountIndex":2,"NftIndex":1,"AssetId":0,"AssetAmount":10000,"ListedAt":1655348686296,"ExpiredAt":1655355886296,"TreasuryRate":200,"Sig":"nN4s9DcMNVCNmLwnNqvXWbtHbuXRufwslizVsyME9RIASok1Un//UiNcZrZt5orQawoEMNS8AuhQ20s5QeIY8A=="},"GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"CreatorAmount":0,"TreasuryAmount":200,"Nonce":8,"ExpiredAt":1655355886296,"Sig":"le4taxbmv2pIxpP2VHPoehRoVfH6cQFDw4qaaZy89hIETCMKhF4rVap5qiSvIii57WZgzS1AolkYPbxizH2M1Q=="}', '', '', 2, 8, 1655355886296, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (25, '2022-06-16 03:04:53.187937+00', '2022-06-16 03:04:53.187937+00', NULL, '70164a0c-c150-47e1-bc4c-fccfa9ad85b6', 15, 2, '5000', -1, -1, 0, 'sher.legend', '0', '{"AccountIndex":2,"OfferId":1,"GasAccountIndex":1,"GasFeeAssetId":2,"GasFeeAssetAmount":5000,"ExpiredAt":1655355893164,"Nonce":9,"Sig":"S1nxhQIKCyG/tyWFiOS2jevBq3EUh9rtDEGqXQd4QZUBnoyxnEfpxqUoy4alW8q8uieQlKS8E3fkeLuizypHuw=="}', '', '', 2, 9, 1655355893164, -1, 0); -INSERT INTO "public"."mempool_tx" VALUES (26, '2022-06-16 03:05:00.081721+00', '2022-06-16 03:05:00.081721+00', NULL, '9fec13eb-f5d3-461f-be42-77a6695c5cd6', 16, 0, '5000', 1, -1, 0, '0', '', '{"AccountIndex":3,"CreatorAccountIndex":2,"CreatorAccountNameHash":"BLLdEWKALQV+0A3LUW6mJ7IHlwUg0a1YP3Es1ulUaR8=","CreatorTreasuryRate":0,"NftIndex":1,"NftContentHash":"H8iOZxIinTMU39fFqT8BIYmzl2dijnSnMmET0LADCH0=","NftL1Address":"0","NftL1TokenId":0,"CollectionId":1,"ToAddress":"0xd5Aa3B56a2E2139DB315CdFE3b34149c8ed09171","GasAccountIndex":1,"GasFeeAssetId":0,"GasFeeAssetAmount":5000,"ExpiredAt":1655355900065,"Nonce":2,"Sig":"YzILMwZti1+1eNw5zzzLPZnJsyFR8s8610x+2vav+6EF54A4RgydrQnTYz2XW54AZx84qLv1Z4KZVYXF6ysAqg=="}', '', '', 3, 2, 1655355900065, -1, 0); - --- ---------------------------- --- Table structure for mempool_tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."mempool_tx_detail"; -CREATE TABLE "public"."mempool_tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('mempool_tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8 -) -; - --- ---------------------------- --- Records of mempool_tx_detail --- ---------------------------- -INSERT INTO "public"."mempool_tx_detail" VALUES (1, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 5, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (2, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 6, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (3, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 7, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES 
(4, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 8, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100000000000000000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (5, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 9, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (6, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 10, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (7, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 11, 2, 2, -1, '', '{"PairIndex":2,"AssetAId":1,"AssetA":0,"AssetBId":2,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (8, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 12, 1, 2, -1, '', '{"PairIndex":1,"AssetAId":0,"AssetA":0,"AssetBId":1,"AssetB":0,"LpAmount":0,"KLast":0,"FeeRate":50,"TreasuryAccountIndex":0,"TreasuryRate":10}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (9, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 13, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (10, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 13, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":2,"NftContentHash":"8fa3059a7c68daddcdf9c03b1cd1e6d0342b7c4a90ed610372c681bfea7ee478","NftL1TokenId":"0","NftL1Address":"0x464ed8Ce7076Abaf743F760468230B9d71fB7D90","CreatorTreasuryRate":0,"CollectionId":0}', 0, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (11, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 14, 1, 1, 2, 'sher.legend', '{"AssetId":1,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (12, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 15, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (13, '2022-06-16 03:02:50.041786+00', '2022-06-16 03:02:50.041786+00', NULL, 15, 0, 3, 2, 'sher.legend', '{"NftIndex":0,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (14, '2022-06-16 03:03:35.432741+00', '2022-06-16 03:03:35.432741+00', NULL, 16, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (15, '2022-06-16 03:03:35.432741+00', '2022-06-16 03:03:35.432741+00', NULL, 16, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (16, '2022-06-16 03:03:35.432741+00', '2022-06-16 03:03:35.432741+00', NULL, 16, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); 
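The mempool_tx_detail rows above hang off mempool_tx through tx_id (matching the mempool_tx.id values dumped earlier), store each per-account change as a JSON string in balance_delta, and keep their position in the "order" column, which stays quoted because ORDER is a reserved word. A hedged, illustrative query over the two tables as dumped here, not a query taken from the repository:

-- Per-account deltas for mempool entries, in detail order.
-- status = 0 is the value every mempool_tx row in this dump carries.
SELECT m.tx_hash,
       d.account_index,
       d.account_name,
       d.asset_id,
       d.balance_delta
FROM "public"."mempool_tx" m
JOIN "public"."mempool_tx_detail" d ON d.tx_id = m.id
WHERE m.status = 0
ORDER BY m.id, d."order";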
-INSERT INTO "public"."mempool_tx_detail" VALUES (17, '2022-06-16 03:03:35.432741+00', '2022-06-16 03:03:35.432741+00', NULL, 16, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (18, '2022-06-16 03:03:43.370767+00', '2022-06-16 03:03:43.370767+00', NULL, 17, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-10000000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (19, '2022-06-16 03:03:43.370767+00', '2022-06-16 03:03:43.370767+00', NULL, 17, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (20, '2022-06-16 03:03:43.370767+00', '2022-06-16 03:03:43.370767+00', NULL, 17, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (21, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (22, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-100000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (23, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (24, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":100000,"OfferCanceledOrFinalized":0}', 3, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (25, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":100000,"AssetBId":2,"AssetB":100000,"LpAmount":100000,"KLast":10000000000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 4, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (26, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 0, 1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (27, '2022-06-16 03:03:50.345774+00', '2022-06-16 03:03:50.345774+00', NULL, 18, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (28, '2022-06-16 03:04:00.541208+00', '2022-06-16 03:04:00.541208+00', NULL, 19, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (29, '2022-06-16 03:04:00.541208+00', '2022-06-16 03:04:00.541208+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (30, '2022-06-16 03:04:00.541208+00', '2022-06-16 03:04:00.541208+00', NULL, 19, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (31, '2022-06-16 03:04:00.541208+00', '2022-06-16 03:04:00.541208+00', NULL, 19, 0, 2, -1, '', 
'{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":100,"LpAmount":0,"KLast":0,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 3, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (32, '2022-06-16 03:04:00.541208+00', '2022-06-16 03:04:00.541208+00', NULL, 19, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (33, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":99,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (34, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":100,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (35, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (36, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":-100,"OfferCanceledOrFinalized":0}', 3, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (37, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 0, 1, 0, 'treasury.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 4, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (38, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 0, 2, -1, '', '{"PairIndex":0,"AssetAId":0,"AssetA":-99,"AssetBId":2,"AssetB":-100,"LpAmount":-100,"KLast":9980200000,"FeeRate":30,"TreasuryAccountIndex":0,"TreasuryRate":5}', 5, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (39, '2022-06-16 03:04:08.953687+00', '2022-06-16 03:04:08.953687+00', NULL, 20, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 6, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (40, '2022-06-16 03:04:20.521184+00', '2022-06-16 03:04:20.521184+00', NULL, 21, 0, 4, 2, 'sher.legend', '1', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (41, '2022-06-16 03:04:20.521184+00', '2022-06-16 03:04:20.521184+00', NULL, 21, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (42, '2022-06-16 03:04:20.521184+00', '2022-06-16 03:04:20.521184+00', NULL, 21, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (43, '2022-06-16 03:04:31.357828+00', '2022-06-16 03:04:31.357828+00', NULL, 22, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (44, '2022-06-16 03:04:31.357828+00', '2022-06-16 03:04:31.357828+00', NULL, 22, 2, 1, 3, 'gavin.legend', '{"AssetId":2,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (45, '2022-06-16 03:04:31.357828+00', '2022-06-16 03:04:31.357828+00', NULL, 22, 1, 3, 3, 'gavin.legend', 
'{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"1fc88e6712229d3314dfd7c5a93f012189b39767628e74a7326113d0b003087d","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (46, '2022-06-16 03:04:31.357828+00', '2022-06-16 03:04:31.357828+00', NULL, 22, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (47, '2022-06-16 03:04:38.438547+00', '2022-06-16 03:04:38.438547+00', NULL, 23, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (48, '2022-06-16 03:04:38.438547+00', '2022-06-16 03:04:38.438547+00', NULL, 23, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (49, '2022-06-16 03:04:38.438547+00', '2022-06-16 03:04:38.438547+00', NULL, 23, 1, 3, 2, 'sher.legend', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":2,"NftContentHash":"1fc88e6712229d3314dfd7c5a93f012189b39767628e74a7326113d0b003087d","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 2, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (50, '2022-06-16 03:04:38.438547+00', '2022-06-16 03:04:38.438547+00', NULL, 23, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (51, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (52, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":-10000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 1, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (53, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (54, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":9800,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (55, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":1}', 4, 2); -INSERT INTO "public"."mempool_tx_detail" VALUES (56, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 5, 3); -INSERT INTO "public"."mempool_tx_detail" VALUES (57, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 1, 3, -1, '', '{"NftIndex":1,"CreatorAccountIndex":2,"OwnerAccountIndex":3,"NftContentHash":"1fc88e6712229d3314dfd7c5a93f012189b39767628e74a7326113d0b003087d","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":1}', 6, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (58, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 1, 'gas.legend', 
'{"AssetId":0,"Balance":200,"LpAmount":0,"OfferCanceledOrFinalized":0}', 7, 4); -INSERT INTO "public"."mempool_tx_detail" VALUES (59, '2022-06-16 03:04:46.329005+00', '2022-06-16 03:04:46.329005+00', NULL, 24, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 8, 4); -INSERT INTO "public"."mempool_tx_detail" VALUES (60, '2022-06-16 03:04:53.19007+00', '2022-06-16 03:04:53.19007+00', NULL, 25, 2, 1, 2, 'sher.legend', '{"AssetId":2,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (61, '2022-06-16 03:04:53.19007+00', '2022-06-16 03:04:53.19007+00', NULL, 25, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":3}', 1, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (62, '2022-06-16 03:04:53.19007+00', '2022-06-16 03:04:53.19007+00', NULL, 25, 2, 1, 1, 'gas.legend', '{"AssetId":2,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (63, '2022-06-16 03:05:00.083263+00', '2022-06-16 03:05:00.083263+00', NULL, 26, 0, 1, 3, 'gavin.legend', '{"AssetId":0,"Balance":-5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 0, 0); -INSERT INTO "public"."mempool_tx_detail" VALUES (64, '2022-06-16 03:05:00.083263+00', '2022-06-16 03:05:00.083263+00', NULL, 26, 1, 3, -1, '', '{"NftIndex":1,"CreatorAccountIndex":0,"OwnerAccountIndex":0,"NftContentHash":"0","NftL1TokenId":"0","NftL1Address":"0","CreatorTreasuryRate":0,"CollectionId":0}', 1, -1); -INSERT INTO "public"."mempool_tx_detail" VALUES (65, '2022-06-16 03:05:00.083263+00', '2022-06-16 03:05:00.083263+00', NULL, 26, 0, 1, 2, 'sher.legend', '{"AssetId":0,"Balance":0,"LpAmount":0,"OfferCanceledOrFinalized":0}', 2, 1); -INSERT INTO "public"."mempool_tx_detail" VALUES (66, '2022-06-16 03:05:00.083263+00', '2022-06-16 03:05:00.083263+00', NULL, 26, 0, 1, 1, 'gas.legend', '{"AssetId":0,"Balance":5000,"LpAmount":0,"OfferCanceledOrFinalized":0}', 3, 2); - --- ---------------------------- --- Table structure for offer --- ---------------------------- -DROP TABLE IF EXISTS "public"."offer"; -CREATE TABLE "public"."offer" ( - "id" int8 NOT NULL DEFAULT nextval('offer_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "offer_type" int8, - "offer_id" int8, - "account_index" int8, - "nft_index" int8, - "asset_id" int8, - "asset_amount" text COLLATE "pg_catalog"."default", - "listed_at" int8, - "expired_at" int8, - "treasury_rate" int8, - "sig" text COLLATE "pg_catalog"."default", - "status" int8 -) -; - --- ---------------------------- --- Records of offer --- ---------------------------- - --- ---------------------------- --- Table structure for proof_sender --- ---------------------------- -DROP TABLE IF EXISTS "public"."proof_sender"; -CREATE TABLE "public"."proof_sender" ( - "id" int8 NOT NULL DEFAULT nextval('proof_sender_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "proof_info" text COLLATE "pg_catalog"."default", - "block_number" int8, - "status" int8 -) -; - --- ---------------------------- --- Records of proof_sender --- ---------------------------- - --- ---------------------------- --- Table structure for sys_config --- ---------------------------- -DROP TABLE IF EXISTS "public"."sys_config"; -CREATE TABLE "public"."sys_config" ( - "id" int8 NOT NULL DEFAULT nextval('sys_config_id_seq'::regclass), - "created_at" 
timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "name" text COLLATE "pg_catalog"."default", - "value" text COLLATE "pg_catalog"."default", - "value_type" text COLLATE "pg_catalog"."default", - "comment" text COLLATE "pg_catalog"."default" -) -; - --- ---------------------------- --- Records of sys_config --- ---------------------------- -INSERT INTO "public"."sys_config" VALUES (1, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'SysGasFee', '1', 'float', 'based on ETH'); -INSERT INTO "public"."sys_config" VALUES (2, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'MaxAssetId', '9', 'int', 'max number of asset id'); -INSERT INTO "public"."sys_config" VALUES (3, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'TreasuryAccountIndex', '0', 'int', 'treasury index'); -INSERT INTO "public"."sys_config" VALUES (4, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'GasAccountIndex', '1', 'int', 'gas index'); -INSERT INTO "public"."sys_config" VALUES (5, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'ZecreyLegendContract', '0x045A98016DF9C1790caD1be1c4d69ba1fd2aB9d9', 'string', 'Zecrey contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (6, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'GovernanceContract', '0x45E486062b952225c97621567fCdD29eCE730B87', 'string', 'Governance contract on BSC'); -INSERT INTO "public"."sys_config" VALUES (7, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'BscTestNetworkRpc', 'http://tf-dex-preview-validator-nlb-6fd109ac8b9d390a.elb.ap-northeast-1.amazonaws.com:8545', 'string', 'BSC network rpc'); -INSERT INTO "public"."sys_config" VALUES (8, '2022-06-16 03:01:29.411105+00', '2022-06-16 03:01:29.411105+00', NULL, 'Local_Test_Network_RPC', 'http://127.0.0.1:8545/', 'string', 'Local network rpc'); -INSERT INTO "public"."sys_config" VALUES (9, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'AssetGovernanceContract', '0x74ad9cd2e0656C49B3DB427a9aF8AC704C71DBbC', 'string', 'asset governance contract'); -INSERT INTO "public"."sys_config" VALUES (10, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'Validators', '{"0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57":{"Address":"0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57","IsActive":true}}', 'map[string]*ValidatorInfo', 'validator info'); -INSERT INTO "public"."sys_config" VALUES (11, '2022-06-16 03:02:04.098828+00', '2022-06-16 03:02:04.098828+00', NULL, 'Governor', '0x7dD2Ac589eFCC8888474d95Cb4b084CCa2d8aA57', 'string', 'governor'); - --- ---------------------------- --- Table structure for tx --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx"; -CREATE TABLE "public"."tx" ( - "id" int8 NOT NULL DEFAULT nextval('tx_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_hash" text COLLATE "pg_catalog"."default", - "tx_type" int8, - "gas_fee" text COLLATE "pg_catalog"."default", - "gas_fee_asset_id" int8, - "tx_status" int8, - "block_height" int8, - "block_id" int8, - "state_root" text COLLATE "pg_catalog"."default", - "nft_index" int8, - "pair_index" int8, - "asset_id" int8, - "tx_amount" text COLLATE "pg_catalog"."default", - "native_address" text COLLATE "pg_catalog"."default", - "tx_info" text COLLATE "pg_catalog"."default", - "extra_info" text COLLATE "pg_catalog"."default", - "memo" text COLLATE 
"pg_catalog"."default", - "account_index" int8, - "nonce" int8, - "expired_at" int8 -) -; - --- ---------------------------- --- Records of tx --- ---------------------------- - --- ---------------------------- --- Table structure for tx_detail --- ---------------------------- -DROP TABLE IF EXISTS "public"."tx_detail"; -CREATE TABLE "public"."tx_detail" ( - "id" int8 NOT NULL DEFAULT nextval('tx_detail_id_seq'::regclass), - "created_at" timestamptz(6), - "updated_at" timestamptz(6), - "deleted_at" timestamptz(6), - "tx_id" int8, - "asset_id" int8, - "asset_type" int8, - "account_index" int8, - "account_name" text COLLATE "pg_catalog"."default", - "balance" text COLLATE "pg_catalog"."default", - "balance_delta" text COLLATE "pg_catalog"."default", - "order" int8, - "account_order" int8, - "nonce" int8, - "collection_nonce" int8 -) -; - --- ---------------------------- --- Records of tx_detail --- ---------------------------- - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_history_id_seq" -OWNED BY "public"."account_history"."id"; -SELECT setval('"public"."account_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."account_id_seq" -OWNED BY "public"."account"."id"; -SELECT setval('"public"."account_id_seq"', 4, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."asset_info_id_seq" -OWNED BY "public"."asset_info"."id"; -SELECT setval('"public"."asset_info_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_for_commit_id_seq" -OWNED BY "public"."block_for_commit"."id"; -SELECT setval('"public"."block_for_commit_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."block_id_seq" -OWNED BY "public"."block"."id"; -SELECT setval('"public"."block_id_seq"', 1, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."fail_tx_id_seq" -OWNED BY "public"."fail_tx"."id"; -SELECT setval('"public"."fail_tx_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_amount_id_seq" -OWNED BY "public"."l1_amount"."id"; -SELECT setval('"public"."l1_amount_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_block_monitor_id_seq" -OWNED BY "public"."l1_block_monitor"."id"; -SELECT setval('"public"."l1_block_monitor_id_seq"', 2, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l1_tx_sender_id_seq" -OWNED BY "public"."l1_tx_sender"."id"; -SELECT setval('"public"."l1_tx_sender_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_asset_info_id_seq" -OWNED BY "public"."l2_asset_info"."id"; -SELECT setval('"public"."l2_asset_info_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_block_event_monitor_id_seq" -OWNED BY "public"."l2_block_event_monitor"."id"; 
-SELECT setval('"public"."l2_block_event_monitor_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_collection_id_seq" -OWNED BY "public"."l2_nft_collection"."id"; -SELECT setval('"public"."l2_nft_collection_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_history_id_seq" -OWNED BY "public"."l2_nft_exchange_history"."id"; -SELECT setval('"public"."l2_nft_exchange_history_id_seq"', 2, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_exchange_id_seq" -OWNED BY "public"."l2_nft_exchange"."id"; -SELECT setval('"public"."l2_nft_exchange_id_seq"', 1, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_history_id_seq" -OWNED BY "public"."l2_nft_history"."id"; -SELECT setval('"public"."l2_nft_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_id_seq" -OWNED BY "public"."l2_nft"."id"; -SELECT setval('"public"."l2_nft_id_seq"', 2, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_nft_withdraw_history_id_seq" -OWNED BY "public"."l2_nft_withdraw_history"."id"; -SELECT setval('"public"."l2_nft_withdraw_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."l2_tx_event_monitor_id_seq" -OWNED BY "public"."l2_tx_event_monitor"."id"; -SELECT setval('"public"."l2_tx_event_monitor_id_seq"', 15, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_history_id_seq" -OWNED BY "public"."liquidity_history"."id"; -SELECT setval('"public"."liquidity_history_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."liquidity_id_seq" -OWNED BY "public"."liquidity"."id"; -SELECT setval('"public"."liquidity_id_seq"', 3, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_detail_id_seq" -OWNED BY "public"."mempool_tx_detail"."id"; -SELECT setval('"public"."mempool_tx_detail_id_seq"', 66, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."mempool_tx_id_seq" -OWNED BY "public"."mempool_tx"."id"; -SELECT setval('"public"."mempool_tx_id_seq"', 26, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."offer_id_seq" -OWNED BY "public"."offer"."id"; -SELECT setval('"public"."offer_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."proof_sender_id_seq" -OWNED BY "public"."proof_sender"."id"; -SELECT setval('"public"."proof_sender_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."sys_config_id_seq" -OWNED BY "public"."sys_config"."id"; -SELECT 
setval('"public"."sys_config_id_seq"', 11, true); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_detail_id_seq" -OWNED BY "public"."tx_detail"."id"; -SELECT setval('"public"."tx_detail_id_seq"', 1, false); - --- ---------------------------- --- Alter sequences owned by --- ---------------------------- -ALTER SEQUENCE "public"."tx_id_seq" -OWNED BY "public"."tx"."id"; -SELECT setval('"public"."tx_id_seq"', 1, false); - --- ---------------------------- --- Indexes structure for table account --- ---------------------------- -CREATE UNIQUE INDEX "idx_account_account_index" ON "public"."account" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name" ON "public"."account" USING btree ( - "account_name" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_account_name_hash" ON "public"."account" USING btree ( - "account_name_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_deleted_at" ON "public"."account" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_account_public_key" ON "public"."account" USING btree ( - "public_key" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account --- ---------------------------- -ALTER TABLE "public"."account" ADD CONSTRAINT "account_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table account_history --- ---------------------------- -CREATE INDEX "idx_account_history_account_index" ON "public"."account_history" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_account_history_deleted_at" ON "public"."account_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table account_history --- ---------------------------- -ALTER TABLE "public"."account_history" ADD CONSTRAINT "account_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table asset_info --- ---------------------------- -CREATE UNIQUE INDEX "idx_asset_info_asset_id" ON "public"."asset_info" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_asset_info_deleted_at" ON "public"."asset_info" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table asset_info --- ---------------------------- -ALTER TABLE "public"."asset_info" ADD CONSTRAINT "asset_info_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block --- ---------------------------- -CREATE INDEX "idx_block_deleted_at" ON "public"."block" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block --- ---------------------------- -ALTER TABLE "public"."block" ADD CONSTRAINT "block_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table block_for_commit --- ---------------------------- -CREATE INDEX "idx_block_for_commit_deleted_at" ON "public"."block_for_commit" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC 
NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table block_for_commit --- ---------------------------- -ALTER TABLE "public"."block_for_commit" ADD CONSTRAINT "block_for_commit_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table fail_tx --- ---------------------------- -CREATE INDEX "idx_fail_tx_deleted_at" ON "public"."fail_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_fail_tx_tx_hash" ON "public"."fail_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table fail_tx --- ---------------------------- -ALTER TABLE "public"."fail_tx" ADD CONSTRAINT "fail_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_amount --- ---------------------------- -CREATE INDEX "idx_l1_amount_asset_id" ON "public"."l1_amount" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_block_height" ON "public"."l1_amount" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l1_amount_deleted_at" ON "public"."l1_amount" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_amount --- ---------------------------- -ALTER TABLE "public"."l1_amount" ADD CONSTRAINT "l1_amount_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_block_monitor --- ---------------------------- -CREATE INDEX "idx_l1_block_monitor_deleted_at" ON "public"."l1_block_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_block_monitor --- ---------------------------- -ALTER TABLE "public"."l1_block_monitor" ADD CONSTRAINT "l1_block_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l1_tx_sender --- ---------------------------- -CREATE INDEX "idx_l1_tx_sender_deleted_at" ON "public"."l1_tx_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l1_tx_sender --- ---------------------------- -ALTER TABLE "public"."l1_tx_sender" ADD CONSTRAINT "l1_tx_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_asset_info --- ---------------------------- -CREATE UNIQUE INDEX "idx_l2_asset_info_asset_id" ON "public"."l2_asset_info" USING btree ( - "asset_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_asset_info_deleted_at" ON "public"."l2_asset_info" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_asset_info --- ---------------------------- -ALTER TABLE "public"."l2_asset_info" ADD CONSTRAINT "l2_asset_info_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_block_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_block_event_monitor_block_event_type" ON "public"."l2_block_event_monitor" USING btree ( - "block_event_type" "pg_catalog"."int2_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_deleted_at" ON "public"."l2_block_event_monitor" 
USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_l2_block_event_monitor_l2_block_height" ON "public"."l2_block_event_monitor" USING btree ( - "l2_block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_block_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_block_event_monitor" ADD CONSTRAINT "l2_block_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft --- ---------------------------- -CREATE INDEX "idx_l2_nft_deleted_at" ON "public"."l2_nft" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_nft_index" ON "public"."l2_nft" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft --- ---------------------------- -ALTER TABLE "public"."l2_nft" ADD CONSTRAINT "l2_nft_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_collection --- ---------------------------- -CREATE INDEX "idx_l2_nft_collection_deleted_at" ON "public"."l2_nft_collection" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_collection --- ---------------------------- -ALTER TABLE "public"."l2_nft_collection" ADD CONSTRAINT "l2_nft_collection_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_deleted_at" ON "public"."l2_nft_exchange" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange" ADD CONSTRAINT "l2_nft_exchange_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_exchange_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_exchange_history_deleted_at" ON "public"."l2_nft_exchange_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_exchange_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_exchange_history" ADD CONSTRAINT "l2_nft_exchange_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_history_deleted_at" ON "public"."l2_nft_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_nft_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_history" ADD CONSTRAINT "l2_nft_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_nft_withdraw_history --- ---------------------------- -CREATE INDEX "idx_l2_nft_withdraw_history_deleted_at" ON "public"."l2_nft_withdraw_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_l2_nft_withdraw_history_nft_index" ON "public"."l2_nft_withdraw_history" USING btree ( - "nft_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- 
---------------------------- --- Primary Key structure for table l2_nft_withdraw_history --- ---------------------------- -ALTER TABLE "public"."l2_nft_withdraw_history" ADD CONSTRAINT "l2_nft_withdraw_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table l2_tx_event_monitor --- ---------------------------- -CREATE INDEX "idx_l2_tx_event_monitor_deleted_at" ON "public"."l2_tx_event_monitor" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table l2_tx_event_monitor --- ---------------------------- -ALTER TABLE "public"."l2_tx_event_monitor" ADD CONSTRAINT "l2_tx_event_monitor_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity --- ---------------------------- -CREATE INDEX "idx_liquidity_deleted_at" ON "public"."liquidity" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity --- ---------------------------- -ALTER TABLE "public"."liquidity" ADD CONSTRAINT "liquidity_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table liquidity_history --- ---------------------------- -CREATE INDEX "idx_liquidity_history_deleted_at" ON "public"."liquidity_history" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table liquidity_history --- ---------------------------- -ALTER TABLE "public"."liquidity_history" ADD CONSTRAINT "liquidity_history_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx --- ---------------------------- -CREATE INDEX "idx_mempool_tx_deleted_at" ON "public"."mempool_tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_status" ON "public"."mempool_tx" USING btree ( - "status" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_mempool_tx_tx_hash" ON "public"."mempool_tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table mempool_tx --- ---------------------------- -ALTER TABLE "public"."mempool_tx" ADD CONSTRAINT "mempool_tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table mempool_tx_detail --- ---------------------------- -CREATE INDEX "idx_mempool_tx_detail_account_index" ON "public"."mempool_tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_deleted_at" ON "public"."mempool_tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_mempool_tx_detail_tx_id" ON "public"."mempool_tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table mempool_tx_detail --- ---------------------------- -ALTER TABLE "public"."mempool_tx_detail" ADD CONSTRAINT "mempool_tx_detail_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table offer --- ---------------------------- -CREATE INDEX "idx_offer_deleted_at" ON "public"."offer" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- 
Primary Key structure for table offer --- ---------------------------- -ALTER TABLE "public"."offer" ADD CONSTRAINT "offer_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table proof_sender --- ---------------------------- -CREATE INDEX "idx_proof_sender_block_number" ON "public"."proof_sender" USING btree ( - "block_number" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_proof_sender_deleted_at" ON "public"."proof_sender" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table proof_sender --- ---------------------------- -ALTER TABLE "public"."proof_sender" ADD CONSTRAINT "proof_sender_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table sys_config --- ---------------------------- -CREATE INDEX "idx_sys_config_deleted_at" ON "public"."sys_config" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table sys_config --- ---------------------------- -ALTER TABLE "public"."sys_config" ADD CONSTRAINT "sys_config_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx --- ---------------------------- -CREATE INDEX "idx_tx_block_height" ON "public"."tx" USING btree ( - "block_height" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_block_id" ON "public"."tx" USING btree ( - "block_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_deleted_at" ON "public"."tx" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE UNIQUE INDEX "idx_tx_tx_hash" ON "public"."tx" USING btree ( - "tx_hash" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx --- ---------------------------- -ALTER TABLE "public"."tx" ADD CONSTRAINT "tx_pkey" PRIMARY KEY ("id"); - --- ---------------------------- --- Indexes structure for table tx_detail --- ---------------------------- -CREATE INDEX "idx_tx_detail_account_index" ON "public"."tx_detail" USING btree ( - "account_index" "pg_catalog"."int8_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_deleted_at" ON "public"."tx_detail" USING btree ( - "deleted_at" "pg_catalog"."timestamptz_ops" ASC NULLS LAST -); -CREATE INDEX "idx_tx_detail_tx_id" ON "public"."tx_detail" USING btree ( - "tx_id" "pg_catalog"."int8_ops" ASC NULLS LAST -); - --- ---------------------------- --- Primary Key structure for table tx_detail --- ---------------------------- -ALTER TABLE "public"."tx_detail" ADD CONSTRAINT "tx_detail_pkey" PRIMARY KEY ("id"); diff --git a/common/proverUtil/cancelOffer.go b/common/proverUtil/cancelOffer.go deleted file mode 100644 index f7988c287..000000000 --- a/common/proverUtil/cancelOffer.go +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructCancelOfferCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeCancelOffer { - logx.Errorf("[ConstructCancelOfferCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCancelOfferCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] invalid params") - return nil, errors.New("[ConstructCancelOfferCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseCancelOfferTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoCancelOfferTx(txInfo) - if err != nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.CancelOfferTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructCancelOfferCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoCancelOfferTx(txInfo *commonTx.CancelOfferTxInfo) (info *CryptoCancelOfferTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoCancelOfferTx{ - AccountIndex: txInfo.AccountIndex, - OfferId: txInfo.OfferId, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - } - return info, nil -} diff --git a/common/proverUtil/cancelOffer_test.go b/common/proverUtil/cancelOffer_test.go deleted file mode 100644 index 475475562..000000000 --- a/common/proverUtil/cancelOffer_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the 
License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructCancelOfferCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(25) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(24) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructCancelOfferCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/createCollection.go b/common/proverUtil/createCollection.go deleted file mode 100644 index 3f169effa..000000000 --- a/common/proverUtil/createCollection.go +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructCreateCollectionCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeCreateCollection { - logx.Errorf("[ConstructCreateCollectionCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCreateCollectionCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] invalid params") - return nil, errors.New("[ConstructCreateCollectionCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseCreateCollectionTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoCreateCollectionTx(txInfo) - if err != nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.CreateCollectionTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructCreateCollectionCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoCreateCollectionTx(txInfo *commonTx.CreateCollectionTxInfo) (info *CryptoCreateCollectionTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoCreateCollectionTx{ - AccountIndex: txInfo.AccountIndex, - CollectionId: txInfo.CollectionId, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - ExpiredAt: txInfo.ExpiredAt, - Nonce: txInfo.Nonce, - } - return info, nil -} diff --git a/common/proverUtil/createCollection_test.go b/common/proverUtil/createCollection_test.go deleted file mode 100644 index 50b9d63bc..000000000 --- a/common/proverUtil/createCollection_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the 
"License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructCreateCollectionCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(21) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(20) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructCreateCollectionCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/createPair.go b/common/proverUtil/createPair.go deleted file mode 100644 index 1280529c4..000000000 --- a/common/proverUtil/createPair.go +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructCreatePairCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeCreatePair { - logx.Errorf("[ConstructCreatePairCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCreatePairCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructCreatePairCryptoTx] invalid params") - return nil, errors.New("[ConstructCreatePairCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseCreatePairTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructCreatePairCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoCreatePairTx(txInfo) - if err != nil { - logx.Errorf("[ConstructCreatePairCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructCreatePairCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructCreatePairCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.CreatePairTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoCreatePairTx(txInfo *commonTx.CreatePairTxInfo) (info *CryptoCreatePairTx, err error) { - info = &CryptoCreatePairTx{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetBId: txInfo.AssetBId, - FeeRate: txInfo.FeeRate, - TreasuryAccountIndex: txInfo.TreasuryAccountIndex, - TreasuryRate: txInfo.TreasuryRate, - } - return info, nil -} diff --git a/common/proverUtil/createPair_test.go b/common/proverUtil/createPair_test.go deleted file mode 100644 index 7c841649e..000000000 --- a/common/proverUtil/createPair_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructCreatePairCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(9) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(8) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructCreatePairCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/deposit.go b/common/proverUtil/deposit.go deleted file mode 100644 index 77830907a..000000000 --- a/common/proverUtil/deposit.go +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructDepositCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeDeposit { - logx.Errorf("[ConstructCreatePairCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCreatePairCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructDepositCryptoTx] invalid params") - return nil, errors.New("[ConstructDepositCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseDepositTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructDepositCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoDepositTx(txInfo) - if err != nil { - logx.Errorf("[ConstructDepositCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructDepositCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructDepositCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.DepositTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoDepositTx(txInfo *commonTx.DepositTxInfo) (info *CryptoDepositTx, err error) { - info = &CryptoDepositTx{ - AccountIndex: txInfo.AccountIndex, - AccountNameHash: txInfo.AccountNameHash, - AssetId: txInfo.AssetId, - AssetAmount: txInfo.AssetAmount, - } - return info, nil -} diff --git a/common/proverUtil/depositNft.go b/common/proverUtil/depositNft.go deleted file mode 100644 index 2391b4adb..000000000 --- a/common/proverUtil/depositNft.go +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructDepositNftCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeDepositNft { - logx.Errorf("[ConstructCreatePairCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCreatePairCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructDepositNftCryptoTx] invalid params") - return nil, errors.New("[ConstructDepositNftCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseDepositNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructDepositNftCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoDepositNftTx(txInfo) - if err != nil { - logx.Errorf("[ConstructDepositNftCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructDepositNftCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructDepositNftCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.DepositNftTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoDepositNftTx(txInfo *commonTx.DepositNftTxInfo) (info *CryptoDepositNftTx, err error) { - info = &CryptoDepositNftTx{ - AccountIndex: txInfo.AccountIndex, - NftIndex: txInfo.NftIndex, - NftL1Address: txInfo.NftL1Address, - AccountNameHash: txInfo.AccountNameHash, - NftContentHash: txInfo.NftContentHash, - NftL1TokenId: txInfo.NftL1TokenId, - CreatorAccountIndex: txInfo.CreatorAccountIndex, - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, - CollectionId: txInfo.CollectionId, - } - return info, nil -} diff --git a/common/proverUtil/depositNft_test.go b/common/proverUtil/depositNft_test.go deleted file mode 100644 index 35ae520e0..000000000 --- a/common/proverUtil/depositNft_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructDepositNftCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(13) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(12) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructDepositNftCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/deposit_test.go b/common/proverUtil/deposit_test.go deleted file mode 100644 index dba7e6144..000000000 --- a/common/proverUtil/deposit_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructDepositCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(5) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(4) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructDepositCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/fullExit.go b/common/proverUtil/fullExit.go deleted file mode 100644 index 8e7df1f40..000000000 --- a/common/proverUtil/fullExit.go +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructFullExitCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeFullExit { - logx.Errorf("[ConstructFullExitCryptoTx] invalid tx type") - return nil, errors.New("[ConstructFullExitCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructFullExitCryptoTx] invalid params") - return nil, errors.New("[ConstructFullExitCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseFullExitTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructFullExitCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoFullExitTx(txInfo) - if err != nil { - logx.Errorf("[ConstructFullExitCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructFullExitCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructFullExitCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.FullExitTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoFullExitTx(txInfo *commonTx.FullExitTxInfo) (info *CryptoFullExitTx, err error) { - info = &CryptoFullExitTx{ - AccountIndex: txInfo.AccountIndex, - AssetId: txInfo.AssetId, - AssetAmount: txInfo.AssetAmount, - AccountNameHash: txInfo.AccountNameHash, - } - return info, nil -} diff --git a/common/proverUtil/fullExitNft.go b/common/proverUtil/fullExitNft.go deleted file mode 100644 index cc8566590..000000000 --- a/common/proverUtil/fullExitNft.go +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
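The deleted constructors for L1-originated operations (depositNft above, fullExit here, fullExitNft below) repeat one skeleton: parse the stored TxInfo, convert it to the circuit-side struct, build prover info, build witness info, then set the tx type, nonce and an empty signature. A hypothetical consolidation sketch follows; it reuses only names that appear in the removed files, and the fill callback is new.

// constructL1CryptoTx is a hypothetical refactor sketch, not part of the
// removed code. Per-type validation, parsing and the assignment of the
// matching Crypto*Tx field stay in the caller-supplied fill callback;
// everything else is the flow shared by the deleted helpers.
func constructL1CryptoTx(
	oTx *Tx,
	treeCtx *treedb.Context,
	finalityBlockNr uint64,
	accountTree bsmt.SparseMerkleTree,
	accountAssetsTree *[]bsmt.SparseMerkleTree,
	liquidityTree bsmt.SparseMerkleTree,
	nftTree bsmt.SparseMerkleTree,
	accountModel AccountModel,
	fill func(cryptoTx *CryptoTx) error, // e.g. sets cryptoTx.FullExitTxInfo
) (*CryptoTx, error) {
	accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel)
	if err != nil {
		return nil, err
	}
	cryptoTx, err := ConstructWitnessInfo(oTx, accountModel, treeCtx, finalityBlockNr,
		accountTree, accountAssetsTree, liquidityTree, nftTree,
		accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo)
	if err != nil {
		return nil, err
	}
	cryptoTx.TxType = uint8(oTx.TxType)
	cryptoTx.Nonce = oTx.Nonce
	cryptoTx.Signature = std.EmptySignature() // L1-originated ops carry no user signature
	if err := fill(cryptoTx); err != nil {
		return nil, err
	}
	return cryptoTx, nil
}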
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructFullExitNftCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeFullExitNft { - logx.Errorf("[ConstructFullExitNftCryptoTx] invalid tx type") - return nil, errors.New("[ConstructFullExitNftCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructFullExitNftCryptoTx] invalid params") - return nil, errors.New("[ConstructFullExitNftCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseFullExitNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructFullExitNftCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoFullExitNftTx(txInfo) - if err != nil { - logx.Errorf("[ConstructFullExitNftCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructFullExitNftCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructFullExitNftCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.FullExitNftTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoFullExitNftTx(txInfo *commonTx.FullExitNftTxInfo) (info *CryptoFullExitNftTx, err error) { - info = &CryptoFullExitNftTx{ - AccountIndex: txInfo.AccountIndex, - AccountNameHash: txInfo.AccountNameHash, - CreatorAccountIndex: txInfo.CreatorAccountIndex, - CreatorAccountNameHash: txInfo.CreatorAccountNameHash, - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, - NftIndex: txInfo.NftIndex, - CollectionId: txInfo.CollectionId, - NftContentHash: txInfo.NftContentHash, - NftL1Address: txInfo.NftL1Address, - NftL1TokenId: txInfo.NftL1TokenId, - } - return info, nil -} diff --git a/common/proverUtil/fullExitNft_test.go b/common/proverUtil/fullExitNft_test.go deleted file mode 100644 index 27635c60a..000000000 --- a/common/proverUtil/fullExitNft_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructFullExitNftNftCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(15) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(14) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructFullExitNftCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/fullExit_test.go b/common/proverUtil/fullExit_test.go deleted file mode 100644 index da6b7ad6a..000000000 --- a/common/proverUtil/fullExit_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructFullExitCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(14) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(13) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructFullExitCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/mintNft.go b/common/proverUtil/mintNft.go deleted file mode 100644 index 69b933770..000000000 --- a/common/proverUtil/mintNft.go +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructMintNftCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeMintNft { - logx.Errorf("[ConstructMintNftCryptoTx] invalid tx type") - return nil, errors.New("[ConstructMintNftCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructMintNftCryptoTx] invalid params") - return nil, errors.New("[ConstructMintNftCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseMintNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructMintNftCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoMintNftTx(txInfo) - if err != nil { - logx.Errorf("[ConstructMintNftCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructMintNftCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructMintNftCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.MintNftTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructMintNftCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoMintNftTx(txInfo *commonTx.MintNftTxInfo) (info *CryptoMintNftTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoMintNftTx{ - CreatorAccountIndex: txInfo.CreatorAccountIndex, - ToAccountIndex: txInfo.ToAccountIndex, - ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), - NftIndex: txInfo.NftIndex, - NftContentHash: common.FromHex(txInfo.NftContentHash), - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - CollectionId: txInfo.NftCollectionId, - ExpiredAt: txInfo.ExpiredAt, - } - return info, nil -} diff --git a/common/proverUtil/mintNft_test.go b/common/proverUtil/mintNft_test.go deleted file mode 100644 index 6e8d9f42c..000000000 --- a/common/proverUtil/mintNft_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 
2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructMintNftCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(22) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(21) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructMintNftCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/redis.go b/common/proverUtil/redis.go deleted file mode 100644 index 53bfe6bea..000000000 --- a/common/proverUtil/redis.go +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
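The removed helpers split into two groups on signatures: L1-originated operations such as depositNft and fullExit keep std.EmptySignature(), while user-signed operations such as mintNft above (and swap, removeLiquidity, transfer, transferNft further down) decode txInfo.Sig into an eddsa signature. A small hypothetical helper for the latter, using the same gnark-crypto SetBytes call the deleted code makes; the helper name is new.

// setUserSignature is a hypothetical helper capturing the signature handling
// repeated in the removed user-signed constructors: the raw eddsa signature
// bytes stored with the transaction are decoded into the crypto tx.
func setUserSignature(cryptoTx *CryptoTx, sigBytes []byte) error {
	sig := new(eddsa.Signature)
	if _, err := sig.SetBytes(sigBytes); err != nil {
		return fmt.Errorf("invalid sig bytes: %w", err)
	}
	cryptoTx.Signature = sig
	return nil
}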
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "github.com/zeromicro/go-zero/core/stores/redis" -) - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} diff --git a/common/proverUtil/registerZns.go b/common/proverUtil/registerZns.go deleted file mode 100644 index 1696f9736..000000000 --- a/common/proverUtil/registerZns.go +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - "strings" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructRegisterZnsCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeRegisterZns { - logx.Errorf("[ConstructCreatePairCryptoTx] invalid tx type") - return nil, errors.New("[ConstructCreatePairCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructRegisterZnsCryptoTx] invalid params") - return nil, errors.New("[ConstructRegisterZnsCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseRegisterZnsTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructRegisterZnsCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoRegisterZnsTx(txInfo) - if err != nil { - logx.Errorf("[ConstructRegisterZnsCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructRegisterZnsCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructRegisterZnsCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.RegisterZnsTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoRegisterZnsTx(txInfo *commonTx.RegisterZnsTxInfo) (info 
*CryptoRegisterZnsTx, err error) { - accountName := make([]byte, 32) - AccountNameSuffix := ".legend" - realName := strings.Split(txInfo.AccountName, AccountNameSuffix)[0] - copy(accountName[:], realName) - pk, err := util.ParsePubKey(txInfo.PubKey) - if err != nil { - logx.Errorf("[ToCryptoRegisterZnsTx] unable to parse pub key:%s", err.Error()) - return nil, err - } - info = &CryptoRegisterZnsTx{ - AccountIndex: txInfo.AccountIndex, - AccountName: accountName, - AccountNameHash: txInfo.AccountNameHash, - PubKey: pk, - } - return info, nil -} diff --git a/common/proverUtil/registerZns_test.go b/common/proverUtil/registerZns_test.go deleted file mode 100644 index a09f16377..000000000 --- a/common/proverUtil/registerZns_test.go +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "log" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructRegisterZnsCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(3) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(2) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructRegisterZnsCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - 
log.Println(common.Bytes2Hex(tree.NilAccountNodeHash)) - log.Println(common.Bytes2Hex(cryptoTx.MerkleProofsAccountBefore[0][0])) - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} - -func TestConstructRegisterZnsCryptoTxNotFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxHash("e5d6dd7c-da46-11ec-8abf-7cb27d9ca483") - if err != nil { - t.Fatal(err) - } - blockHeight := int64(1) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructRegisterZnsCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/removeLiquidity.go b/common/proverUtil/removeLiquidity.go deleted file mode 100644 index 033a96007..000000000 --- a/common/proverUtil/removeLiquidity.go +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
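In the removed registerZns.go above, ToCryptoRegisterZnsTx strips the ".legend" suffix with strings.Split and copies the remainder into a fixed 32-byte buffer, so an over-long name would be silently truncated. A hypothetical variant (new helper name, same intent) that trims the suffix and rejects names longer than the buffer:

// accountNameBytes is a hypothetical alternative to the inline name handling
// in the removed ToCryptoRegisterZnsTx: TrimSuffix only removes a trailing
// ".legend", and the length guard makes the 32-byte copy explicit.
func accountNameBytes(name string) ([]byte, error) {
	const suffix = ".legend"
	realName := strings.TrimSuffix(name, suffix)
	if len(realName) > 32 {
		return nil, fmt.Errorf("account name %q longer than 32 bytes", realName)
	}
	buf := make([]byte, 32)
	copy(buf, realName)
	return buf, nil
}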
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructRemoveLiquidityCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeRemoveLiquidity { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] invalid tx type") - return nil, errors.New("[ConstructRemoveLiquidityCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] invalid params") - return nil, errors.New("[ConstructRemoveLiquidityCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseRemoveLiquidityTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoRemoveLiquidityTx(txInfo) - if err != nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.RemoveLiquidityTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructRemoveLiquidityCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoRemoveLiquidityTx(txInfo *commonTx.RemoveLiquidityTxInfo) (info *CryptoRemoveLiquidityTx, err error) { - packedAMinAmount, err := util.ToPackedAmount(txInfo.AssetAMinAmount) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedBMinAmount, err := util.ToPackedAmount(txInfo.AssetBMinAmount) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedAAmount, err := util.ToPackedAmount(txInfo.AssetAAmountDelta) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedBAmount, err := util.ToPackedAmount(txInfo.AssetBAmountDelta) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedLpAmount, 
err := util.ToPackedAmount(txInfo.LpAmount) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedKLast, err := util.ToPackedAmount(txInfo.KLast) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedTreasuryAmount, err := util.ToPackedAmount(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ToCryptoAddLiquidityTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoRemoveLiquidityTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoRemoveLiquidityTx{ - FromAccountIndex: txInfo.FromAccountIndex, - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetAMinAmount: packedAMinAmount, - AssetBId: txInfo.AssetBId, - AssetBMinAmount: packedBMinAmount, - LpAmount: packedLpAmount, - KLast: packedKLast, - TreasuryAmount: packedTreasuryAmount, - AssetAAmountDelta: packedAAmount, - AssetBAmountDelta: packedBAmount, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - } - return info, nil -} diff --git a/common/proverUtil/removeLiquidity_test.go b/common/proverUtil/removeLiquidity_test.go deleted file mode 100644 index 97136aa5c..000000000 --- a/common/proverUtil/removeLiquidity_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
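ToCryptoRemoveLiquidityTx in the removed removeLiquidity.go performs seven consecutive ToPackedAmount/ToPackedFee conversions with copy-pasted error handling, two of which still log the [ToCryptoAddLiquidityTx] prefix. A hypothetical batching sketch follows; it assumes, from the call sites only, that util.ToPackedAmount takes a *big.Int and returns (int64, error) — the real signature is not shown in this diff.

// packAmounts is a hypothetical helper; the assumed util.ToPackedAmount
// signature (*big.Int -> (int64, error)) is inferred from its call sites in
// the deleted file and may differ from the actual one.
func packAmounts(values ...*big.Int) ([]int64, error) {
	packed := make([]int64, 0, len(values))
	for i, v := range values {
		p, err := util.ToPackedAmount(v)
		if err != nil {
			return nil, fmt.Errorf("[ToCryptoRemoveLiquidityTx] pack amount %d: %w", i, err)
		}
		packed = append(packed, p)
	}
	return packed, nil
}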
- * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructRemoveLiquidityCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(20) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(19) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructRemoveLiquidityCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/swap.go b/common/proverUtil/swap.go deleted file mode 100644 index 14c170b82..000000000 --- a/common/proverUtil/swap.go +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructSwapCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeSwap { - logx.Errorf("[ConstructSwapCryptoTx] invalid tx type") - return nil, errors.New("[ConstructSwapCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructSwapCryptoTx] invalid params") - return nil, errors.New("[ConstructSwapCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseSwapTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructSwapCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoSwapTx(txInfo) - if err != nil { - logx.Errorf("[ConstructSwapCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructSwapCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructSwapCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.SwapTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructSwapCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoSwapTx(txInfo *commonTx.SwapTxInfo) (info *CryptoSwapTx, err error) { - packedAAmount, err := util.ToPackedAmount(txInfo.AssetAAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedBMinAmount, err := util.ToPackedAmount(txInfo.AssetBMinAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedBAmount, err := util.ToPackedAmount(txInfo.AssetBAmountDelta) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoSwapTx{ - FromAccountIndex: txInfo.FromAccountIndex, - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetAAmount: packedAAmount, - AssetBId: txInfo.AssetBId, - AssetBMinAmount: packedBMinAmount, - AssetBAmountDelta: packedBAmount, - 
GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - } - return info, nil -} diff --git a/common/proverUtil/swap_test.go b/common/proverUtil/swap_test.go deleted file mode 100644 index a0d37f215..000000000 --- a/common/proverUtil/swap_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructSwapCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(19) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(18) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructSwapCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/transfer.go b/common/proverUtil/transfer.go deleted file mode 100644 index b28a0ee36..000000000 --- a/common/proverUtil/transfer.go +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
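Every deleted test in this package repeats the same scaffolding: a redis connection through WithRedis, the go-zero models from the basic package, an in-memory treedb.Context, and the three Merkle trees initialised at the block height just below the transaction under test. The sketch below only consolidates that repeated setup with the names from the removed tests; the one assumption is the txId parameter type, since the diff shows only untyped constants at the call sites.

// setupProverTest is a hypothetical consolidation of the setup block repeated
// in the removed proverUtil tests. It builds the models, an in-memory tree
// context and the account/liquidity/nft trees at the given block height.
func setupProverTest(t *testing.T, txId int64, blockHeight int64) (*Tx, *treedb.Context,
	bsmt.SparseMerkleTree, []bsmt.SparseMerkleTree, bsmt.SparseMerkleTree, bsmt.SparseMerkleTree, AccountModel) {
	redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass))
	txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn)
	accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB)
	accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB)
	liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB)
	nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB)
	ctx := &treedb.Context{Driver: treedb.MemoryDB, TreeDB: memory.NewMemoryDB()}
	oTx, err := txModel.GetTxByTxId(txId)
	if err != nil {
		t.Fatal(err)
	}
	accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx)
	if err != nil {
		t.Fatal(err)
	}
	liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx)
	if err != nil {
		t.Fatal(err)
	}
	nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx)
	if err != nil {
		t.Fatal(err)
	}
	return oTx, ctx, accountTree, accountAssetTrees, liquidityTree, nftTree, accountModel
}

A test would then call setupProverTest(t, 13, 12) and pass &accountAssetTrees to the constructor under test, mirroring the removed TestConstructDepositNftCryptoTxFirst.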
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructTransferCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeTransfer { - logx.Errorf("[ConstructTransferCryptoTx] invalid tx type") - return nil, errors.New("[ConstructTransferCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructTransferCryptoTx] invalid params") - return nil, errors.New("[ConstructTransferCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseTransferTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructTransferCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoTransferTx(txInfo) - if err != nil { - logx.Errorf("[ConstructTransferCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructTransferCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructTransferCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.TransferTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = oTx.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructTransferCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoTransferTx(txInfo *commonTx.TransferTxInfo) (info *CryptoTransferTx, err error) { - packedAmount, err := util.ToPackedAmount(txInfo.AssetAmount) - if err != nil { - logx.Errorf("[ToCryptoTransferTx] unable to convert to packed amount: %s", err.Error()) - return nil, err - } - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoTransferTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoTransferTx{ - FromAccountIndex: txInfo.FromAccountIndex, - ToAccountIndex: txInfo.ToAccountIndex, - 
ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), - AssetId: txInfo.AssetId, - AssetAmount: packedAmount, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - CallDataHash: txInfo.CallDataHash, - } - return info, nil -} diff --git a/common/proverUtil/transferNft.go b/common/proverUtil/transferNft.go deleted file mode 100644 index 7b2dcdee5..000000000 --- a/common/proverUtil/transferNft.go +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructTransferNftCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeTransferNft { - logx.Errorf("[ConstructTransferNftCryptoTx] invalid tx type") - return nil, errors.New("[ConstructTransferNftCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructTransferNftCryptoTx] invalid params") - return nil, errors.New("[ConstructTransferNftCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseTransferNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructTransferNftCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoTransferNftTx(txInfo) - if err != nil { - logx.Errorf("[ConstructTransferNftCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructTransferNftCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructTransferNftCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.TransferNftTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - 
logx.Errorf("[ConstructTransferNftCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoTransferNftTx(txInfo *commonTx.TransferNftTxInfo) (info *CryptoTransferNftTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoTransferNftTx{ - FromAccountIndex: txInfo.FromAccountIndex, - ToAccountIndex: txInfo.ToAccountIndex, - ToAccountNameHash: common.FromHex(txInfo.ToAccountNameHash), - NftIndex: txInfo.NftIndex, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - CallDataHash: txInfo.CallDataHash, - } - return info, nil -} diff --git a/common/proverUtil/transferNft_test.go b/common/proverUtil/transferNft_test.go deleted file mode 100644 index a4c1b84b2..000000000 --- a/common/proverUtil/transferNft_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructTransferNftCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(23) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(22) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - 
cryptoTx, err := ConstructTransferNftCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/transfer_test.go b/common/proverUtil/transfer_test.go deleted file mode 100644 index 2c64eb897..000000000 --- a/common/proverUtil/transfer_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructTransferCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(19) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(18) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructTransferCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/txHelper.go b/common/proverUtil/txHelper.go deleted file mode 100644 index 498ac30de..000000000 --- a/common/proverUtil/txHelper.go +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright © 2021 Zkbas 
Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonTx" -) - -func ConstructProverInfo( - oTx *Tx, - accountModel AccountModel, -) ( - accountKeys []int64, - proverAccounts []*ProverAccountInfo, - proverLiquidityInfo *ProverLiquidityInfo, - proverNftInfo *ProverNftInfo, - err error, -) { - var ( - // init account asset map, because if we have the same asset detail, the before will be the after of the last one - accountAssetMap = make(map[int64]map[int64]*AccountAsset) - accountMap = make(map[int64]*Account) - lastAccountOrder = int64(-2) - accountCount = -1 - ) - // init prover account map - if oTx.TxType == commonTx.TxTypeRegisterZns { - accountKeys = append(accountKeys, oTx.AccountIndex) - } - for _, txDetail := range oTx.TxDetails { - switch txDetail.AssetType { - case commonAsset.GeneralAssetType: - // get account info - if accountMap[txDetail.AccountIndex] == nil { - accountInfo, err := accountModel.GetConfirmedAccountByAccountIndex(txDetail.AccountIndex) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to get valid account by index: %s", err.Error()) - return nil, nil, nil, nil, err - } - // get current nonce - accountInfo.Nonce = txDetail.Nonce - accountMap[txDetail.AccountIndex] = accountInfo - } else { - if lastAccountOrder != txDetail.AccountOrder { - if oTx.AccountIndex == txDetail.AccountIndex { - accountMap[txDetail.AccountIndex].Nonce = oTx.Nonce - } - } - } - if lastAccountOrder != txDetail.AccountOrder { - accountKeys = append(accountKeys, txDetail.AccountIndex) - lastAccountOrder = txDetail.AccountOrder - proverAccounts = append(proverAccounts, &ProverAccountInfo{ - AccountInfo: &Account{ - AccountIndex: accountMap[txDetail.AccountIndex].AccountIndex, - AccountName: accountMap[txDetail.AccountIndex].AccountName, - PublicKey: accountMap[txDetail.AccountIndex].PublicKey, - AccountNameHash: accountMap[txDetail.AccountIndex].AccountNameHash, - L1Address: accountMap[txDetail.AccountIndex].L1Address, - Nonce: accountMap[txDetail.AccountIndex].Nonce, - CollectionNonce: txDetail.CollectionNonce, - AssetInfo: accountMap[txDetail.AccountIndex].AssetInfo, - AssetRoot: accountMap[txDetail.AccountIndex].AssetRoot, - Status: accountMap[txDetail.AccountIndex].Status, - }, - }) - accountCount++ - } - if accountAssetMap[txDetail.AccountIndex] == nil { - accountAssetMap[txDetail.AccountIndex] = make(map[int64]*AccountAsset) - } - if accountAssetMap[txDetail.AccountIndex][txDetail.AssetId] == nil { - // set account before info - oAsset, err := commonAsset.ParseAccountAsset(txDetail.Balance) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to parse account asset:%s", err.Error()) - return nil, nil, nil, nil, err - } - proverAccounts[accountCount].AccountAssets = append( - proverAccounts[accountCount].AccountAssets, - oAsset, - ) - } else { - // set account before 
info - proverAccounts[accountCount].AccountAssets = append( - proverAccounts[accountCount].AccountAssets, - &AccountAsset{ - AssetId: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].AssetId, - Balance: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].Balance, - LpAmount: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].LpAmount, - OfferCanceledOrFinalized: accountAssetMap[txDetail.AccountIndex][txDetail.AssetId].OfferCanceledOrFinalized, - }, - ) - } - // set tx detail - proverAccounts[accountCount].AssetsRelatedTxDetails = append( - proverAccounts[accountCount].AssetsRelatedTxDetails, - txDetail, - ) - // update asset info - newBalance, err := commonAsset.ComputeNewBalance(txDetail.AssetType, txDetail.Balance, txDetail.BalanceDelta) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to compute new balance: %s", err.Error()) - return nil, nil, nil, nil, err - } - nAsset, err := commonAsset.ParseAccountAsset(newBalance) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to parse account asset:%s", err.Error()) - return nil, nil, nil, nil, err - } - accountAssetMap[txDetail.AccountIndex][txDetail.AssetId] = nAsset - break - case commonAsset.LiquidityAssetType: - proverLiquidityInfo = new(ProverLiquidityInfo) - proverLiquidityInfo.LiquidityRelatedTxDetail = txDetail - poolInfo, err := commonAsset.ParseLiquidityInfo(txDetail.Balance) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to parse pool info: %s", err.Error()) - return nil, nil, nil, nil, err - } - proverLiquidityInfo.LiquidityInfo = poolInfo - break - case commonAsset.NftAssetType: - proverNftInfo = new(ProverNftInfo) - proverNftInfo.NftRelatedTxDetail = txDetail - nftInfo, err := commonAsset.ParseNftInfo(txDetail.Balance) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to parse nft info: %s", err.Error()) - return nil, nil, nil, nil, err - } - proverNftInfo.NftInfo = nftInfo - break - case commonAsset.CollectionNonceAssetType: - // get account info - if accountMap[txDetail.AccountIndex] == nil { - accountInfo, err := accountModel.GetConfirmedAccountByAccountIndex(txDetail.AccountIndex) - if err != nil { - logx.Errorf("[ConstructProverInfo] unable to get valid account by index: %s", err.Error()) - return nil, nil, nil, nil, err - } - // get current nonce - accountInfo.Nonce = txDetail.Nonce - accountInfo.CollectionNonce = txDetail.CollectionNonce - accountMap[txDetail.AccountIndex] = accountInfo - if lastAccountOrder != txDetail.AccountOrder { - accountKeys = append(accountKeys, txDetail.AccountIndex) - lastAccountOrder = txDetail.AccountOrder - proverAccounts = append(proverAccounts, &ProverAccountInfo{ - AccountInfo: &Account{ - AccountIndex: accountMap[txDetail.AccountIndex].AccountIndex, - AccountName: accountMap[txDetail.AccountIndex].AccountName, - PublicKey: accountMap[txDetail.AccountIndex].PublicKey, - AccountNameHash: accountMap[txDetail.AccountIndex].AccountNameHash, - L1Address: accountMap[txDetail.AccountIndex].L1Address, - Nonce: accountMap[txDetail.AccountIndex].Nonce, - CollectionNonce: txDetail.CollectionNonce, - AssetInfo: accountMap[txDetail.AccountIndex].AssetInfo, - AssetRoot: accountMap[txDetail.AccountIndex].AssetRoot, - Status: accountMap[txDetail.AccountIndex].Status, - }, - }) - accountCount++ - } - } else { - accountMap[txDetail.AccountIndex].Nonce = txDetail.Nonce - accountMap[txDetail.AccountIndex].CollectionNonce = txDetail.CollectionNonce - } - break - default: - logx.Errorf("[ConstructProverInfo] invalid asset type") - return 
nil, nil, nil, nil, - errors.New("[ConstructProverInfo] invalid asset type") - } - } - return accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, nil -} diff --git a/common/proverUtil/updatePairRate.go b/common/proverUtil/updatePairRate.go deleted file mode 100644 index 62196efde..000000000 --- a/common/proverUtil/updatePairRate.go +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructUpdatePairRateCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeUpdatePairRate { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] invalid tx type") - return nil, errors.New("[ConstructUpdatePairRateCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] invalid params") - return nil, errors.New("[ConstructUpdatePairRateCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseUpdatePairRateTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoUpdatePairRateTx(txInfo) - if err != nil { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructUpdatePairRateCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.UpdatePairRateTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.Signature = std.EmptySignature() - return cryptoTx, nil -} - -func ToCryptoUpdatePairRateTx(txInfo *commonTx.UpdatePairRateTxInfo) (info *CryptoUpdatePairRateTx, err error) { - info = &CryptoUpdatePairRateTx{ - PairIndex: txInfo.PairIndex, - FeeRate: txInfo.FeeRate, - TreasuryAccountIndex: txInfo.TreasuryAccountIndex, - 
TreasuryRate: txInfo.TreasuryRate, - } - return info, nil -} diff --git a/common/proverUtil/updatePairRate_test.go b/common/proverUtil/updatePairRate_test.go deleted file mode 100644 index 17cbe28bf..000000000 --- a/common/proverUtil/updatePairRate_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructUpdatePairRateCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(12) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(11) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructUpdatePairRateCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/util.go b/common/proverUtil/util.go deleted file mode 100644 index 18ef755de..000000000 --- a/common/proverUtil/util.go +++ /dev/null @@ -1,393 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func SetFixedAccountArray(proof [][]byte) (res [AccountMerkleLevels][]byte, err error) { - if len(proof) != AccountMerkleLevels { - logx.Errorf("[SetFixedAccountArray] invalid size") - return res, errors.New("[SetFixedAccountArray] invalid size") - } - copy(res[:], proof[:]) - return res, nil -} - -func SetFixedAccountAssetArray(proof [][]byte) (res [AssetMerkleLevels][]byte, err error) { - if len(proof) != AssetMerkleLevels { - logx.Errorf("[SetFixedAccountAssetArray] invalid size") - return res, errors.New("[SetFixedAccountAssetArray] invalid size") - } - copy(res[:], proof[:]) - return res, nil -} - -func SetFixedLiquidityArray(proof [][]byte) (res [LiquidityMerkleLevels][]byte, err error) { - if len(proof) != LiquidityMerkleLevels { - logx.Errorf("[SetFixedLiquidityArray] invalid size") - return res, errors.New("[SetFixedLiquidityArray] invalid size") - } - copy(res[:], proof[:]) - return res, nil -} - -func SetFixedNftArray(proof [][]byte) (res [NftMerkleLevels][]byte, err error) { - if len(proof) != NftMerkleLevels { - logx.Errorf("[SetFixedNftArray] invalid size") - return res, errors.New("[SetFixedNftArray] invalid size") - } - copy(res[:], proof[:]) - return res, nil -} - -func ConstructCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - accountTree bsmt.SparseMerkleTree, - assetTrees *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, - finalityBlockNr uint64, -) (cryptoTx *CryptoTx, err error) { - switch oTx.TxType { - case commonTx.TxTypeEmpty: - logx.Error("[ConstructProverBlocks] there should be no empty tx") - break - case commonTx.TxTypeRegisterZns: - cryptoTx, err = ConstructRegisterZnsCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct registerZNS crypto tx: %x", err.Error()) - return nil, err - } - break - case commonTx.TxTypeCreatePair: - cryptoTx, err = ConstructCreatePairCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct create pair crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeUpdatePairRate: - cryptoTx, err = ConstructUpdatePairRateCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct update pair crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeDeposit: - cryptoTx, err = 
ConstructDepositCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct deposit crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeDepositNft: - cryptoTx, err = ConstructDepositNftCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct deposit nft crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeTransfer: - cryptoTx, err = ConstructTransferCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct transfer crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeSwap: - cryptoTx, err = ConstructSwapCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct swap crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeAddLiquidity: - cryptoTx, err = ConstructAddLiquidityCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct add liquidity crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeRemoveLiquidity: - cryptoTx, err = ConstructRemoveLiquidityCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct remove liquidity crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeWithdraw: - cryptoTx, err = ConstructWithdrawCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct withdraw crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeCreateCollection: - cryptoTx, err = ConstructCreateCollectionCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct create collection crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeMintNft: - cryptoTx, err = ConstructMintNftCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct mint nft crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeTransferNft: - cryptoTx, err = ConstructTransferNftCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct transfer nft crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeAtomicMatch: - cryptoTx, err = ConstructAtomicMatchCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - 
accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct atomic match crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeCancelOffer: - cryptoTx, err = ConstructCancelOfferCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct cancel offer crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeWithdrawNft: - cryptoTx, err = ConstructWithdrawNftCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct withdraw nft crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeFullExit: - cryptoTx, err = ConstructFullExitCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct full exit crypto tx: %s", err.Error()) - return nil, err - } - break - case commonTx.TxTypeFullExitNft: - cryptoTx, err = ConstructFullExitNftCryptoTx( - oTx, - treeCtx, - finalityBlockNr, - accountTree, - assetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - logx.Errorf("[ConstructProverBlocks] unable to construct full exit nft crypto tx: %s", err.Error()) - return nil, err - } - break - default: - return nil, errors.New("tx type error") - } - return cryptoTx, nil -} - -func BlockToCryptoBlock( - oBlock *block.Block, - oldStateRoot, newStateRoot []byte, - cryptoTxs []*cryptoBlock.Tx, -) (cBlock *cryptoBlock.Block, err error) { - cBlock = &cryptoBlock.Block{ - BlockNumber: oBlock.BlockHeight, - CreatedAt: oBlock.CreatedAt.UnixMilli(), - OldStateRoot: oldStateRoot, - NewStateRoot: newStateRoot, - BlockCommitment: common.FromHex(oBlock.BlockCommitment), - } - for i := 0; i < len(cryptoTxs); i++ { - cBlock.Txs = append(cBlock.Txs, cryptoTxs[i]) - } - return cBlock, nil -} diff --git a/common/proverUtil/withdraw.go b/common/proverUtil/withdraw.go deleted file mode 100644 index 896035f93..000000000 --- a/common/proverUtil/withdraw.go +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package proverUtil - -import ( - "errors" - "math/big" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructWithdrawCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeWithdraw { - logx.Errorf("[ConstructWithdrawCryptoTx] invalid tx type") - return nil, errors.New("[ConstructWithdrawCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructWithdrawCryptoTx] invalid params") - return nil, errors.New("[ConstructWithdrawCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseWithdrawTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructWithdrawCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoWithdrawTx(txInfo) - if err != nil { - logx.Errorf("[ConstructWithdrawCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructWithdrawCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructWithdrawCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.WithdrawTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = oTx.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructWithdrawCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoWithdrawTx(txInfo *commonTx.WithdrawTxInfo) (info *CryptoWithdrawTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoWithdrawTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - addrBytes := legendTxTypes.PaddingAddressToBytes32(txInfo.ToAddress) - info = &CryptoWithdrawTx{ - FromAccountIndex: txInfo.FromAccountIndex, - AssetId: txInfo.AssetId, - AssetAmount: txInfo.AssetAmount, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - ToAddress: new(big.Int).SetBytes(addrBytes), - } - return info, nil -} diff --git a/common/proverUtil/withdrawNft.go b/common/proverUtil/withdrawNft.go deleted file mode 100644 index cac49d958..000000000 --- a/common/proverUtil/withdrawNft.go +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the 
"License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructWithdrawNftCryptoTx( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetsTree *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountModel AccountModel, -) (cryptoTx *CryptoTx, err error) { - if oTx.TxType != commonTx.TxTypeWithdrawNft { - logx.Errorf("[ConstructWithdrawNftCryptoTx] invalid tx type") - return nil, errors.New("[ConstructWithdrawNftCryptoTx] invalid tx type") - } - if oTx == nil || accountTree == nil || accountAssetsTree == nil || liquidityTree == nil || nftTree == nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] invalid params") - return nil, errors.New("[ConstructWithdrawNftCryptoTx] invalid params") - } - txInfo, err := commonTx.ParseWithdrawNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] unable to parse register zns tx info:%s", err.Error()) - return nil, err - } - cryptoTxInfo, err := ToCryptoWithdrawNftTx(txInfo) - if err != nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] unable to convert to crypto register zns tx: %s", err.Error()) - return nil, err - } - accountKeys, proverAccounts, proverLiquidityInfo, proverNftInfo, err := ConstructProverInfo(oTx, accountModel) - if err != nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] unable to construct prover info: %s", err.Error()) - return nil, err - } - cryptoTx, err = ConstructWitnessInfo( - oTx, - accountModel, - treeCtx, - finalityBlockNr, - accountTree, - accountAssetsTree, - liquidityTree, - nftTree, - accountKeys, - proverAccounts, - proverLiquidityInfo, - proverNftInfo, - ) - if err != nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] unable to construct witness info: %s", err.Error()) - return nil, err - } - cryptoTx.TxType = uint8(oTx.TxType) - cryptoTx.WithdrawNftTxInfo = cryptoTxInfo - cryptoTx.Nonce = oTx.Nonce - cryptoTx.ExpiredAt = txInfo.ExpiredAt - cryptoTx.Signature = new(eddsa.Signature) - _, err = cryptoTx.Signature.SetBytes(txInfo.Sig) - if err != nil { - logx.Errorf("[ConstructWithdrawNftCryptoTx] invalid sig bytes: %s", err.Error()) - return nil, err - } - return cryptoTx, nil -} - -func ToCryptoWithdrawNftTx(txInfo *commonTx.WithdrawNftTxInfo) (info *CryptoWithdrawNftTx, err error) { - packedFee, err := util.ToPackedFee(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ToCryptoSwapTx] unable to convert to packed fee: %s", err.Error()) - return nil, err - } - info = &CryptoWithdrawNftTx{ - AccountIndex: txInfo.AccountIndex, - CreatorAccountIndex: txInfo.CreatorAccountIndex, - CreatorAccountNameHash: txInfo.CreatorAccountNameHash, - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, 
- NftIndex: txInfo.NftIndex, - NftContentHash: txInfo.NftContentHash, - NftL1Address: txInfo.NftL1Address, - NftL1TokenId: txInfo.NftL1TokenId, - ToAddress: txInfo.ToAddress, - GasAccountIndex: txInfo.GasAccountIndex, - GasFeeAssetId: txInfo.GasFeeAssetId, - GasFeeAssetAmount: packedFee, - CollectionId: txInfo.CollectionId, - } - return info, nil -} diff --git a/common/proverUtil/withdrawNft_test.go b/common/proverUtil/withdrawNft_test.go deleted file mode 100644 index e2592da6e..000000000 --- a/common/proverUtil/withdrawNft_test.go +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructWithdrawNftCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(26) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(25) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructWithdrawNftCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/withdraw_test.go b/common/proverUtil/withdraw_test.go deleted file mode 100644 index a2f3203eb..000000000 --- 
a/common/proverUtil/withdraw_test.go +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/basic" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func TestConstructWithdrawCryptoTxFirst(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - txModel := tx.NewTxModel(basic.Connection, basic.CacheConf, basic.DB, redisConn) - accountModel := account.NewAccountModel(basic.Connection, basic.CacheConf, basic.DB) - accountHistoryModel := account.NewAccountHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //liquidityModel := liquidity.NewLiquidityModel(basic.Connection, basic.CacheConf, basic.DB) - liquidityHistoryModel := liquidity.NewLiquidityHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - //nftModel := nft.NewL2NftModel(basic.Connection, basic.CacheConf, basic.DB) - nftHistoryModel := nft.NewL2NftHistoryModel(basic.Connection, basic.CacheConf, basic.DB) - ctx := &treedb.Context{ - Driver: treedb.MemoryDB, - TreeDB: memory.NewMemoryDB(), - } - txInfo, err := txModel.GetTxByTxId(17) - if err != nil { - t.Fatal(err) - } - blockHeight := int64(16) - accountTree, accountAssetTrees, err := tree.InitAccountTree(accountModel, accountHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - liquidityTree, err := tree.InitLiquidityTree(liquidityHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - nftTree, err := tree.InitNftTree(nftHistoryModel, blockHeight, ctx) - if err != nil { - t.Fatal(err) - } - cryptoTx, err := ConstructWithdrawCryptoTx( - txInfo, - ctx, 0, - accountTree, &accountAssetTrees, - liquidityTree, - nftTree, - accountModel, - ) - if err != nil { - t.Fatal(err) - } - txBytes, err := json.Marshal(cryptoTx) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(txBytes)) -} diff --git a/common/proverUtil/witnessHelper.go b/common/proverUtil/witnessHelper.go deleted file mode 100644 index 38ddfd524..000000000 --- a/common/proverUtil/witnessHelper.go +++ /dev/null @@ -1,502 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package proverUtil - -import ( - "errors" - "math/big" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func ConstructWitnessInfo( - oTx *Tx, - accountModel AccountModel, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountTree bsmt.SparseMerkleTree, - accountAssetTrees *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountKeys []int64, - proverAccounts []*ProverAccountInfo, - proverLiquidityInfo *ProverLiquidityInfo, - proverNftInfo *ProverNftInfo, -) ( - cryptoTx *CryptoTx, - err error, -) { - // construct account witness - AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err := - ConstructAccountWitness(oTx, treeCtx, finalityBlockNr, accountModel, accountTree, accountAssetTrees, accountKeys, proverAccounts) - if err != nil { - logx.Errorf("[ConstructWitnessInfo] unable to construct account witness: %s", err.Error()) - return nil, err - } - // construct liquidity witness - LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err := - ConstructLiquidityWitness(liquidityTree, proverLiquidityInfo) - if err != nil { - logx.Errorf("[ConstructWitnessInfo] unable to construct liquidity witness: %s", err.Error()) - return nil, err - } - // construct nft witness - NftRootBefore, NftBefore, MerkleProofsNftBefore, err := - ConstructNftWitness(nftTree, proverNftInfo) - if err != nil { - logx.Errorf("[ConstructWitnessInfo] unable to construct nft witness: %s", err.Error()) - return nil, err - } - stateRootBefore := tree.ComputeStateRootHash(AccountRootBefore, LiquidityRootBefore, NftRootBefore) - stateRootAfter := tree.ComputeStateRootHash(accountTree.Root(), liquidityTree.Root(), nftTree.Root()) - cryptoTx = &CryptoTx{ - AccountRootBefore: AccountRootBefore, - AccountsInfoBefore: AccountsInfoBefore, - LiquidityRootBefore: LiquidityRootBefore, - LiquidityBefore: LiquidityBefore, - NftRootBefore: NftRootBefore, - NftBefore: NftBefore, - StateRootBefore: stateRootBefore, - MerkleProofsAccountAssetsBefore: MerkleProofsAccountAssetsBefore, - MerkleProofsAccountBefore: MerkleProofsAccountBefore, - MerkleProofsLiquidityBefore: MerkleProofsLiquidityBefore, - MerkleProofsNftBefore: MerkleProofsNftBefore, - StateRootAfter: stateRootAfter, - } - return cryptoTx, nil -} - -func ConstructAccountWitness( - oTx *Tx, - treeCtx *treedb.Context, - finalityBlockNr uint64, - accountModel AccountModel, - accountTree bsmt.SparseMerkleTree, - accountAssetTrees *[]bsmt.SparseMerkleTree, - accountKeys []int64, - proverAccounts []*ProverAccountInfo, -) ( - AccountRootBefore []byte, - // account before info, size is 5 - AccountsInfoBefore 
[NbAccountsPerTx]*CryptoAccount, - // before account asset merkle proof - MerkleProofsAccountAssetsBefore [NbAccountsPerTx][NbAccountAssetsPerAccount][AssetMerkleLevels][]byte, - // before account merkle proof - MerkleProofsAccountBefore [NbAccountsPerTx][AccountMerkleLevels][]byte, - err error, -) { - AccountRootBefore = accountTree.Root() - var ( - accountCount = 0 - ) - for _, accountKey := range accountKeys { - var ( - cryptoAccount = new(CryptoAccount) - // get account asset before - assetCount = 0 - ) - // get account before - accountMerkleProofs, err := accountTree.GetProof(uint64(accountKey)) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to build merkle proofs: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // it means this is a registerZNS tx - if proverAccounts == nil { - if accountKey != int64(len(*accountAssetTrees)) { - logx.Errorf("[ConstructAccountWitness] invalid key") - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, - errors.New("[ConstructAccountWitness] invalid key") - } - emptyAccountAssetTree, err := tree.NewEmptyAccountAssetTree(treeCtx, accountKey, finalityBlockNr) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to create empty account asset tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - *accountAssetTrees = append(*accountAssetTrees, emptyAccountAssetTree) - cryptoAccount = std.EmptyAccount(accountKey, tree.NilAccountAssetRoot) - // update account info - accountInfo, err := accountModel.GetConfirmedAccountByAccountIndex(accountKey) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to get confirmed account by account index: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - proverAccounts = append(proverAccounts, &ProverAccountInfo{ - AccountInfo: &Account{ - AccountIndex: accountInfo.AccountIndex, - AccountName: accountInfo.AccountName, - PublicKey: accountInfo.PublicKey, - AccountNameHash: accountInfo.AccountNameHash, - L1Address: accountInfo.L1Address, - Nonce: commonConstant.NilNonce, - CollectionNonce: commonConstant.NilCollectionId, - AssetInfo: commonConstant.NilAssetInfo, - AssetRoot: common.Bytes2Hex(tree.NilAccountAssetRoot), - Status: accountInfo.Status, - }, - }) - } else { - proverAccountInfo := proverAccounts[accountCount] - pk, err := util.ParsePubKey(proverAccountInfo.AccountInfo.PublicKey) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to parse pub key: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - cryptoAccount = &CryptoAccount{ - AccountIndex: accountKey, - AccountNameHash: common.FromHex(proverAccountInfo.AccountInfo.AccountNameHash), - AccountPk: pk, - Nonce: proverAccountInfo.AccountInfo.Nonce, - CollectionNonce: proverAccountInfo.AccountInfo.CollectionNonce, - AssetRoot: (*accountAssetTrees)[accountKey].Root(), - } - for i, accountAsset := range proverAccountInfo.AccountAssets { - assetMerkleProof, err := (*accountAssetTrees)[accountKey].GetProof(uint64(accountAsset.AssetId)) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to build merkle proofs: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, 
MerkleProofsAccountBefore, err - } - // set crypto account asset - cryptoAccount.AssetsInfo[assetCount] = &CryptoAccountAsset{ - AssetId: accountAsset.AssetId, - Balance: accountAsset.Balance, - LpAmount: accountAsset.LpAmount, - OfferCanceledOrFinalized: accountAsset.OfferCanceledOrFinalized, - } - - // set merkle proof - MerkleProofsAccountAssetsBefore[accountCount][assetCount], err = SetFixedAccountAssetArray(assetMerkleProof) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to set fixed merkle proof: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // update asset merkle tree - nBalance, err := commonAsset.ComputeNewBalance( - proverAccountInfo.AssetsRelatedTxDetails[i].AssetType, - proverAccountInfo.AssetsRelatedTxDetails[i].Balance, - proverAccountInfo.AssetsRelatedTxDetails[i].BalanceDelta, - ) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to compute new balance: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - nAsset, err := commonAsset.ParseAccountAsset(nBalance) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to parse account asset: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - nAssetHash, err := tree.ComputeAccountAssetLeafHash(nAsset.Balance.String(), nAsset.LpAmount.String(), nAsset.OfferCanceledOrFinalized.String()) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to compute account asset leaf hash: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - err = (*accountAssetTrees)[accountKey].Set(uint64(accountAsset.AssetId), nAssetHash) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to update asset tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - - assetCount++ - } - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to commit asset tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - } - // padding empty account asset - for assetCount < NbAccountAssetsPerAccount { - cryptoAccount.AssetsInfo[assetCount] = std.EmptyAccountAsset(LastAccountAssetId) - assetMerkleProof, err := (*accountAssetTrees)[accountKey].GetProof(LastAccountAssetId) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to build merkle proofs: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - MerkleProofsAccountAssetsBefore[accountCount][assetCount], err = SetFixedAccountAssetArray(assetMerkleProof) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to set fixed merkle proof: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - assetCount++ - } - // set account merkle proof - MerkleProofsAccountBefore[accountCount], err = SetFixedAccountArray(accountMerkleProofs) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to set fixed merkle proof: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // update 
account merkle tree - nonce := cryptoAccount.Nonce - collectionNonce := cryptoAccount.CollectionNonce - if oTx.AccountIndex == accountKey && oTx.Nonce != commonConstant.NilNonce { - nonce = oTx.Nonce - } - if oTx.AccountIndex == accountKey && oTx.TxType == commonTx.TxTypeCreateCollection { - collectionNonce++ - } - nAccountHash, err := tree.ComputeAccountLeafHash( - proverAccounts[accountCount].AccountInfo.AccountNameHash, - proverAccounts[accountCount].AccountInfo.PublicKey, - nonce, - collectionNonce, - (*accountAssetTrees)[accountKey].Root(), - ) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to compute account leaf hash: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - err = accountTree.Set(uint64(accountKey), nAccountHash) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to update account tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // set account info before - AccountsInfoBefore[accountCount] = cryptoAccount - // add count - accountCount++ - } - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to commit account tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // padding empty account - emptyAssetTree, err := tree.NewMemAccountAssetTree() - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to new empty account asset tree: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - for accountCount < NbAccountsPerTx { - AccountsInfoBefore[accountCount] = std.EmptyAccount(LastAccountIndex, tree.NilAccountAssetRoot) - // get account before - accountMerkleProofs, err := accountTree.GetProof(LastAccountIndex) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to build merkle proofs: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - // set account merkle proof - MerkleProofsAccountBefore[accountCount], err = SetFixedAccountArray(accountMerkleProofs) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to set fixed merkle proof: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - for i := 0; i < NbAccountAssetsPerAccount; i++ { - assetMerkleProof, err := emptyAssetTree.GetProof(0) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to build merkle proofs: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - MerkleProofsAccountAssetsBefore[accountCount][i], err = SetFixedAccountAssetArray(assetMerkleProof) - if err != nil { - logx.Errorf("[ConstructAccountWitness] unable to set fixed merkle proof: %s", err.Error()) - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, err - } - } - accountCount++ - - } - return AccountRootBefore, AccountsInfoBefore, MerkleProofsAccountAssetsBefore, MerkleProofsAccountBefore, nil -} - -func ConstructLiquidityWitness( - liquidityTree bsmt.SparseMerkleTree, - proverLiquidityInfo *ProverLiquidityInfo, -) ( - // liquidity root before - LiquidityRootBefore []byte, - // liquidity before - LiquidityBefore 
*CryptoLiquidity, - // before liquidity merkle proof - MerkleProofsLiquidityBefore [LiquidityMerkleLevels][]byte, - err error, -) { - LiquidityRootBefore = liquidityTree.Root() - if proverLiquidityInfo == nil { - liquidityMerkleProofs, err := liquidityTree.GetProof(LastPairIndex) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to build merkle proofs: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - MerkleProofsLiquidityBefore, err = SetFixedLiquidityArray(liquidityMerkleProofs) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to set fixed liquidity array: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - LiquidityBefore = std.EmptyLiquidity(LastPairIndex) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, nil - } - liquidityMerkleProofs, err := liquidityTree.GetProof(uint64(proverLiquidityInfo.LiquidityInfo.PairIndex)) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to build merkle proofs: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - MerkleProofsLiquidityBefore, err = SetFixedLiquidityArray(liquidityMerkleProofs) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to set fixed liquidity array: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - LiquidityBefore = &CryptoLiquidity{ - PairIndex: proverLiquidityInfo.LiquidityInfo.PairIndex, - AssetAId: proverLiquidityInfo.LiquidityInfo.AssetAId, - AssetA: proverLiquidityInfo.LiquidityInfo.AssetA, - AssetBId: proverLiquidityInfo.LiquidityInfo.AssetBId, - AssetB: proverLiquidityInfo.LiquidityInfo.AssetB, - LpAmount: proverLiquidityInfo.LiquidityInfo.LpAmount, - KLast: proverLiquidityInfo.LiquidityInfo.KLast, - FeeRate: proverLiquidityInfo.LiquidityInfo.FeeRate, - TreasuryAccountIndex: proverLiquidityInfo.LiquidityInfo.TreasuryAccountIndex, - TreasuryRate: proverLiquidityInfo.LiquidityInfo.TreasuryRate, - } - // update liquidity tree - nBalance, err := commonAsset.ComputeNewBalance( - proverLiquidityInfo.LiquidityRelatedTxDetail.AssetType, - proverLiquidityInfo.LiquidityRelatedTxDetail.Balance, - proverLiquidityInfo.LiquidityRelatedTxDetail.BalanceDelta, - ) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to compute new balance: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - nPoolInfo, err := commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to parse pool info: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - nLiquidityHash, err := tree.ComputeLiquidityAssetLeafHash( - nPoolInfo.AssetAId, - nPoolInfo.AssetA.String(), - nPoolInfo.AssetBId, - nPoolInfo.AssetB.String(), - nPoolInfo.LpAmount.String(), - nPoolInfo.KLast.String(), - nPoolInfo.FeeRate, - nPoolInfo.TreasuryAccountIndex, - nPoolInfo.TreasuryRate, - ) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to compute liquidity node hash: %s", err.Error()) - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, err - } - err = liquidityTree.Set(uint64(proverLiquidityInfo.LiquidityInfo.PairIndex), nLiquidityHash) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to update liquidity tree: %s", err.Error()) - return LiquidityRootBefore, 
LiquidityBefore, MerkleProofsLiquidityBefore, err - } - return LiquidityRootBefore, LiquidityBefore, MerkleProofsLiquidityBefore, nil -} - -func ConstructNftWitness( - nftTree bsmt.SparseMerkleTree, - proverNftInfo *ProverNftInfo, -) ( - // nft root before - NftRootBefore []byte, - // nft before - NftBefore *CryptoNft, - // before nft tree merkle proof - MerkleProofsNftBefore [NftMerkleLevels][]byte, - err error, -) { - NftRootBefore = nftTree.Root() - if proverNftInfo == nil { - liquidityMerkleProofs, err := nftTree.GetProof(LastNftIndex) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to build merkle proofs: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - MerkleProofsNftBefore, err = SetFixedNftArray(liquidityMerkleProofs) - if err != nil { - logx.Errorf("[ConstructLiquidityWitness] unable to set fixed nft array: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - NftBefore = std.EmptyNft(LastNftIndex) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, nil - } - nftMerkleProofs, err := nftTree.GetProof(uint64(proverNftInfo.NftInfo.NftIndex)) - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to build merkle proofs: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - MerkleProofsNftBefore, err = SetFixedNftArray(nftMerkleProofs) - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to set fixed liquidity array: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - nftL1TokenId, isValid := new(big.Int).SetString(proverNftInfo.NftInfo.NftL1TokenId, 10) - if !isValid { - logx.Errorf("[ConstructNftWitness] unable to parse big int") - return NftRootBefore, NftBefore, MerkleProofsNftBefore, errors.New("[ConstructNftWitness] unable to parse big int") - } - NftBefore = &CryptoNft{ - NftIndex: proverNftInfo.NftInfo.NftIndex, - NftContentHash: common.FromHex(proverNftInfo.NftInfo.NftContentHash), - CreatorAccountIndex: proverNftInfo.NftInfo.CreatorAccountIndex, - OwnerAccountIndex: proverNftInfo.NftInfo.OwnerAccountIndex, - NftL1Address: new(big.Int).SetBytes(common.FromHex(proverNftInfo.NftInfo.NftL1Address)), - NftL1TokenId: nftL1TokenId, - CreatorTreasuryRate: proverNftInfo.NftInfo.CreatorTreasuryRate, - CollectionId: proverNftInfo.NftInfo.CollectionId, - } - // update liquidity tree - nBalance, err := commonAsset.ComputeNewBalance( - proverNftInfo.NftRelatedTxDetail.AssetType, - proverNftInfo.NftRelatedTxDetail.Balance, - proverNftInfo.NftRelatedTxDetail.BalanceDelta, - ) - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to compute new balance: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - nNftInfo, err := commonAsset.ParseNftInfo(nBalance) - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to parse pool info: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - nNftHash, err := tree.ComputeNftAssetLeafHash( - nNftInfo.CreatorAccountIndex, - nNftInfo.OwnerAccountIndex, - nNftInfo.NftContentHash, - nNftInfo.NftL1Address, - nNftInfo.NftL1TokenId, - nNftInfo.CreatorTreasuryRate, - nNftInfo.CollectionId, - ) - - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to compute liquidity node hash: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - err = nftTree.Set(uint64(proverNftInfo.NftInfo.NftIndex), nNftHash) - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to update 
liquidity tree: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - if err != nil { - logx.Errorf("[ConstructNftWitness] unable to commit liquidity tree: %s", err.Error()) - return NftRootBefore, NftBefore, MerkleProofsNftBefore, err - } - return NftRootBefore, NftBefore, MerkleProofsNftBefore, nil -} diff --git a/common/util/pubKeyHelper.go b/common/pubkey.go similarity index 84% rename from common/util/pubKeyHelper.go rename to common/pubkey.go index 7f0b129ee..2d59e9f7a 100644 --- a/common/util/pubKeyHelper.go +++ b/common/pubkey.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,12 +15,11 @@ * */ -package util +package common import ( "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" ) func ParsePubKey(pkStr string) (pk *eddsa.PublicKey, err error) { @@ -28,7 +27,6 @@ func ParsePubKey(pkStr string) (pk *eddsa.PublicKey, err error) { pk = new(eddsa.PublicKey) _, err = pk.A.SetBytes(pkBytes) if err != nil { - logx.Errorf("[ParsePubKey] unable to set pk bytes: %s", err.Error()) return nil, err } return pk, nil diff --git a/common/util/globalmapHandler/lockHelper.go b/common/redislock/lock.go similarity index 75% rename from common/util/globalmapHandler/lockHelper.go rename to common/redislock/lock.go index 9283f1397..3fa7e9017 100644 --- a/common/util/globalmapHandler/lockHelper.go +++ b/common/redislock/lock.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,16 +15,21 @@ * */ -package globalmapHandler +package redislock import ( "errors" "time" - "github.com/zeromicro/go-zero/core/logx" "github.com/zeromicro/go-zero/core/stores/redis" ) +const ( + LockExpiryTime = 10 // seconds + RetryInterval = 500 * time.Millisecond + MaxRetryTimes = 3 +) + func GetRedisLockByKey(conn *redis.Redis, keyLock string) (redisLock *redis.RedisLock) { // get lock redisLock = redis.NewRedisLock(conn, keyLock) @@ -37,7 +42,6 @@ func TryAcquireLock(redisLock *redis.RedisLock) (err error) { // lock ok, err := redisLock.Acquire() if err != nil { - logx.Errorf("[GetLatestAssetByLock] unable to acquire the lock: %s", err.Error()) return err } // re-try for three times @@ -47,12 +51,10 @@ func TryAcquireLock(redisLock *redis.RedisLock) (err error) { count := 0 for { if count > MaxRetryTimes { - logx.Errorf("[GetLatestAssetByLock] the lock has been used, re-try later") - return errors.New("[GetLatestAssetByLock] the lock has been used, re-try later") + return errors.New("the lock has been used, re-try later") } ok, err = redisLock.Acquire() if err != nil { - logx.Errorf("[GetLatestAssetByLock] unable to acquire the lock: %s", err.Error()) return err } if ok { diff --git a/common/util/stringHelper.go b/common/string.go similarity index 62% rename from common/util/stringHelper.go rename to common/string.go index a21d5c9f5..6301313f9 100644 --- a/common/util/stringHelper.go +++ b/common/string.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
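
The lock helper that moves from common/util/globalmapHandler into common/redislock keeps the acquire-then-retry behaviour (up to MaxRetryTimes attempts) while dropping the logx logging. A minimal usage sketch, assuming the renamed package resolves to github.com/bnb-chain/zkbas/common/redislock and the caller already holds a go-zero *redis.Redis connection; the wrapper function and key name below are illustrative, not part of the change:

package example

import (
	"github.com/zeromicro/go-zero/core/stores/redis"

	"github.com/bnb-chain/zkbas/common/redislock"
)

// withLock serializes a critical section behind the relocated redislock
// helpers: GetRedisLockByKey builds the lock for the given key and
// TryAcquireLock retries a few times before giving up.
func withLock(conn *redis.Redis, key string, critical func() error) error {
	lock := redislock.GetRedisLockByKey(conn, key)
	if err := redislock.TryAcquireLock(lock); err != nil {
		return err // still held by another writer after all retries
	}
	defer lock.Release()
	return critical()
}
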
@@ -15,15 +15,17 @@ * */ -package util +package common import ( "bytes" "errors" + "math/big" "strings" "unicode" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/crypto" ) func LowerCase(s string) string { @@ -52,20 +54,10 @@ func SerializeAccountName(a []byte) string { return string(bytes.Trim(a[:], "\x00")) + ".legend" } -//func AccountNameHash(accountName string) (res string, err error) { -// words := strings.Split(accountName, ".") -// if len(words) != 2 { -// return "", errors.New("[AccountNameHash] invalid account name") -// } -// hFunc := mimc.NewMiMC() -// buf := make([]byte, 32) -// label := MiMCHash(hFunc, []byte(words[0])) -// res = common.Bytes2Hex( -// MiMCHash(hFunc, append( -// MiMCHash(hFunc, append(buf, -// MiMCHash(hFunc, []byte(words[1]))...)), label...))) -// return res, nil -//} +func keccakHash(value []byte) []byte { + hashVal := crypto.Keccak256Hash(value) + return hashVal[:] +} func AccountNameHash(accountName string) (res string, err error) { // TODO Keccak256 @@ -73,11 +65,23 @@ func AccountNameHash(accountName string) (res string, err error) { if len(words) != 2 { return "", errors.New("[AccountNameHash] invalid account name") } - buf := make([]byte, 32) - label := KeccakHash([]byte(words[0])) - res = common.Bytes2Hex( - KeccakHash(append( - KeccakHash(append(buf, - KeccakHash([]byte(words[1]))...)), label...))) + + q, _ := big.NewInt(0).SetString("21888242871839275222246405745257275088548364400416034343698204186575808495617", 10) + + rootNode := make([]byte, 32) + hashOfBaseNode := keccakHash(append(rootNode, keccakHash([]byte(words[1]))...)) + + baseNode := big.NewInt(0).Mod(big.NewInt(0).SetBytes(hashOfBaseNode), q) + baseNodeBytes := make([]byte, 32) + baseNode.FillBytes(baseNodeBytes) + + nameHash := keccakHash([]byte(words[0])) + subNameHash := keccakHash(append(baseNodeBytes, nameHash...)) + + subNode := big.NewInt(0).Mod(big.NewInt(0).SetBytes(subNameHash), q) + subNodeBytes := make([]byte, 32) + subNode.FillBytes(subNodeBytes) + + res = common.Bytes2Hex(subNodeBytes) return res, nil } diff --git a/common/tree/accountTree.go b/common/tree/accountTree.go deleted file mode 100644 index d89cf2f96..000000000 --- a/common/tree/accountTree.go +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
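
The rewritten AccountNameHash in common/string.go replaces the commented-out MiMC construction with an ENS-style namehash: each label of "name.legend" is Keccak-hashed into the running node, and every intermediate node is reduced modulo the BN254 scalar field so the result fits in one circuit field element. A standalone sketch of the same flow (error handling for malformed names omitted), using the go-ethereum helpers the diff itself imports; crypto.Keccak256 over multiple slices is equivalent to the diff's keccakHash(append(...)):

package example

import (
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

// BN254 scalar field modulus, as hard-coded in the new AccountNameHash.
var q, _ = new(big.Int).SetString(
	"21888242871839275222246405745257275088548364400416034343698204186575808495617", 10)

// namehash mirrors the keccak flow from common/string.go: hash the suffix
// under the zero root node, reduce mod q, then hash the name label under
// that node and reduce again.
func namehash(accountName string) string {
	words := strings.Split(accountName, ".") // e.g. "alice" and "legend"

	rootNode := make([]byte, 32)
	base := crypto.Keccak256(rootNode, crypto.Keccak256([]byte(words[1])))
	baseBytes := make([]byte, 32)
	new(big.Int).Mod(new(big.Int).SetBytes(base), q).FillBytes(baseBytes)

	sub := crypto.Keccak256(baseBytes, crypto.Keccak256([]byte(words[0])))
	subBytes := make([]byte, 32)
	new(big.Int).Mod(new(big.Int).SetBytes(sub), q).FillBytes(subBytes)

	return common.Bytes2Hex(subBytes)
}
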
- * - */ - -package tree - -import ( - "errors" - "strconv" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -func accountAssetNamespace(index int64) string { - return AccountAssetPrefix + strconv.Itoa(int(index)) + ":" -} - -// TODO optimize, bad performance -func InitAccountTree( - accountModel AccountModel, - accountHistoryModel AccountHistoryModel, - blockHeight int64, - ctx *treedb.Context, -) ( - accountTree bsmt.SparseMerkleTree, accountAssetTrees []bsmt.SparseMerkleTree, err error, -) { - // TODO: If there are too many accounts, it may cause reading too long, which can be optimized again - accountNums, err := accountHistoryModel.GetValidAccountNums(blockHeight) - if err != nil { - logx.Errorf("[InitAccountTree] unable to get all accountNums") - return nil, nil, err - } - - opts := ctx.Options(blockHeight) - - // init account state trees - accountAssetTrees = make([]bsmt.SparseMerkleTree, accountNums) - for index := int64(0); index < int64(accountNums); index++ { - // create account assets tree - accountAssetTrees[index], err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), - treedb.SetNamespace(ctx, accountAssetNamespace(index)), AssetTreeHeight, NilAccountAssetNodeHash, - opts...) - if err != nil { - logx.Errorf("[InitAccountTree] unable to create new tree by assets: %s", err.Error()) - return nil, nil, err - } - } - accountTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), - treedb.SetNamespace(ctx, AccountPrefix), AccountTreeHeight, NilAccountNodeHash, - opts...) 
- if err != nil { - logx.Errorf("[InitAccountTree] unable to create new account tree: %s", err.Error()) - return nil, nil, err - } - - if accountNums == 0 { - return accountTree, accountAssetTrees, nil - } - - if ctx.IsLoad() { - _, accountHistories, err := accountHistoryModel.GetValidAccounts(blockHeight) - if err != nil { - logx.Errorf("[InitAccountTree] unable to get all accountHistories") - return nil, nil, err - } - - var ( - accountInfoMap = make(map[int64]*account.Account) - ) - - for _, accountHistory := range accountHistories { - if accountInfoMap[accountHistory.AccountIndex] == nil { - accountInfo, err := accountModel.GetAccountByAccountIndex(accountHistory.AccountIndex) - if err != nil { - logx.Errorf("[InitAccountTree] unable to get account by account index: %s", err.Error()) - return nil, nil, err - } - accountInfoMap[accountHistory.AccountIndex] = &account.Account{ - AccountIndex: accountInfo.AccountIndex, - AccountName: accountInfo.AccountName, - PublicKey: accountInfo.PublicKey, - AccountNameHash: accountInfo.AccountNameHash, - L1Address: accountInfo.L1Address, - Nonce: 0, - CollectionNonce: 0, - Status: account.AccountStatusConfirmed, - } - } - if accountHistory.Nonce != commonConstant.NilNonce { - accountInfoMap[accountHistory.AccountIndex].Nonce = accountHistory.Nonce - } - if accountHistory.CollectionNonce != commonConstant.NilNonce { - accountInfoMap[accountHistory.AccountIndex].CollectionNonce = accountHistory.CollectionNonce - } - accountInfoMap[accountHistory.AccountIndex].AssetInfo = accountHistory.AssetInfo - accountInfoMap[accountHistory.AccountIndex].AssetRoot = accountHistory.AssetRoot - } - - // get related account info - for accountIndex := int64(0); accountIndex < int64(len(accountHistories)); accountIndex++ { - if accountInfoMap[accountIndex] == nil { - logx.Errorf("[InitAccountTree] invalid account index") - return nil, nil, errors.New("[InitAccountTree] invalid account index") - } - oAccountInfo := accountInfoMap[accountIndex] - accountInfo, err := commonAsset.ToFormatAccountInfo(oAccountInfo) - if err != nil { - logx.Errorf("[InitAccountTree] unable to convert to format account info: %s", err.Error()) - return nil, nil, err - } - // create account assets node - for assetId, assetInfo := range accountInfo.AssetInfo { - hashVal, err := AssetToNode( - assetInfo.Balance.String(), - assetInfo.LpAmount.String(), - assetInfo.OfferCanceledOrFinalized.String(), - ) - if err != nil { - logx.Errorf("[InitAccountTree] unable to convert asset to node: %s", err.Error()) - return nil, nil, err - } - err = accountAssetTrees[accountIndex].Set(uint64(assetId), hashVal) - if err != nil { - logx.Errorf("[InitAccountTree] unable to set asset to tree: %s", err.Error()) - return nil, nil, err - } - _, err = accountAssetTrees[accountIndex].Commit(nil) - if err != nil { - logx.Errorf("[InitAccountTree] unable to commit asset tree: %s", err.Error()) - return nil, nil, err - } - } - accountHashVal, err := AccountToNode( - accountInfoMap[accountIndex].AccountNameHash, - accountInfoMap[accountIndex].PublicKey, - accountInfoMap[accountIndex].Nonce, - accountInfoMap[accountIndex].CollectionNonce, - accountAssetTrees[accountIndex].Root(), - ) - if err != nil { - logx.Errorf("[InitAccountTree] unable to convert account to node: %s", err.Error()) - return nil, nil, err - } - err = accountTree.Set(uint64(accountIndex), accountHashVal) - if err != nil { - logx.Errorf("[InitAccountTree] unable to set account to tree: %s", err.Error()) - return nil, nil, err - } - _, err = 
accountTree.Commit(nil) - if err != nil { - logx.Errorf("[InitAccountTree] unable to commit account tree: %s", err.Error()) - return nil, nil, err - } - } - } else { - if accountTree.LatestVersion() > bsmt.Version(blockHeight) && !accountTree.IsEmpty() { - err := accountTree.Rollback(bsmt.Version(blockHeight)) - logx.Infof("[InitAccountTree] account tree version [%d] if higher than block, rollback to %d", accountTree.LatestVersion(), blockHeight) - if err != nil { - logx.Errorf("[InitAccountTree] unable to rollback account tree: %s, version: %d", err.Error(), blockHeight) - return nil, nil, err - } - } - - for i, assetTree := range accountAssetTrees { - if assetTree.LatestVersion() > bsmt.Version(blockHeight) && !assetTree.IsEmpty() { - err := assetTree.Rollback(bsmt.Version(blockHeight)) - logx.Infof("[InitAccountTree] asset tree %d version [%d] if higher than block, rollback to %d", i, assetTree.LatestVersion(), blockHeight) - if err != nil { - logx.Errorf("[InitAccountTree] unable to rollback asset [%d] tree: %s, version: %d", i, err.Error(), blockHeight) - return nil, nil, err - } - } - } - } - - return accountTree, accountAssetTrees, nil -} - -func AssetToNode(balance string, lpAmount string, offerCanceledOrFinalized string) (hashVal []byte, err error) { - hashVal, err = ComputeAccountAssetLeafHash(balance, lpAmount, offerCanceledOrFinalized) - if err != nil { - logx.Errorf("[AccountToNode] unable to compute asset leaf hash: %s", err.Error()) - return nil, err - } - - return hashVal, nil -} - -func AccountToNode( - accountNameHash string, - publicKey string, - nonce int64, - collectionNonce int64, - assetRoot []byte, -) (hashVal []byte, err error) { - hashVal, err = ComputeAccountLeafHash( - accountNameHash, - publicKey, - nonce, - collectionNonce, - assetRoot) - if err != nil { - logx.Errorf("[AccountToNode] unable to compute account leaf hash: %s", err.Error()) - return nil, err - } - - return hashVal, nil -} - -func NewEmptyAccountAssetTree( - ctx *treedb.Context, - index int64, - blockHeight uint64, -) (tree bsmt.SparseMerkleTree, err error) { - return bsmt.NewBASSparseMerkleTree( - bsmt.NewHasher(zmimc.Hmimc), - treedb.SetNamespace(ctx, accountAssetNamespace(index)), - AssetTreeHeight, NilAccountAssetNodeHash, - ctx.Options(int64(blockHeight))...) -} - -func NewMemAccountAssetTree() (tree bsmt.SparseMerkleTree, err error) { - return bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), - memory.NewMemoryDB(), AssetTreeHeight, NilAccountAssetNodeHash) -} diff --git a/common/tree/hashHelper.go b/common/tree/hashHelper.go deleted file mode 100644 index 26da35536..000000000 --- a/common/tree/hashHelper.go +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
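
The deleted accountTree.go rebuilds state bottom-up: each asset balance becomes a leaf of that account's asset tree, and the account leaf then commits to the freshly committed asset-tree root before being written into the account tree. A schematic of that ordering against the bas-smt interface; the two leaf-hash callbacks stand in for the removed AssetToNode/AccountToNode helpers and are assumptions of this sketch, not library APIs:

package example

import (
	bsmt "github.com/bnb-chain/bas-smt"
)

// applyAccountLeaf captures the update order used by the removed
// InitAccountTree: asset leaf first, commit, then an account leaf that
// embeds the new asset-tree root, then commit the account tree.
func applyAccountLeaf(
	accountTree, assetTree bsmt.SparseMerkleTree,
	accountIndex, assetId uint64,
	assetLeaf func() ([]byte, error),
	accountLeaf func(assetRoot []byte) ([]byte, error),
) error {
	assetHash, err := assetLeaf()
	if err != nil {
		return err
	}
	if err := assetTree.Set(assetId, assetHash); err != nil {
		return err
	}
	if _, err := assetTree.Commit(nil); err != nil {
		return err
	}
	accountHash, err := accountLeaf(assetTree.Root())
	if err != nil {
		return err
	}
	if err := accountTree.Set(accountIndex, accountHash); err != nil {
		return err
	}
	_, err = accountTree.Commit(nil)
	return err
}
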
- * - */ - -package tree - -import ( - "bytes" - "math/big" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/util" -) - -func ComputeAccountLeafHash( - accountNameHash string, - pk string, - nonce int64, - collectionNonce int64, - assetRoot []byte, -) (hashVal []byte, err error) { - hFunc := mimc.NewMiMC() - var buf bytes.Buffer - buf.Write(common.FromHex(accountNameHash)) - err = util.PaddingPkIntoBuf(&buf, pk) - if err != nil { - logx.Errorf("[ComputeAccountAssetLeafHash] unable to write pk into buf: %s", err.Error()) - return nil, err - } - util.PaddingInt64IntoBuf(&buf, nonce) - util.PaddingInt64IntoBuf(&buf, collectionNonce) - buf.Write(assetRoot) - hFunc.Reset() - hFunc.Write(buf.Bytes()) - hashVal = hFunc.Sum(nil) - return hashVal, nil -} - -func ComputeAccountAssetLeafHash( - balance string, - lpAmount string, - offerCanceledOrFinalized string, -) (hashVal []byte, err error) { - hFunc := mimc.NewMiMC() - var buf bytes.Buffer - err = util.PaddingStringBigIntIntoBuf(&buf, balance) - if err != nil { - logx.Errorf("[ComputeAccountAssetLeafHash] invalid balance: %s", err.Error()) - return nil, err - } - err = util.PaddingStringBigIntIntoBuf(&buf, lpAmount) - if err != nil { - logx.Errorf("[ComputeAccountAssetLeafHash] invalid balance: %s", err.Error()) - return nil, err - } - err = util.PaddingStringBigIntIntoBuf(&buf, offerCanceledOrFinalized) - if err != nil { - logx.Errorf("[ComputeAccountAssetLeafHash] invalid balance: %s", err.Error()) - return nil, err - } - hFunc.Write(buf.Bytes()) - return hFunc.Sum(nil), nil -} - -func ComputeLiquidityAssetLeafHash( - assetAId int64, - assetA string, - assetBId int64, - assetB string, - lpAmount string, - kLast string, - feeRate int64, - treasuryAccountIndex int64, - treasuryRate int64, -) (hashVal []byte, err error) { - hFunc := mimc.NewMiMC() - var buf bytes.Buffer - util.PaddingInt64IntoBuf(&buf, assetAId) - err = util.PaddingStringBigIntIntoBuf(&buf, assetA) - if err != nil { - logx.Errorf("[ComputeLiquidityAssetLeafHash] unable to write big int to buf: %s", err.Error()) - return nil, err - } - util.PaddingInt64IntoBuf(&buf, assetBId) - err = util.PaddingStringBigIntIntoBuf(&buf, assetB) - if err != nil { - logx.Errorf("[ComputeLiquidityAssetLeafHash] unable to write big int to buf: %s", err.Error()) - return nil, err - } - err = util.PaddingStringBigIntIntoBuf(&buf, lpAmount) - if err != nil { - logx.Errorf("[ComputeLiquidityAssetLeafHash] unable to write big int to buf: %s", err.Error()) - return nil, err - } - err = util.PaddingStringBigIntIntoBuf(&buf, kLast) - if err != nil { - logx.Errorf("[ComputeLiquidityAssetLeafHash] unable to write big int to buf: %s", err.Error()) - return nil, err - } - util.PaddingInt64IntoBuf(&buf, feeRate) - util.PaddingInt64IntoBuf(&buf, treasuryAccountIndex) - util.PaddingInt64IntoBuf(&buf, treasuryRate) - hFunc.Write(buf.Bytes()) - hashVal = hFunc.Sum(nil) - return hashVal, nil -} - -func ComputeNftAssetLeafHash( - creatorAccountIndex int64, - ownerAccountIndex int64, - nftContentHash string, - nftL1Address string, - nftL1TokenId string, - creatorTreasuryRate int64, - collectionId int64, -) (hashVal []byte, err error) { - hFunc := mimc.NewMiMC() - var buf bytes.Buffer - util.PaddingInt64IntoBuf(&buf, creatorAccountIndex) - util.PaddingInt64IntoBuf(&buf, 
ownerAccountIndex) - buf.Write(ffmath.Mod(new(big.Int).SetBytes(common.FromHex(nftContentHash)), curve.Modulus).FillBytes(make([]byte, 32))) - err = util.PaddingAddressIntoBuf(&buf, nftL1Address) - if err != nil { - logx.Errorf("[ComputeNftAssetLeafHash] unable to write address to buf: %s", err.Error()) - return nil, err - } - err = util.PaddingStringBigIntIntoBuf(&buf, nftL1TokenId) - if err != nil { - logx.Errorf("[ComputeNftAssetLeafHash] unable to write big int to buf: %s", err.Error()) - return nil, err - } - util.PaddingInt64IntoBuf(&buf, creatorTreasuryRate) - util.PaddingInt64IntoBuf(&buf, collectionId) - hFunc.Write(buf.Bytes()) - hashVal = hFunc.Sum(nil) - return hashVal, nil -} - -func ComputeStateRootHash( - accountRoot []byte, - liquidityRoot []byte, - nftRoot []byte, -) []byte { - hFunc := mimc.NewMiMC() - hFunc.Write(accountRoot) - hFunc.Write(liquidityRoot) - hFunc.Write(nftRoot) - return hFunc.Sum(nil) -} diff --git a/common/tree/liquidityTree.go b/common/tree/liquidityTree.go deleted file mode 100644 index d91f707f6..000000000 --- a/common/tree/liquidityTree.go +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package tree - -import ( - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -// TODO replace history as liquidity + liquidityHistory -func InitLiquidityTree( - liquidityHistoryModel LiquidityHistoryModel, - blockHeight int64, - ctx *treedb.Context, -) ( - liquidityTree bsmt.SparseMerkleTree, err error, -) { - - liquidityTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), - treedb.SetNamespace(ctx, LiquidityPrefix), LiquidityTreeHeight, NilLiquidityNodeHash, - ctx.Options(blockHeight)...) 
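
The removed hashHelper.go makes the commitment structure explicit: account, liquidity and NFT leaves are MiMC hashes over fixed-width padded fields, and the global state root is one more MiMC hash over the three sub-tree roots. A minimal sketch of the state-root step, using the same gnark-crypto MiMC package the deleted code imports; the leaf-level padding helpers are not reproduced here:

package example

import (
	"github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc"
)

// stateRoot mirrors ComputeStateRootHash from the removed hashHelper.go:
// a single MiMC hash over the three 32-byte sub-tree roots, in fixed order.
func stateRoot(accountRoot, liquidityRoot, nftRoot []byte) []byte {
	h := mimc.NewMiMC()
	h.Write(accountRoot)
	h.Write(liquidityRoot)
	h.Write(nftRoot)
	return h.Sum(nil)
}
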
- if err != nil { - logx.Errorf("[InitLiquidityTree] unable to create tree from db: %s", err.Error()) - return nil, err - } - - if ctx.IsLoad() { - liquidityAssets, err := liquidityHistoryModel.GetLatestLiquidityByBlockHeight(blockHeight) - if err != nil { - if err != errorcode.DbErrNotFound { - logx.Errorf("[InitLiquidityTree] unable to get latest nft assets: %s", err.Error()) - return nil, err - } - } - for _, liquidityAsset := range liquidityAssets { - pairIndex := liquidityAsset.PairIndex - hashVal, err := LiquidityAssetToNode( - liquidityAsset.AssetAId, liquidityAsset.AssetA, - liquidityAsset.AssetBId, liquidityAsset.AssetB, - liquidityAsset.LpAmount, liquidityAsset.KLast, - liquidityAsset.FeeRate, liquidityAsset.TreasuryAccountIndex, liquidityAsset.TreasuryRate) - if err != nil { - logx.Errorf("[InitLiquidityTree] unable to convert liquidity asset to node: %s", err.Error()) - return nil, err - } - err = liquidityTree.Set(uint64(pairIndex), hashVal) - if err != nil { - logx.Errorf("[InitLiquidityTree] unable to write liquidity asset to tree: %s", err.Error()) - return nil, err - } - _, err = liquidityTree.Commit(nil) - if err != nil { - logx.Errorf("[InitLiquidityTree] unable to commit liquidity tree: %s", err.Error()) - return nil, err - } - } - } else if liquidityTree.LatestVersion() > bsmt.Version(blockHeight) && !liquidityTree.IsEmpty() { - err := liquidityTree.Rollback(bsmt.Version(blockHeight)) - logx.Infof("[InitLiquidityTree] liquidity tree version [%d] if higher than block, rollback to %d", liquidityTree.LatestVersion(), blockHeight) - if err != nil { - logx.Errorf("[InitLiquidityTree] unable to rollback liquidity tree: %s, version: %d", err.Error(), blockHeight) - return nil, err - } - } - - return liquidityTree, nil -} - -func LiquidityAssetToNode( - assetAId int64, - assetA string, - assetBId int64, - assetB string, - lpAmount string, - kLast string, - feeRate int64, - treasuryAccountIndex int64, - treasuryFeeRate int64, -) (hashVal []byte, err error) { - hashVal, err = ComputeLiquidityAssetLeafHash( - assetAId, assetA, - assetBId, assetB, - lpAmount, - kLast, - feeRate, - treasuryAccountIndex, - treasuryFeeRate, - ) - if err != nil { - logx.Errorf("[AccountToNode] unable to compute liquidity asset leaf hash: %s", err.Error()) - return nil, err - } - return hashVal, nil -} diff --git a/common/tree/nftTree.go b/common/tree/nftTree.go deleted file mode 100644 index eefe5331a..000000000 --- a/common/tree/nftTree.go +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
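
Both Init*Tree helpers deleted here share the same recovery rule: when the persisted tree's latest version is ahead of the block height being reprocessed, roll it back rather than replaying history from the database. A small sketch of that guard, using only interface methods exercised in the deleted code:

package example

import (
	bsmt "github.com/bnb-chain/bas-smt"
)

// syncTreeToHeight applies the rollback guard from the removed Init*Tree
// helpers so that proofs are built against a state consistent with the
// target block height.
func syncTreeToHeight(tree bsmt.SparseMerkleTree, blockHeight int64) error {
	if tree.LatestVersion() > bsmt.Version(blockHeight) && !tree.IsEmpty() {
		return tree.Rollback(bsmt.Version(blockHeight))
	}
	return nil
}
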
- * - */ - -package tree - -import ( - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/pkg/treedb" -) - -// TODO replace history as nft + nftHistory -func InitNftTree( - nftHistoryModel L2NftHistoryModel, - blockHeight int64, - ctx *treedb.Context, -) ( - nftTree bsmt.SparseMerkleTree, err error, -) { - nftTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), - treedb.SetNamespace(ctx, NFTPrefix), NftTreeHeight, NilNftNodeHash, - ctx.Options(blockHeight)...) - if err != nil { - logx.Errorf("[InitNftTree] unable to create tree from db: %s", err.Error()) - return nil, err - } - - if ctx.IsLoad() { - _, nftAssets, err := nftHistoryModel.GetLatestNftAssetsByBlockHeight(blockHeight) - if err != nil { - logx.Errorf("[InitNftTree] unable to get latest nft assets: %s", err.Error()) - return nil, err - } - for _, nftAsset := range nftAssets { - nftIndex := nftAsset.NftIndex - hashVal, err := NftAssetToNode(nftAsset) - if err != nil { - logx.Errorf("[InitNftTree] unable to convert nft asset to node: %s", err.Error()) - return nil, err - } - - err = nftTree.Set(uint64(nftIndex), hashVal) - if err != nil { - logx.Errorf("[InitNftTree] unable to write nft asset to tree: %s", err.Error()) - return nil, err - } - _, err = nftTree.Commit(nil) - if err != nil { - logx.Errorf("[InitNftTree] unable to commit nft tree: %s", err.Error()) - return nil, err - } - } - } else if nftTree.LatestVersion() > bsmt.Version(blockHeight) && !nftTree.IsEmpty() { - err := nftTree.Rollback(bsmt.Version(blockHeight)) - logx.Infof("[InitNftTree] nft tree version [%d] if higher than block, rollback to %d", nftTree.LatestVersion(), blockHeight) - if err != nil { - logx.Errorf("[InitNftTree] unable to rollback nft tree: %s, version: %d", err.Error(), blockHeight) - return nil, err - } - } - return nftTree, nil -} - -func NftAssetToNode(nftAsset *AccountL2NftHistory) (hashVal []byte, err error) { - hashVal, err = ComputeNftAssetLeafHash( - nftAsset.CreatorAccountIndex, - nftAsset.OwnerAccountIndex, - nftAsset.NftContentHash, - nftAsset.NftL1Address, - nftAsset.NftL1TokenId, - nftAsset.CreatorTreasuryRate, - nftAsset.CollectionId, - ) - if err != nil { - logx.Errorf("[NftAssetToNode] unable to compute nft asset leaf hash: %s", err.Error()) - return nil, err - } - return hashVal, nil -} diff --git a/common/util/bytesParser.go b/common/util/bytesParser.go deleted file mode 100644 index b173a8d6e..000000000 --- a/common/util/bytesParser.go +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package util - -import ( - "encoding/binary" - "math/big" - - "github.com/ethereum/go-ethereum/common" -) - -func ReadUint8(buf []byte, offset int) (newOffset int, res uint8) { - return offset + 1, buf[offset] -} - -func ReadUint16(buf []byte, offset int) (newOffset int, res uint16) { - res = binary.BigEndian.Uint16(buf[offset : offset+2]) - return offset + 2, res -} - -func ReadUint32(buf []byte, offset int) (newOffset int, res uint32) { - res = binary.BigEndian.Uint32(buf[offset : offset+4]) - return offset + 4, res -} - -func ReadUint40(buf []byte, offset int) (newOffset int, res int64) { - return offset + 5, new(big.Int).SetBytes(buf[offset : offset+5]).Int64() -} - -func ReadUint128(buf []byte, offset int) (newOffset int, res *big.Int) { - return offset + 16, new(big.Int).SetBytes(buf[offset : offset+16]) -} - -func ReadUint256(buf []byte, offset int) (newOffset int, res *big.Int) { - return offset + 32, new(big.Int).SetBytes(buf[offset : offset+32]) -} - -func ReadBytes32(buf []byte, offset int) (newOffset int, res []byte) { - res = make([]byte, 32) - copy(res[:], buf[offset:offset+32]) - return offset + 32, res -} - -func ReadAddress(buf []byte, offset int) (newOffset int, res string) { - res = common.BytesToAddress(buf[offset : offset+20]).Hex() - return offset + 20, res -} diff --git a/common/util/constant.go b/common/util/constant.go deleted file mode 100644 index a202e5136..000000000 --- a/common/util/constant.go +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
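
The deleted bytesParser.go readers all share one calling convention: take the buffer plus a cursor, return the advanced cursor plus the decoded big-endian value, so pubdata fields decode as a simple chain with no manual index bookkeeping. A self-contained sketch of that pattern (local re-implementations, not the removed util functions), walking the head of a deposit-style record as laid out by ConvertTxToDepositPubData further below:

package example

import (
	"encoding/binary"
	"math/big"
)

// Each reader returns (newOffset, value) so calls can be chained.
func readUint8(buf []byte, offset int) (int, uint8) {
	return offset + 1, buf[offset]
}

func readUint16(buf []byte, offset int) (int, uint16) {
	return offset + 2, binary.BigEndian.Uint16(buf[offset : offset+2])
}

func readUint32(buf []byte, offset int) (int, uint32) {
	return offset + 4, binary.BigEndian.Uint32(buf[offset : offset+4])
}

func readUint128(buf []byte, offset int) (int, *big.Int) {
	return offset + 16, new(big.Int).SetBytes(buf[offset : offset+16])
}

// decodeDepositHead walks the first deposit chunk:
// txType (1) | accountIndex (4) | assetId (2) | assetAmount (16).
func decodeDepositHead(pubData []byte) (txType uint8, accountIndex uint32, assetId uint16, amount *big.Int) {
	offset := 0
	offset, txType = readUint8(pubData, offset)
	offset, accountIndex = readUint32(pubData, offset)
	offset, assetId = readUint16(pubData, offset)
	_, amount = readUint128(pubData, offset)
	return txType, accountIndex, assetId, amount
}
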
- * - */ - -package util - -import ( - "github.com/bnb-chain/zkbas/common/commonTx" -) - -type ( - RegisterZnsTxInfo = commonTx.RegisterZnsTxInfo - CreatePairTxInfo = commonTx.CreatePairTxInfo - UpdatePairRateTxInfo = commonTx.UpdatePairRateTxInfo - DepositTxInfo = commonTx.DepositTxInfo - DepositNftTxInfo = commonTx.DepositNftTxInfo - FullExitTxInfo = commonTx.FullExitTxInfo - FullExitNftTxInfo = commonTx.FullExitNftTxInfo -) - -const ( - Base = 10 - AddressSize = 20 - - FeeRateBase = 10000 - - EmptyStringKeccak = "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" -) - -const ( - CHUNK_BYTES_SIZE = 32 - TXTYPE_BYTES_SIZE = 1 - NFTTYPE_BYTES_SIZE = 1 - ADDRESS_BYTES_SIZE = 20 - ACCOUNTINDEX_BYTES_SIZE = 4 - ACCOUNTNAME_BYTES_SIZE = 32 - ACCOUNTNAMEHASH_BYTES_SIZE = 32 - PUBKEY_BYTES_SIZE = 32 - ASSETID_BYTES_SIZE = 2 - PAIRINDEX_BYTES_SIZE = 2 - PACKEDAMOUNT_BYTES_SIZE = 5 - STATEAMOUNT_BYTES_SIZE = 16 - NFTINDEX_BYTES_SIZE = 5 - NFTTOKENID_BYTES_SIZE = 32 - NFTCONTENTHASH_BYTES_SIZE = 32 - FEERATE_BYTES_SIZE = 2 - COLLECTIONID_BYTES_SIZE = 2 - - RegisterZnsPubDataSize = TXTYPE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + ACCOUNTNAME_BYTES_SIZE + - ACCOUNTNAMEHASH_BYTES_SIZE + PUBKEY_BYTES_SIZE + PUBKEY_BYTES_SIZE - CreatePairPubDataSize = TXTYPE_BYTES_SIZE + PAIRINDEX_BYTES_SIZE + - ASSETID_BYTES_SIZE + ASSETID_BYTES_SIZE + FEERATE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + FEERATE_BYTES_SIZE - UpdatePairRatePubdataSize = TXTYPE_BYTES_SIZE + PAIRINDEX_BYTES_SIZE + - FEERATE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + FEERATE_BYTES_SIZE - DepositPubDataSize = TXTYPE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + - ACCOUNTNAMEHASH_BYTES_SIZE + ASSETID_BYTES_SIZE + STATEAMOUNT_BYTES_SIZE - DepositNftPubDataSize = TXTYPE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + NFTINDEX_BYTES_SIZE + ADDRESS_BYTES_SIZE + - ACCOUNTINDEX_BYTES_SIZE + FEERATE_BYTES_SIZE + NFTCONTENTHASH_BYTES_SIZE + NFTTOKENID_BYTES_SIZE + - ACCOUNTNAMEHASH_BYTES_SIZE + COLLECTIONID_BYTES_SIZE - FullExitPubDataSize = TXTYPE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + - ACCOUNTNAMEHASH_BYTES_SIZE + ASSETID_BYTES_SIZE + STATEAMOUNT_BYTES_SIZE - FullExitNftPubDataSize = TXTYPE_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + ACCOUNTINDEX_BYTES_SIZE + FEERATE_BYTES_SIZE + - NFTINDEX_BYTES_SIZE + COLLECTIONID_BYTES_SIZE + ADDRESS_BYTES_SIZE + - ACCOUNTNAMEHASH_BYTES_SIZE + ACCOUNTNAMEHASH_BYTES_SIZE + - NFTCONTENTHASH_BYTES_SIZE + NFTTOKENID_BYTES_SIZE -) - -const ( - TypeAccountIndex = iota - TypeAssetId - TypeAccountName - TypeAccountNameOmitSpace - TypeAccountPk - TypePairIndex - TypeLimit - TypeOffset - TypeHash - TypeBlockHeight - TypeTxType - TypeChainId - TypeLPAmount - TypeAssetAmount - TypeBoolean - TypeGasFee -) diff --git a/common/util/globalmapHandler/constant.go b/common/util/globalmapHandler/constant.go deleted file mode 100644 index 40522ed82..000000000 --- a/common/util/globalmapHandler/constant.go +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
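
The byte-width constants removed from common/util/constant.go are what make those layouts auditable: each *PubDataSize is just the sum of its field widths, and the builders then spread the payload over six 32-byte chunks. A tiny sketch of the arithmetic for the RegisterZns record; the constant names here are local stand-ins for the deleted ones:

package example

// Field widths mirrored from the removed constant.go.
const (
	txTypeBytes          = 1
	accountIndexBytes    = 4
	accountNameBytes     = 32
	accountNameHashBytes = 32
	pubkeyBytes          = 32
)

// registerZnsPubDataSize = 1 + 4 + 32 + 32 + 32 + 32 = 133 payload bytes,
// which the pubdata builder lays out across six 32-byte chunks (192 bytes).
const registerZnsPubDataSize = txTypeBytes + accountIndexBytes + accountNameBytes +
	accountNameHashBytes + pubkeyBytes + pubkeyBytes
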
- */ - -package globalmapHandler - -import ( - "time" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/model/nft" -) - -type ( - NftModel = nft.L2NftModel - Redis = redis.Redis - RedisLock = redis.RedisLock - - LiquidityInfo = commonAsset.LiquidityInfo -) - -const ( - LockExpiryTime = 10 // seconds - RetryInterval = 500 * time.Millisecond - MaxRetryTimes = 3 - - LiquidityExpiryTime = 30 // seconds - NftExpiryTime = 30 // seconds -) diff --git a/common/util/globalmapHandler/nftIndexForWrite.go b/common/util/globalmapHandler/nftIndexForWrite.go deleted file mode 100644 index 42d47b051..000000000 --- a/common/util/globalmapHandler/nftIndexForWrite.go +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package globalmapHandler - -import ( - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/util" -) - -func GetLatestNftIndexForWrite( - nftModel NftModel, - redisConnection *Redis, -) ( - redisLock *RedisLock, - nftIndex int64, - err error, -) { - key := util.GetNftIndexKeyForWrite() - lockKey := util.GetLockKey(key) - redisLock = GetRedisLockByKey(redisConnection, lockKey) - err = TryAcquireLock(redisLock) - if err != nil { - logx.Errorf("[GetLatestNftIndexForWrite] unable to get lock: %s", err.Error()) - return nil, -1, err - } - lastNftIndex, err := nftModel.GetLatestNftIndex() - if err != nil { - redisLock.Release() - logx.Errorf("[GetLatestNftIndexForWrite] unable to get latest nft index: %s", err.Error()) - return nil, -1, err - } - return redisLock, lastNftIndex + 1, nil -} diff --git a/common/util/nftHelper_test.go b/common/util/nftHelper_test.go deleted file mode 100644 index 2436b5fbb..000000000 --- a/common/util/nftHelper_test.go +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package util - -import ( - "fmt" - "math/big" - "testing" -) - -func TestEmptyNftInfo(t *testing.T) { - a, isValid := new(big.Int).SetString("0", 16) - if !isValid { - t.Fatal("invalid") - } - fmt.Println(a.String()) -} diff --git a/common/util/onchainHelper.go b/common/util/onchainHelper.go deleted file mode 100644 index e3c17fd8f..000000000 --- a/common/util/onchainHelper.go +++ /dev/null @@ -1,30 +0,0 @@ -package util - -import ( - "math/big" - - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/ethereum/go-ethereum/common" - - "github.com/bnb-chain/zkbas/common/model/block" -) - -func ConstructStoredBlockInfo(oBlock *block.Block) zkbas.StorageStoredBlockInfo { - var ( - PendingOnchainOperationsHash [32]byte - StateRoot [32]byte - Commitment [32]byte - ) - copy(PendingOnchainOperationsHash[:], common.FromHex(oBlock.PendingOnChainOperationsHash)[:]) - copy(StateRoot[:], common.FromHex(oBlock.StateRoot)[:]) - copy(Commitment[:], common.FromHex(oBlock.BlockCommitment)[:]) - return zkbas.StorageStoredBlockInfo{ - BlockNumber: uint32(oBlock.BlockHeight), - PriorityOperations: uint64(oBlock.PriorityOperations), - PendingOnchainOperationsHash: PendingOnchainOperationsHash, - Timestamp: big.NewInt(oBlock.CreatedAt.UnixMilli()), - StateRoot: StateRoot, - Commitment: Commitment, - BlockSize: oBlock.BlockSize, - } -} diff --git a/common/util/packedAmountHelper.go b/common/util/packedAmountHelper.go deleted file mode 100644 index 2b11cf43e..000000000 --- a/common/util/packedAmountHelper.go +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package util - -import ( - "math/big" - - "github.com/bnb-chain/zkbas-crypto/util" -) - -/* - ToPackedAmount: convert big int to 40 bit, 5 bits for 10^x, 35 bits for a * 10^x -*/ -func ToPackedAmount(amount *big.Int) (res int64, err error) { - return util.ToPackedAmount(amount) -} - -func CleanPackedAmount(amount *big.Int) (nAmount *big.Int, err error) { - return util.CleanPackedAmount(amount) -} - -/* - ToPackedFee: convert big int to 16 bit, 5 bits for 10^x, 11 bits for a * 10^x -*/ -func ToPackedFee(amount *big.Int) (res int64, err error) { - return util.ToPackedFee(amount) -} - -func CleanPackedFee(amount *big.Int) (nAmount *big.Int, err error) { - return util.CleanPackedFee(amount) -} diff --git a/common/util/pubdataHelper.go b/common/util/pubdataHelper.go deleted file mode 100644 index b223bafab..000000000 --- a/common/util/pubdataHelper.go +++ /dev/null @@ -1,776 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
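
The thin packedAmountHelper.go wrappers deleted here delegate to zkbas-crypto: amounts are squeezed into 40 bits (5 bits of decimal exponent, 35 bits of mantissa) and fees into 16 bits (5 + 11), so only values of the form a*10^x are representable exactly. A hedged usage sketch; the exact rounding behaviour of CleanPackedAmount lives in zkbas-crypto, and the only assumption made here is that it returns a representable amount before packing:

package example

import (
	"fmt"
	"math/big"

	"github.com/bnb-chain/zkbas-crypto/util"
)

// packAmount normalizes an arbitrary amount into the packable domain first,
// then packs it into the 40-bit representation used in pubdata.
func packAmount(amount *big.Int) (int64, error) {
	cleaned, err := util.CleanPackedAmount(amount) // assumed to map onto a representable a*10^x
	if err != nil {
		return 0, err
	}
	packed, err := util.ToPackedAmount(cleaned)
	if err != nil {
		return 0, err
	}
	fmt.Printf("amount %s -> cleaned %s -> packed %d\n", amount, cleaned, packed)
	return packed, nil
}
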
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package util - -import ( - "bytes" - "errors" - "math/big" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" -) - -func ConvertTxToRegisterZNSPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeRegisterZns { - logx.Errorf("[ConvertTxToRegisterZNSPubData] invalid tx type") - return nil, errors.New("[ConvertTxToRegisterZNSPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseRegisterZnsTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToRegisterZNSPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize(AccountNameToBytes32(txInfo.AccountName))) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) - pk, err := ParsePubKey(txInfo.PubKey) - if err != nil { - logx.Errorf("[ConvertTxToRegisterZNSPubData] unable to parse pub key: %s", err.Error()) - return nil, err - } - // because we can get Y from X, so we only need to store X is enough - buf.Write(PrefixPaddingBufToChunkSize(pk.A.X.Marshal())) - buf.Write(PrefixPaddingBufToChunkSize(pk.A.Y.Marshal())) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToCreatePairPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeCreatePair { - logx.Errorf("[ConvertTxToCreatePairPubData] invalid tx type") - return nil, errors.New("[ConvertTxToCreatePairPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseCreatePairTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToCreatePairPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint16ToBytes(uint16(txInfo.PairIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetAId))) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetBId))) - buf.Write(Uint16ToBytes(uint16(txInfo.FeeRate))) - buf.Write(Uint32ToBytes(uint32(txInfo.TreasuryAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.TreasuryRate))) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToUpdatePairRatePubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeUpdatePairRate { - logx.Errorf("[ConvertTxToUpdatePairRatePubData] invalid tx type") - return nil, 
errors.New("[ConvertTxToUpdatePairRatePubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseUpdatePairRateTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToUpdatePairRatePubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint16ToBytes(uint16(txInfo.PairIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.FeeRate))) - buf.Write(Uint32ToBytes(uint32(txInfo.TreasuryAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.TreasuryRate))) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToDepositPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeDeposit { - logx.Errorf("[ConvertTxToDepositPubData] invalid tx type") - return nil, errors.New("[ConvertTxToDepositPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseDepositTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetId))) - buf.Write(Uint128ToBytes(txInfo.AssetAmount)) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToDepositNftPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeDepositNft { - logx.Errorf("[ConvertTxToDepositNftPubData] invalid tx type") - return nil, errors.New("[ConvertTxToDepositNftPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseDepositNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToDepositNftPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint40ToBytes(txInfo.NftIndex)) - buf.Write(AddressStrToBytes(txInfo.NftL1Address)) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) - buf.Write(Uint16ToBytes(uint16(txInfo.CollectionId))) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) - buf.Write(Uint256ToBytes(txInfo.NftL1TokenId)) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToTransferPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeTransfer { - logx.Errorf("[ConvertTxToTransferPubData] invalid tx type") - return nil, errors.New("[ConvertTxToTransferPubData] 
invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseTransferTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.ToAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetId))) - packedAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAmountBytes) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.CallDataHash)) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - pubData = buf.Bytes() - return pubData, nil -} - -func ConvertTxToSwapPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeSwap { - logx.Errorf("[ConvertTxToSwapPubData] invalid tx type") - return nil, errors.New("[ConvertTxToSwapPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseSwapTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToSwapPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.PairIndex))) - packedAssetAAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetAAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetAAmountBytes) - packedAssetBAmountDeltaBytes, err := AmountToPackedAmountBytes(txInfo.AssetBAmountDelta) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetBAmountDeltaBytes) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToAddLiquidityPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeAddLiquidity { - logx.Errorf("[ConvertTxToAddLiquidityPubData] invalid tx 
type") - return nil, errors.New("[ConvertTxToAddLiquidityPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseAddLiquidityTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToAddLiquidityPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.PairIndex))) - packedAssetAAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetAAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetAAmountBytes) - packedAssetBAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetBAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetBAmountBytes) - LpAmountBytes, err := AmountToPackedAmountBytes(txInfo.LpAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(LpAmountBytes) - KLastBytes, err := AmountToPackedAmountBytes(txInfo.KLast) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(KLastBytes) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - treasuryAmountBytes, err := AmountToPackedAmountBytes(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(treasuryAmountBytes) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToRemoveLiquidityPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeRemoveLiquidity { - logx.Errorf("[ConvertTxToRemoveLiquidityPubData] invalid tx type") - return nil, errors.New("[ConvertTxToRemoveLiquidityPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseRemoveLiquidityTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToRemoveLiquidityPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.PairIndex))) - packedAssetAAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetAAmountDelta) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetAAmountBytes) - packedAssetBAmountBytes, err := AmountToPackedAmountBytes(txInfo.AssetBAmountDelta) - if err != nil { - 
logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAssetBAmountBytes) - LpAmountBytes, err := AmountToPackedAmountBytes(txInfo.LpAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(LpAmountBytes) - KLastBytes, err := AmountToPackedAmountBytes(txInfo.KLast) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(KLastBytes) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - treasuryAmountBytes, err := AmountToPackedAmountBytes(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(treasuryAmountBytes) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToWithdrawPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeWithdraw { - logx.Errorf("[ConvertTxToWithdrawPubData] invalid tx type") - return nil, errors.New("[ConvertTxToWithdrawPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseWithdrawTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToWithdrawPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(AddressStrToBytes(txInfo.ToAddress)) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetId))) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(Uint128ToBytes(txInfo.AssetAmount)) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToCreateCollectionPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeCreateCollection { - logx.Errorf("[ConvertTxToCreateCollectionPubData] invalid tx type") - return nil, errors.New("[ConvertTxToCreateCollectionPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseCreateCollectionTxInfo(oTx.TxInfo) - if 
err != nil { - logx.Errorf("[ConvertTxToCreateCollectionPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.CollectionId))) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToMintNftPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeMintNft { - logx.Errorf("[ConvertTxToMintNftPubData] invalid tx type") - return nil, errors.New("[ConvertTxToMintNftPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseMintNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToMintNftPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.ToAccountIndex))) - buf.Write(Uint40ToBytes(txInfo.NftIndex)) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - buf.Write(Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) - buf.Write(Uint16ToBytes(uint16(txInfo.NftCollectionId))) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize(common.FromHex(txInfo.NftContentHash))) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToTransferNftPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeTransferNft { - logx.Errorf("[ConvertTxToMintNftPubData] invalid tx type") - return nil, errors.New("[ConvertTxToMintNftPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseTransferNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToMintNftPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.FromAccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.ToAccountIndex))) - buf.Write(Uint40ToBytes(txInfo.NftIndex)) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] 
unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.CallDataHash)) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToAtomicMatchPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeAtomicMatch { - logx.Errorf("[ConvertTxToAtomicMatchPubData] invalid tx type") - return nil, errors.New("[ConvertTxToAtomicMatchPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseAtomicMatchTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToAtomicMatchPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.BuyOffer.AccountIndex))) - buf.Write(Uint24ToBytes(txInfo.BuyOffer.OfferId)) - buf.Write(Uint32ToBytes(uint32(txInfo.SellOffer.AccountIndex))) - buf.Write(Uint24ToBytes(txInfo.SellOffer.OfferId)) - buf.Write(Uint40ToBytes(txInfo.BuyOffer.NftIndex)) - buf.Write(Uint16ToBytes(uint16(txInfo.SellOffer.AssetId))) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - packedAmountBytes, err := AmountToPackedAmountBytes(txInfo.BuyOffer.AssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(packedAmountBytes) - creatorAmountBytes, err := AmountToPackedAmountBytes(txInfo.CreatorAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(creatorAmountBytes) - treasuryAmountBytes, err := AmountToPackedAmountBytes(txInfo.TreasuryAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) - return nil, err - } - buf.Write(treasuryAmountBytes) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToCancelOfferPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeCancelOffer { - logx.Errorf("[ConvertTxToCancelOfferPubData] invalid tx type") - return nil, errors.New("[ConvertTxToCancelOfferPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseCancelOfferTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToCancelOfferPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - 
buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint24ToBytes(txInfo.OfferId)) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToWithdrawNftPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeWithdrawNft { - logx.Errorf("[ConvertTxToWithdrawNftPubData] invalid tx type") - return nil, errors.New("[ConvertTxToWithdrawNftPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseWithdrawNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToWithdrawNftPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) - buf.Write(Uint40ToBytes(txInfo.NftIndex)) - buf.Write(Uint16ToBytes(uint16(txInfo.CollectionId))) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(AddressStrToBytes(txInfo.NftL1Address)) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(AddressStrToBytes(txInfo.ToAddress)) - buf.Write(Uint32ToBytes(uint32(txInfo.GasAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) - packedFeeBytes, err := FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) - if err != nil { - logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) - return nil, err - } - buf.Write(packedFeeBytes) - chunk3 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(chunk3) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) - buf.Write(Uint256ToBytes(txInfo.NftL1TokenId)) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.CreatorAccountNameHash)) - return buf.Bytes(), nil -} - -func ConvertTxToFullExitPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeFullExit { - logx.Errorf("[ConvertTxToFullExitPubData] invalid tx type") - return nil, errors.New("[ConvertTxToFullExitPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseFullExitTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToFullExitPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.AssetId))) - buf.Write(Uint128ToBytes(txInfo.AssetAmount)) - chunk := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - 
buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - buf.Write(PrefixPaddingBufToChunkSize([]byte{})) - return buf.Bytes(), nil -} - -func ConvertTxToFullExitNftPubData(oTx *mempool.MempoolTx) (pubData []byte, err error) { - if oTx.TxType != commonTx.TxTypeFullExitNft { - logx.Errorf("[ConvertTxToFullExitNftPubData] invalid tx type") - return nil, errors.New("[ConvertTxToFullExitNftPubData] invalid tx type") - } - // parse tx - txInfo, err := commonTx.ParseFullExitNftTxInfo(oTx.TxInfo) - if err != nil { - logx.Errorf("[ConvertTxToFullExitNftPubData] unable to parse tx info: %s", err.Error()) - return nil, err - } - var buf bytes.Buffer - buf.WriteByte(uint8(oTx.TxType)) - buf.Write(Uint32ToBytes(uint32(txInfo.AccountIndex))) - buf.Write(Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) - buf.Write(Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) - buf.Write(Uint40ToBytes(txInfo.NftIndex)) - buf.Write(Uint16ToBytes(uint16(txInfo.CollectionId))) - chunk1 := SuffixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(AddressStrToBytes(txInfo.NftL1Address)) - chunk2 := PrefixPaddingBufToChunkSize(buf.Bytes()) - buf.Reset() - buf.Write(chunk1) - buf.Write(chunk2) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.CreatorAccountNameHash)) - buf.Write(PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) - buf.Write(Uint256ToBytes(txInfo.NftL1TokenId)) - return buf.Bytes(), nil -} - -// create block commitment -func CreateBlockCommitment( - currentBlockHeight int64, - createdAt int64, - oldStateRoot []byte, - newStateRoot []byte, - pubData []byte, - onChainOpsCount int64, -) string { - var buf bytes.Buffer - PaddingInt64IntoBuf(&buf, currentBlockHeight) - PaddingInt64IntoBuf(&buf, createdAt) - buf.Write(CleanAndPaddingByteByModulus(oldStateRoot)) - buf.Write(CleanAndPaddingByteByModulus(newStateRoot)) - buf.Write(CleanAndPaddingByteByModulus(pubData)) - PaddingInt64IntoBuf(&buf, onChainOpsCount) - // TODO Keccak256 - //hFunc := mimc.NewMiMC() - //hFunc.Write(buf.Bytes()) - //commitment := hFunc.Sum(nil) - commitment := KeccakHash(buf.Bytes()) - return common.Bytes2Hex(commitment) -} - -func CleanAndPaddingByteByModulus(buf []byte) []byte { - if len(buf) <= 32 { - return ffmath.Mod(new(big.Int).SetBytes(buf), curve.Modulus).FillBytes(make([]byte, 32)) - } - offset := 32 - var pendingBuf bytes.Buffer - for offset <= len(buf) { - pendingBuf.Write(ffmath.Mod(new(big.Int).SetBytes(buf[offset-32:offset]), curve.Modulus).FillBytes(make([]byte, 32))) - offset += 32 - } - return pendingBuf.Bytes() -} diff --git a/common/util/pubdataParser.go b/common/util/pubdataParser.go deleted file mode 100644 index 5cd50181a..000000000 --- a/common/util/pubdataParser.go +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
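For orientation, each removed ConvertTxTo*PubData helper above serializes one tx into six fixed-size chunks (the first chunk suffix-padded with zeroes, the remaining chunks prefix-padded, unused chunks left empty), and the removed CreateBlockCommitment then Keccak-hashes the block header fields together with the concatenated pubdata. The self-contained Go sketch below shows the shape of both steps under stated assumptions: the 32-byte chunk size, the big-endian int64 encoding and the local padding helpers stand in for the repo's own utilities, and the BN254 field-modulus reduction applied by CleanAndPaddingByteByModulus is deliberately omitted.

package main

import (
	"bytes"
	"encoding/hex"
	"fmt"
	"math/big"

	"golang.org/x/crypto/sha3"
)

const chunkSize = 32 // assumed bytes per pubdata chunk

// suffixPad right-pads a partial chunk with zeroes, as the removed
// SuffixPaddingBufToChunkSize did for the first chunk of a tx.
func suffixPad(b []byte) []byte {
	out := make([]byte, chunkSize)
	copy(out, b)
	return out
}

// prefixPad left-pads a value into a full chunk, as PrefixPaddingBufToChunkSize did.
func prefixPad(b []byte) []byte {
	out := make([]byte, chunkSize)
	copy(out[chunkSize-len(b):], b)
	return out
}

// pad32 encodes an int64 as a 32-byte big-endian word (assumed layout of PaddingInt64IntoBuf).
func pad32(v int64) []byte {
	return new(big.Int).SetInt64(v).FillBytes(make([]byte, 32))
}

// blockCommitment mirrors the shape of the removed CreateBlockCommitment:
// Keccak256(height || createdAt || oldStateRoot || newStateRoot || pubData || onChainOpsCount).
// The real helper additionally reduces every 32-byte word modulo the BN254
// scalar field; that step is skipped in this sketch.
func blockCommitment(height, createdAt int64, oldRoot, newRoot, pubData []byte, onChainOps int64) string {
	var buf bytes.Buffer
	buf.Write(pad32(height))
	buf.Write(pad32(createdAt))
	buf.Write(oldRoot)
	buf.Write(newRoot)
	buf.Write(pubData)
	buf.Write(pad32(onChainOps))
	h := sha3.NewLegacyKeccak256()
	h.Write(buf.Bytes())
	return hex.EncodeToString(h.Sum(nil))
}

func main() {
	// A toy "tx" worth of pubdata: one suffix-padded data chunk plus five empty chunks.
	pubData := suffixPad([]byte{0x06, 0x00, 0x00, 0x00, 0x01})
	for i := 0; i < 5; i++ {
		pubData = append(pubData, prefixPad(nil)...)
	}
	fmt.Println(blockCommitment(1, 1650000000, make([]byte, 32), make([]byte, 32), pubData, 0))
}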
- * - */ - -package util - -import ( - "errors" - - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" -) - -func ParseRegisterZnsPubData(pubData []byte) (tx *RegisterZnsTxInfo, err error) { - /* - struct RegisterZNS { - uint8 txType; - bytes32 accountName; - bytes32 accountNameHash; - bytes32 pubKeyX; - bytes32 pubKeyY; - } - */ - if len(pubData) != RegisterZnsPubDataSize { - logx.Errorf("[ParseRegisterZnsPubData] invalid size") - return nil, errors.New("[ParseRegisterZnsPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, accountIndex := ReadUint32(pubData, offset) - offset, accountName := ReadBytes32(pubData, offset) - offset, accountNameHash := ReadBytes32(pubData, offset) - offset, pubKeyX := ReadBytes32(pubData, offset) - offset, pubKeyY := ReadBytes32(pubData, offset) - pk := new(eddsa.PublicKey) - pk.A.X.SetBytes(pubKeyX) - pk.A.Y.SetBytes(pubKeyY) - tx = &RegisterZnsTxInfo{ - TxType: txType, - AccountIndex: int64(accountIndex), - AccountName: CleanAccountName(SerializeAccountName(accountName)), - AccountNameHash: accountNameHash, - PubKey: common.Bytes2Hex(pk.Bytes()), - } - return tx, nil -} - -func ParseCreatePairPubData(pubData []byte) (tx *CreatePairTxInfo, err error) { - if len(pubData) != CreatePairPubDataSize { - logx.Errorf("[ParseCreatePairPubData] invalid size") - return nil, errors.New("[ParseCreatePairPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, pairIndex := ReadUint16(pubData, offset) - offset, assetAId := ReadUint16(pubData, offset) - offset, assetBId := ReadUint16(pubData, offset) - offset, feeRate := ReadUint16(pubData, offset) - offset, treasuryAccountIndex := ReadUint32(pubData, offset) - offset, treasuryRate := ReadUint16(pubData, offset) - tx = &CreatePairTxInfo{ - TxType: txType, - PairIndex: int64(pairIndex), - AssetAId: int64(assetAId), - AssetBId: int64(assetBId), - FeeRate: int64(feeRate), - TreasuryAccountIndex: int64(treasuryAccountIndex), - TreasuryRate: int64(treasuryRate), - } - return tx, nil -} - -func ParseUpdatePairRatePubData(pubData []byte) (tx *UpdatePairRateTxInfo, err error) { - if len(pubData) != UpdatePairRatePubdataSize { - logx.Errorf("[ParseUpdatePairRatePubData] invalid size") - return nil, errors.New("[ParseUpdatePairRatePubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, pairIndex := ReadUint16(pubData, offset) - offset, feeRate := ReadUint16(pubData, offset) - offset, treasuryAccountIndex := ReadUint32(pubData, offset) - offset, treasuryRate := ReadUint16(pubData, offset) - tx = &UpdatePairRateTxInfo{ - TxType: txType, - PairIndex: int64(pairIndex), - FeeRate: int64(feeRate), - TreasuryAccountIndex: int64(treasuryAccountIndex), - TreasuryRate: int64(treasuryRate), - } - return tx, nil -} - -func ParseDepositPubData(pubData []byte) (tx *DepositTxInfo, err error) { - /* - struct Deposit { - uint8 txType; - uint32 accountIndex; - bytes32 accountNameHash; - uint16 assetId; - uint128 amount; - } - */ - if len(pubData) != DepositPubDataSize { - logx.Errorf("[ParseDepositPubData] invalid size") - return nil, errors.New("[ParseDepositPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, accountIndex := ReadUint32(pubData, offset) - offset, accountNameHash := ReadBytes32(pubData, offset) - offset, assetId 
:= ReadUint16(pubData, offset) - offset, amount := ReadUint128(pubData, offset) - tx = &DepositTxInfo{ - TxType: txType, - AccountIndex: int64(accountIndex), - AccountNameHash: accountNameHash, - AssetId: int64(assetId), - AssetAmount: amount, - } - return tx, nil -} - -func ParseDepositNftPubData(pubData []byte) (tx *DepositNftTxInfo, err error) { - if len(pubData) != DepositNftPubDataSize { - logx.Errorf("[ParseDepositNftPubData] invalid size") - return nil, errors.New("[ParseDepositNftPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, accountIndex := ReadUint32(pubData, offset) - offset, nftIndex := ReadUint40(pubData, offset) - offset, nftL1Address := ReadAddress(pubData, offset) - offset, creatorAccountIndex := ReadUint32(pubData, offset) - offset, creatorTreasuryRate := ReadUint16(pubData, offset) - offset, nftContentHash := ReadBytes32(pubData, offset) - offset, nftL1TokenId := ReadUint256(pubData, offset) - offset, accountNameHash := ReadBytes32(pubData, offset) - offset, collectionId := ReadUint16(pubData, offset) - tx = &DepositNftTxInfo{ - TxType: txType, - AccountIndex: int64(accountIndex), - NftIndex: nftIndex, - NftL1Address: nftL1Address, - CreatorAccountIndex: int64(creatorAccountIndex), - CreatorTreasuryRate: int64(creatorTreasuryRate), - NftContentHash: nftContentHash, - NftL1TokenId: nftL1TokenId, - AccountNameHash: accountNameHash, - CollectionId: int64(collectionId), - } - return tx, nil -} - -func ParseFullExitPubData(pubData []byte) (tx *FullExitTxInfo, err error) { - if len(pubData) != FullExitPubDataSize { - logx.Errorf("[ParseFullExitPubData] invalid size") - return nil, errors.New("[ParseFullExitPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, accountIndex := ReadUint32(pubData, offset) - offset, assetId := ReadUint16(pubData, offset) - offset, assetAmount := ReadUint128(pubData, offset) - offset, accountNameHash := ReadBytes32(pubData, offset) - tx = &FullExitTxInfo{ - TxType: txType, - AccountIndex: int64(accountIndex), - AccountNameHash: accountNameHash, - AssetId: int64(assetId), - AssetAmount: assetAmount, - } - return tx, nil -} - -func ParseFullExitNftPubData(pubData []byte) (tx *commonTx.FullExitNftTxInfo, err error) { - if len(pubData) != FullExitNftPubDataSize { - logx.Errorf("[ParseFullExitNftPubData] invalid size") - return nil, errors.New("[ParseFullExitNftPubData] invalid size") - } - offset := 0 - offset, txType := ReadUint8(pubData, offset) - offset, accountIndex := ReadUint32(pubData, offset) - offset, creatorAccountIndex := ReadUint32(pubData, offset) - offset, creatorTreasuryRate := ReadUint16(pubData, offset) - offset, nftIndex := ReadUint40(pubData, offset) - offset, collectionId := ReadUint16(pubData, offset) - offset, nftL1Address := ReadAddress(pubData, offset) - offset, accountNameHash := ReadBytes32(pubData, offset) - offset, creatorAccountNameHash := ReadBytes32(pubData, offset) - offset, nftContentHash := ReadBytes32(pubData, offset) - offset, nftL1TokenId := ReadUint256(pubData, offset) - tx = &FullExitNftTxInfo{ - TxType: txType, - AccountIndex: int64(accountIndex), - CreatorAccountIndex: int64(creatorAccountIndex), - CreatorTreasuryRate: int64(creatorTreasuryRate), - NftIndex: nftIndex, - CollectionId: int64(collectionId), - NftL1Address: nftL1Address, - AccountNameHash: accountNameHash, - CreatorAccountNameHash: creatorAccountNameHash, - NftContentHash: nftContentHash, - NftL1TokenId: nftL1TokenId, - } - return tx, nil -} diff --git 
a/common/util/stringHelper_test.go b/common/util/stringHelper_test.go deleted file mode 100644 index adfc1a4a1..000000000 --- a/common/util/stringHelper_test.go +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package util - -import ( - "fmt" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/basic" -) - -func TestAccountNameHash(t *testing.T) { - nameHash, err := AccountNameHash("sher.legend") - if err != nil { - panic(err) - } - fmt.Println(nameHash) -} - -func TestPubKey(t *testing.T) { - // sher - seed := "28c5f19f166ad68f350f656104280a744305edac23b5bcbd2d975f2d12721964" - sk, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - t.Fatal(err) - } - fmt.Println(common.Bytes2Hex(sk.PublicKey.Bytes())) -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func TestRedis(t *testing.T) { - r := redis.New("127.0.0.1:6379", WithRedis("node", "myredis")) - _ = r.Set("key", "123") - value, err := r.Get("key") - if err != nil { - panic(err) - } - fmt.Println(value) - r.Del("key") - redisLock := redis.NewRedisLock(r, "key") - redisLock.SetExpire(2) - isAcquired, err := redisLock.Acquire() - if err != nil { - panic(err) - } - if !isAcquired { - panic("invalid key") - } - value, err = r.Get("key") - fmt.Println(value) - time.Sleep(time.Second * 3) - isAcquired, err = redisLock.Acquire() - if err != nil { - panic(err) - } - if !isAcquired { - panic("unable to acquire") - } - //_ = r.Set("key", "345") - isReleased, err := redisLock.Release() - if err != nil { - panic(err) - } - if !isReleased { - panic("unable to release") - } - value, err = r.Get("key") - fmt.Println(value) - -} - -type Color struct { - ColorType int64 -} - -func updateColors(colors map[string]*Color) { - colors["0"] = &Color{ - 2, - } -} - -func TestRedisOperations(t *testing.T) { - redisConn := redis.New(basic.CacheConf[0].Host, WithRedis(basic.CacheConf[0].Type, basic.CacheConf[0].Pass)) - delInt, err := redisConn.Del("1") - if err != nil { - panic(err) - } - fmt.Println(delInt) -} - -func TestArray(t *testing.T) { - var colors = make(map[string]*Color) - updateColors(colors) - fmt.Println(colors["0"]) -} diff --git a/common/util/uniqueKeyHelper.go b/common/util/uniqueKeyHelper.go deleted file mode 100644 index a0c8515b1..000000000 --- a/common/util/uniqueKeyHelper.go +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
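The pubdataParser.go file removed above held the fixed-offset readers for L1-originated operations; every parser walks the pubdata slice with a running offset and fixed field widths, e.g. the Deposit layout documented in the removed source (uint8 txType, uint32 accountIndex, bytes32 accountNameHash, uint16 assetId, uint128 amount). The sketch below shows that offset-walking pattern, assuming big-endian encoding and using local stand-ins for the repo's ReadUint8/ReadUint32/ReadBytes32/ReadUint128 helpers; the tx-type tag used in main is hypothetical.

package main

import (
	"encoding/binary"
	"fmt"
	"math/big"
)

// depositPubData mirrors the removed Deposit layout:
// uint8 txType | uint32 accountIndex | bytes32 accountNameHash | uint16 assetId | uint128 amount.
type depositPubData struct {
	TxType          uint8
	AccountIndex    uint32
	AccountNameHash []byte
	AssetId         uint16
	Amount          *big.Int
}

// parseDeposit walks the buffer with a running offset, the same pattern the
// removed ReadUint8/ReadUint32/ReadBytes32/ReadUint128 helpers implemented.
func parseDeposit(pub []byte) (*depositPubData, error) {
	const size = 1 + 4 + 32 + 2 + 16
	if len(pub) != size {
		return nil, fmt.Errorf("invalid deposit pubdata size: %d", len(pub))
	}
	offset := 0
	txType := pub[offset]
	offset++
	accountIndex := binary.BigEndian.Uint32(pub[offset : offset+4])
	offset += 4
	nameHash := pub[offset : offset+32]
	offset += 32
	assetId := binary.BigEndian.Uint16(pub[offset : offset+2])
	offset += 2
	amount := new(big.Int).SetBytes(pub[offset : offset+16])
	return &depositPubData{
		TxType:          txType,
		AccountIndex:    accountIndex,
		AccountNameHash: nameHash,
		AssetId:         assetId,
		Amount:          amount,
	}, nil
}

func main() {
	buf := make([]byte, 1+4+32+2+16)
	buf[0] = 4 // hypothetical tx-type tag for a deposit
	tx, err := parseDeposit(buf)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", tx)
}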
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package util - -import ( - "strconv" -) - -const ( - LiquidityReadPrefix = "LiquidityRead::" - NftReadPrefix = "NftRead::" - NftIndexWritePrefix = "NftIndexWrite::" - LiquidityWritePrefix = "LiquidityWrite::" - LockKeySuffix = "ByLock" -) - -func GetLiquidityKeyForRead(pairIndex int64) string { - return LiquidityReadPrefix + strconv.FormatInt(pairIndex, 10) -} - -func GetNftKeyForRead(nftIndex int64) string { - return NftReadPrefix + strconv.FormatInt(nftIndex, 10) -} - -func GetNftIndexKeyForWrite() string { - return NftIndexWritePrefix -} - -func GetLiquidityKeyForWrite(pairIndex int64) string { - return LiquidityWritePrefix + strconv.FormatInt(pairIndex, 10) -} - -func GetLockKey(key string) string { - return key + LockKeySuffix -} diff --git a/common/util/uuidHelper.go b/common/uuid.go similarity index 88% rename from common/util/uuidHelper.go rename to common/uuid.go index 7bbfa45a9..5d5c1ab19 100644 --- a/common/util/uuidHelper.go +++ b/common/uuid.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,15 +15,12 @@ * */ -package util +package common import ( "github.com/google/uuid" ) -/* - GetRandomUUID: get random value -*/ func RandomUUID() string { u := uuid.New() return u.String() diff --git a/common/zcrypto/txVerification/addLiquidity.go b/common/zcrypto/txVerification/addLiquidity.go deleted file mode 100644 index ff4deb4ae..000000000 --- a/common/zcrypto/txVerification/addLiquidity.go +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/util" -) - -func VerifyAddLiquidityTxInfo( - accountInfoMap map[int64]*AccountInfo, - liquidityInfo *LiquidityInfo, - txInfo *AddLiquidityTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[liquidityInfo.TreasuryAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId].Balance.Cmp(ZeroBigInt) <= 0 || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetBId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetBId].Balance.Cmp(ZeroBigInt) <= 0 || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - liquidityInfo == nil || - liquidityInfo.AssetAId != txInfo.AssetAId || - liquidityInfo.AssetBId != txInfo.AssetBId || - txInfo.AssetAAmount.Cmp(ZeroBigInt) < 0 || - txInfo.AssetBAmount.Cmp(ZeroBigInt) < 0 || - txInfo.LpAmount.Cmp(ZeroBigInt) < 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Errorf("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - // add tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - poolDeltaForToAccount *LiquidityInfo - lpDeltaForFromAccount *big.Int - lpDeltaForTreasuryAccount *big.Int - ) - // init delta map - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset A - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetAId] = ffmath.Neg(txInfo.AssetAAmount) - // from account asset B - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetBId] = ffmath.Neg(txInfo.AssetBAmount) - // from account asset Gas - if assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - } else { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Sub( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - poolAssetADelta := txInfo.AssetAAmount - poolAssetBDelta := txInfo.AssetBAmount - // from account lp - lpDeltaForTreasuryAccount, err = util.ComputeSLp(liquidityInfo.AssetA, liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) - if err != nil { - logx.Errorf("fail to compute liquidity, err: %s", err.Error()) - return nil, errors.New("internal error") - } - poolLp := ffmath.Sub(liquidityInfo.LpAmount, lpDeltaForTreasuryAccount) - // lp = 
\Delta{x}/x * poolLp - if liquidityInfo.AssetA.Cmp(ZeroBigInt) == 0 { - lpDeltaForFromAccount, err = util.CleanPackedAmount(new(big.Int).Sqrt(ffmath.Multiply(txInfo.AssetAAmount, txInfo.AssetBAmount))) - if err != nil { - logx.Errorf("unable to compute lp delta: %s", err.Error()) - return nil, errors.New("internal error") - } - } else { - lpDeltaForFromAccount, err = util.CleanPackedAmount(ffmath.Div(ffmath.Multiply(poolAssetADelta, poolLp), liquidityInfo.AssetA)) - if err != nil { - logx.Errorf("unable to compute lp delta: %s", err.Error()) - return nil, errors.New("internal error") - } - } - // pool account pool info - finalPoolA := ffmath.Add(liquidityInfo.AssetA, poolAssetADelta) - finalPoolB := ffmath.Add(liquidityInfo.AssetB, poolAssetBDelta) - poolDeltaForToAccount = &LiquidityInfo{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetA: poolAssetADelta, - AssetBId: txInfo.AssetBId, - AssetB: poolAssetBDelta, - LpAmount: lpDeltaForFromAccount, - KLast: ffmath.Multiply(finalPoolA, finalPoolB), - FeeRate: liquidityInfo.FeeRate, - TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, - TreasuryRate: liquidityInfo.TreasuryRate, - } - // set tx info - txInfo.KLast, err = util.CleanPackedAmount(ffmath.Multiply(finalPoolA, finalPoolB)) - if err != nil { - return nil, errors.New("internal error") - } - txInfo.TreasuryAmount = lpDeltaForTreasuryAccount - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - // check asset A - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId].Balance.Cmp(txInfo.AssetAAmount) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.AssetAId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.AssetAId) - } - // check asset B - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetBId].Balance.Cmp(txInfo.AssetBAmount) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.AssetBId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.AssetAId) - } - // asset Gas - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // check lp amount - if lpDeltaForFromAccount.Cmp(txInfo.LpAmount) < 0 { - logx.Errorf("invalid lp amount") - return nil, errors.New("invalid lp amount") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeAddLiquidityMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset A - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetAId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: 
commonAsset.ConstructAccountAsset( - txInfo.AssetAId, ffmath.Neg(txInfo.AssetAAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset B - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetBId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetBId, ffmath.Neg(txInfo.AssetBAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset Gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account lp - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.PairIndex, ZeroBigInt, lpDeltaForFromAccount, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // pool info - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: LiquidityAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: poolDeltaForToAccount.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // treasury account - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: GeneralAssetType, - AccountIndex: liquidityInfo.TreasuryAccountIndex, - AccountName: accountInfoMap[liquidityInfo.TreasuryAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.PairIndex, ZeroBigInt, lpDeltaForTreasuryAccount, ZeroBigInt, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset Gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/atomicMatch.go b/common/zcrypto/txVerification/atomicMatch.go deleted file mode 100644 index 56d3b0079..000000000 --- a/common/zcrypto/txVerification/atomicMatch.go +++ /dev/null @@ -1,352 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
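VerifyAddLiquidityTxInfo, removed above, credits the depositor with lp = Δx/x · poolLp LP tokens (or sqrt(Δx·Δy) when the pool is still empty) and rejects the transaction when that computed amount is below the LP amount the user claimed. Below is a self-contained sketch of just that arithmetic on *big.Int; the treasury-LP deduction and the packed-amount cleaning that the real helper performs are left out.

package main

import (
	"fmt"
	"math/big"
)

// lpDelta returns the LP tokens minted for adding (deltaA, deltaB) to a pool
// holding (poolA, poolB) with poolLp LP tokens outstanding.
// Empty pool: sqrt(deltaA * deltaB). Otherwise: deltaA * poolLp / poolA.
func lpDelta(poolA, poolB, poolLp, deltaA, deltaB *big.Int) *big.Int {
	if poolA.Sign() == 0 {
		return new(big.Int).Sqrt(new(big.Int).Mul(deltaA, deltaB))
	}
	out := new(big.Int).Mul(deltaA, poolLp)
	return out.Div(out, poolA)
}

func main() {
	// First deposit into an empty pool: sqrt(400 * 100) = 200 LP.
	fmt.Println(lpDelta(big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(400), big.NewInt(100)))
	// Subsequent deposit adding 10% of the pool mints 10% of outstanding LP: 50.
	fmt.Println(lpDelta(big.NewInt(1000), big.NewInt(250), big.NewInt(500), big.NewInt(100), big.NewInt(25)))
}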
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - "time" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/util" -) - -func VerifyAtomicMatchTxInfo( - accountInfoMap map[int64]*AccountInfo, - nftInfo *NftInfo, - txInfo *AtomicMatchTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - now := time.Now().UnixMilli() - if accountInfoMap[txInfo.AccountIndex] == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.BuyOffer.AccountIndex] == nil || - accountInfoMap[txInfo.SellOffer.AccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[nftInfo.CreatorAccountIndex] == nil || - txInfo.BuyOffer.Type != commonAsset.BuyOfferType || - txInfo.SellOffer.Type != commonAsset.SellOfferType || - txInfo.BuyOffer.NftIndex != txInfo.SellOffer.NftIndex || - txInfo.BuyOffer.AssetId != txInfo.SellOffer.AssetId || - txInfo.BuyOffer.AssetAmount.String() != txInfo.SellOffer.AssetAmount.String() || - txInfo.BuyOffer.ExpiredAt < now || - txInfo.SellOffer.ExpiredAt < now || - txInfo.BuyOffer.NftIndex != nftInfo.NftIndex || - txInfo.SellOffer.AccountIndex != nftInfo.OwnerAccountIndex || - txInfo.BuyOffer.AccountIndex == txInfo.SellOffer.AccountIndex || - accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[txInfo.BuyOffer.AssetId] == nil || - accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[txInfo.BuyOffer.AssetId].Balance.Cmp(ZeroBigInt) <= 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - buyerOfferAssetId := txInfo.BuyOffer.OfferId / OfferPerAsset - sellerOfferAssetId := txInfo.SellOffer.OfferId / OfferPerAsset - if accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[buyerOfferAssetId] == nil { - accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[buyerOfferAssetId] = &commonAsset.AccountAsset{ - AssetId: buyerOfferAssetId, - Balance: ZeroBigInt, - LpAmount: ZeroBigInt, - OfferCanceledOrFinalized: ZeroBigInt, - } - } - if accountInfoMap[txInfo.SellOffer.AccountIndex].AssetInfo[sellerOfferAssetId] == nil { - accountInfoMap[txInfo.SellOffer.AccountIndex].AssetInfo[sellerOfferAssetId] = &commonAsset.AccountAsset{ - AssetId: sellerOfferAssetId, - Balance: ZeroBigInt, - LpAmount: ZeroBigInt, - OfferCanceledOrFinalized: ZeroBigInt, - } - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.AccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - } - // set tx info - var ( - assetDeltaMap = 
make(map[int64]map[int64]*big.Int) - ) - // init delta map - assetDeltaMap[txInfo.AccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.BuyOffer.AccountIndex] == nil { - assetDeltaMap[txInfo.BuyOffer.AccountIndex] = make(map[int64]*big.Int) - } - if assetDeltaMap[txInfo.SellOffer.AccountIndex] == nil { - assetDeltaMap[txInfo.SellOffer.AccountIndex] = make(map[int64]*big.Int) - } - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // buyer account asset A - if assetDeltaMap[txInfo.BuyOffer.AccountIndex][txInfo.BuyOffer.AssetId] == nil { - assetDeltaMap[txInfo.BuyOffer.AccountIndex][txInfo.BuyOffer.AssetId] = ffmath.Neg(txInfo.BuyOffer.AssetAmount) - } else { - assetDeltaMap[txInfo.BuyOffer.AccountIndex][txInfo.BuyOffer.AssetId] = ffmath.Sub( - assetDeltaMap[txInfo.BuyOffer.AccountIndex][txInfo.BuyOffer.AssetId], - txInfo.BuyOffer.AssetAmount, - ) - } - // seller account asset A - if assetDeltaMap[txInfo.SellOffer.AccountIndex][txInfo.SellOffer.AssetId] == nil { - assetDeltaMap[txInfo.SellOffer.AccountIndex][txInfo.SellOffer.AssetId] = - txInfo.SellOffer.AssetAmount - } else { - assetDeltaMap[txInfo.SellOffer.AccountIndex][txInfo.SellOffer.AssetId] = ffmath.Add( - assetDeltaMap[txInfo.SellOffer.AccountIndex][txInfo.SellOffer.AssetId], - txInfo.SellOffer.AssetAmount, - ) - } - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[txInfo.BuyOffer.AssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.BuyOffer.AccountIndex][txInfo.BuyOffer.AssetId])) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.BuyOffer.AssetId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.BuyOffer.AssetId) - } - if accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - // buyer sig - msgHash, err := legendTxTypes.ComputeOfferMsgHash(txInfo.BuyOffer, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.BuyOffer.Sig, msgHash, accountInfoMap[txInfo.BuyOffer.AccountIndex].PublicKey); err != nil { - return nil, err - } - // seller sig - hFunc.Reset() - msgHash, err = legendTxTypes.ComputeOfferMsgHash(txInfo.SellOffer, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.SellOffer.Sig, msgHash, accountInfoMap[txInfo.SellOffer.AccountIndex].PublicKey); err != nil { - return nil, err - } - // submitter hash - hFunc.Reset() - msgHash, err = legendTxTypes.ComputeAtomicMatchMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - 
return nil, errors.New("internal error") - } - // verify submitter signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.AccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset gas - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // buyer asset A - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.BuyOffer.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.BuyOffer.AccountIndex, - AccountName: accountInfoMap[txInfo.BuyOffer.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.BuyOffer.AssetId, ffmath.Neg(txInfo.BuyOffer.AssetAmount), ZeroBigInt, ZeroBigInt, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - // buyer offer - buyerOfferIndex := txInfo.BuyOffer.OfferId % OfferPerAsset - if accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[buyerOfferAssetId] != nil { - oBuyerOffer := accountInfoMap[txInfo.BuyOffer.AccountIndex].AssetInfo[buyerOfferAssetId].OfferCanceledOrFinalized - // verify whether buyer offer id is valid for use - if oBuyerOffer.Bit(int(buyerOfferIndex)) == 1 { - logx.Errorf("account %d offer index %d is already in use", txInfo.BuyOffer.AccountIndex, buyerOfferIndex) - return nil, errors.New("invalid buyer offer id") - } - - nBuyerOffer := new(big.Int).SetBit(oBuyerOffer, int(buyerOfferIndex), 1) - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: buyerOfferAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.BuyOffer.AccountIndex, - AccountName: accountInfoMap[txInfo.BuyOffer.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - buyerOfferAssetId, ZeroBigInt, ZeroBigInt, nBuyerOffer, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - } - // seller asset A - // treasury fee - treasuryFee, err := util.CleanPackedAmount(ffmath.Div( - ffmath.Multiply(txInfo.SellOffer.AssetAmount, big.NewInt(txInfo.SellOffer.TreasuryRate)), - big.NewInt(TenThousand))) - if err != nil { - logx.Errorf("unable to compute treasury fee: %s", err.Error()) - return nil, errors.New("internal error") - } - // creator fee - creatorFee, err := util.CleanPackedAmount(ffmath.Div( - ffmath.Multiply(txInfo.SellOffer.AssetAmount, big.NewInt(nftInfo.CreatorTreasuryRate)), - big.NewInt(TenThousand))) - if err != nil { - logx.Errorf("unable to compute creator fee: %s", err.Error()) - return nil, errors.New("internal error") - } - // set tx info - txInfo.CreatorAmount = creatorFee - txInfo.TreasuryAmount = treasuryFee - // seller amount - sellerDeltaAmount := ffmath.Sub(txInfo.SellOffer.AssetAmount, ffmath.Add(treasuryFee, creatorFee)) - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.SellOffer.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.SellOffer.AccountIndex, - AccountName: accountInfoMap[txInfo.SellOffer.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.SellOffer.AssetId, sellerDeltaAmount, ZeroBigInt, ZeroBigInt, - ).String(), - Order: order, - 
AccountOrder: accountOrder, - }) - // seller offer - sellerOfferIndex := txInfo.SellOffer.OfferId % OfferPerAsset - if accountInfoMap[txInfo.SellOffer.AccountIndex].AssetInfo[sellerOfferAssetId] != nil { - oSellerOffer := accountInfoMap[txInfo.SellOffer.AccountIndex].AssetInfo[sellerOfferAssetId].OfferCanceledOrFinalized - // verify whether buyer offer id is valid for use - if oSellerOffer.Bit(int(sellerOfferIndex)) == 1 { - logx.Errorf("account %d offer index %d is already in use", txInfo.SellOffer.AccountIndex, sellerOfferIndex) - return nil, errors.New("invalid seller offer id") - } - nSellerOffer := new(big.Int).SetBit(oSellerOffer, int(sellerOfferIndex), 1) - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: sellerOfferAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.SellOffer.AccountIndex, - AccountName: accountInfoMap[txInfo.SellOffer.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - sellerOfferAssetId, ZeroBigInt, ZeroBigInt, nSellerOffer, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - } - // creator fee - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.BuyOffer.AssetId, - AssetType: GeneralAssetType, - AccountIndex: nftInfo.CreatorAccountIndex, - AccountName: accountInfoMap[nftInfo.CreatorAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.BuyOffer.AssetId, creatorFee, ZeroBigInt, ZeroBigInt, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - // nft info - newNftInfo := &NftInfo{ - NftIndex: nftInfo.NftIndex, - CreatorAccountIndex: nftInfo.CreatorAccountIndex, - OwnerAccountIndex: txInfo.BuyOffer.AccountIndex, - NftContentHash: nftInfo.NftContentHash, - NftL1TokenId: nftInfo.NftL1TokenId, - NftL1Address: nftInfo.NftL1Address, - CreatorTreasuryRate: nftInfo.CreatorTreasuryRate, - CollectionId: nftInfo.CollectionId, - } - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: nftInfo.NftIndex, - AssetType: NftAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: newNftInfo.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // gas account asset A - treasury fee - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.BuyOffer.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.BuyOffer.AssetId, treasuryFee, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/cancelOffer.go b/common/zcrypto/txVerification/cancelOffer.go deleted file mode 100644 index 8a887ff94..000000000 --- a/common/zcrypto/txVerification/cancelOffer.go +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you 
may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" -) - -func VerifyCancelOfferTxInfo( - accountInfoMap map[int64]*AccountInfo, - txInfo *CancelOfferTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.AccountIndex] == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - if accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.OfferId/OfferPerAsset] == nil { - accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.OfferId/OfferPerAsset] = &commonAsset.AccountAsset{ - AssetId: txInfo.OfferId / OfferPerAsset, - Balance: ZeroBigInt, - LpAmount: ZeroBigInt, - OfferCanceledOrFinalized: ZeroBigInt, - } - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.AccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - } - // set tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - ) - // init delta map - assetDeltaMap[txInfo.AccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeCancelOfferMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.AccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account 
asset gas - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account offer id - offerAssetId := txInfo.OfferId / OfferPerAsset - offerIndex := txInfo.OfferId % OfferPerAsset - if accountInfoMap[txInfo.AccountIndex].AssetInfo[offerAssetId] != nil { - oOffer := accountInfoMap[txInfo.AccountIndex].AssetInfo[offerAssetId].OfferCanceledOrFinalized - // verify whether account offer id is valid for use - if oOffer.Bit(int(offerIndex)) == 1 { - logx.Errorf("account %d offer index %d is already in use", txInfo.AccountIndex, offerIndex) - return nil, errors.New("invalid offer id") - } - nOffer := new(big.Int).SetBit(oOffer, int(offerIndex), 1) - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: offerAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - offerAssetId, ZeroBigInt, ZeroBigInt, nOffer, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - } - - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/constant.go b/common/zcrypto/txVerification/constant.go deleted file mode 100644 index cb35bfe48..000000000 --- a/common/zcrypto/txVerification/constant.go +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "math/big" - - "github.com/consensys/gnark-crypto/ecc/bn254/twistededwards/eddsa" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" -) - -type ( - TransferTxInfo = commonTx.TransferTxInfo - SwapTxInfo = commonTx.SwapTxInfo - AddLiquidityTxInfo = commonTx.AddLiquidityTxInfo - RemoveLiquidityTxInfo = commonTx.RemoveLiquidityTxInfo - WithdrawTxInfo = commonTx.WithdrawTxInfo - CreateCollectionTxInfo = commonTx.CreateCollectionTxInfo - MintNftTxInfo = commonTx.MintNftTxInfo - TransferNftTxInfo = commonTx.TransferNftTxInfo - OfferTxInfo = commonTx.OfferTxInfo - AtomicMatchTxInfo = commonTx.AtomicMatchTxInfo - CancelOfferTxInfo = commonTx.CancelOfferTxInfo - WithdrawNftTxInfo = commonTx.WithdrawNftTxInfo - - PublicKey = eddsa.PublicKey - - MempoolTxDetail = mempool.MempoolTxDetail - - AccountInfo = commonAsset.AccountInfo - LiquidityInfo = commonAsset.LiquidityInfo - NftInfo = commonAsset.NftInfo -) - -const ( - OfferPerAsset = 128 - - TenThousand = 10000 - - GeneralAssetType = commonAsset.GeneralAssetType - LiquidityAssetType = commonAsset.LiquidityAssetType - NftAssetType = commonAsset.NftAssetType - CollectionNonceAssetType = commonAsset.CollectionNonceAssetType -) - -var ( - ZeroBigInt = big.NewInt(0) -) diff --git a/common/zcrypto/txVerification/createCollection.go b/common/zcrypto/txVerification/createCollection.go deleted file mode 100644 index c0525c855..000000000 --- a/common/zcrypto/txVerification/createCollection.go +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - "strconv" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" -) - -func VerifyCreateCollectionTxInfo( - accountInfoMap map[int64]*AccountInfo, - txInfo *CreateCollectionTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.AccountIndex] == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.AccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - } - - // set tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - ) - // init delta map - assetDeltaMap[txInfo.AccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - txInfo.GasFeeAssetAmount) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeCreateCollectionMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.AccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account collection nonce - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: commonConstant.NilAssetId, - AssetType: CollectionNonceAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: strconv.FormatInt(txInfo.CollectionId, 10), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, 
ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/eddsa.go b/common/zcrypto/txVerification/eddsa.go deleted file mode 100644 index 5d8251cd5..000000000 --- a/common/zcrypto/txVerification/eddsa.go +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package txVerification - -import ( - "encoding/hex" - "errors" - - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" -) - -/* - ParsePkStr: parse pk string -*/ -func ParsePkStr(pkStr string) (pk *PublicKey, err error) { - pkBytes, err := hex.DecodeString(pkStr) - if err != nil { - logx.Errorf("[ParsePkStr] invalid public key: %s", err.Error()) - return nil, err - } - pk = new(PublicKey) - size, err := pk.SetBytes(pkBytes) - if err != nil { - logx.Errorf("[ParsePkStr] invalid public key: %s", err.Error()) - return nil, err - } - if size != 32 { - logx.Error("[ParsePkStr] invalid public key") - return nil, errors.New("[ParsePkStr] invalid public key") - } - return pk, nil -} - -func VerifySignature(sig, msg []byte, pubkey string) error { - hFunc := mimc.NewMiMC() - pk, err := ParsePkStr(pubkey) - if err != nil { - return errors.New("cannot parse public key") - } - isValid, err := pk.Verify(sig, msg, hFunc) - if err != nil { - logx.Errorf("unable to verify signature: %s", err.Error()) - return errors.New("unable to verify signature") - } - if !isValid { - logx.Errorf("invalid signature") - return errors.New("invalid signature") - } - return nil -} diff --git a/common/zcrypto/txVerification/mintNft.go b/common/zcrypto/txVerification/mintNft.go deleted file mode 100644 index ad20da14f..000000000 --- a/common/zcrypto/txVerification/mintNft.go +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/util" -) - -func VerifyMintNftTxInfo( - accountInfoMap map[int64]*AccountInfo, - txInfo *MintNftTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - realNftContentHash, err := util.FromHex(txInfo.NftContentHash) - if err != nil || len(realNftContentHash) != 32 { - logx.Errorf("invalid NftContentHash") - return nil, errors.New("invalid NftContentHash") - } - // verify params - if accountInfoMap[txInfo.CreatorAccountIndex] == nil || - accountInfoMap[txInfo.ToAccountIndex] == nil || - accountInfoMap[txInfo.CreatorAccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.CreatorAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.CreatorAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.CreatorAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.CreatorAccountIndex].Nonce) - } - // set tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - newNftInfo *NftInfo - ) - // init delta map - assetDeltaMap[txInfo.CreatorAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.CreatorAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // to account nft info - newNftInfo = &NftInfo{ - NftIndex: txInfo.NftIndex, - CreatorAccountIndex: txInfo.CreatorAccountIndex, - OwnerAccountIndex: txInfo.ToAccountIndex, - NftContentHash: txInfo.NftContentHash, - NftL1TokenId: commonConstant.NilL1TokenId, - NftL1Address: commonConstant.NilL1Address, - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, - CollectionId: txInfo.NftCollectionId, - } - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.CreatorAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.CreatorAccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeMintNftMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.CreatorAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details 
- // from account asset gas - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.CreatorAccountIndex, - AccountName: accountInfoMap[txInfo.CreatorAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // to account empty delta - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.ToAccountIndex, - AccountName: accountInfoMap[txInfo.ToAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ZeroBigInt, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // nft info - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.NftIndex, - AssetType: NftAssetType, - AccountIndex: txInfo.ToAccountIndex, - AccountName: accountInfoMap[txInfo.ToAccountIndex].AccountName, - BalanceDelta: newNftInfo.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/removeLiquidity.go b/common/zcrypto/txVerification/removeLiquidity.go deleted file mode 100644 index 676ae6ead..000000000 --- a/common/zcrypto/txVerification/removeLiquidity.go +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/util" -) - -func VerifyRemoveLiquidityTxInfo( - accountInfoMap map[int64]*AccountInfo, - liquidityInfo *LiquidityInfo, - txInfo *RemoveLiquidityTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[liquidityInfo.TreasuryAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - liquidityInfo == nil || - liquidityInfo.AssetAId != txInfo.AssetAId || - liquidityInfo.AssetBId != txInfo.AssetBId || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.PairIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.PairIndex].LpAmount.Cmp(ZeroBigInt) <= 0 || - txInfo.AssetAMinAmount.Cmp(ZeroBigInt) < 0 || - txInfo.AssetBMinAmount.Cmp(ZeroBigInt) < 0 || - txInfo.LpAmount.Cmp(ZeroBigInt) < 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - // add tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - lpDeltaForFromAccount *big.Int - lpDeltaForTreasuryAccount *big.Int - poolDeltaForToAccount *LiquidityInfo - ) - // init delta map - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset A - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetAId] = txInfo.AssetAAmountDelta - // from account asset B - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetBId] = txInfo.AssetBAmountDelta - // from account asset Gas - if assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - } else { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Sub( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // from account lp amount - lpDeltaForFromAccount = ffmath.Neg(txInfo.LpAmount) - // pool account pool info - poolAssetADelta := ffmath.Neg(txInfo.AssetAAmountDelta) - poolAssetBDelta := ffmath.Neg(txInfo.AssetBAmountDelta) - finalPoolA := ffmath.Add(liquidityInfo.AssetA, poolAssetADelta) - finalPoolB := ffmath.Add(liquidityInfo.AssetB, poolAssetBDelta) - poolDeltaForToAccount = &LiquidityInfo{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetA: poolAssetADelta, - AssetBId: txInfo.AssetBId, - AssetB: poolAssetBDelta, - LpAmount: lpDeltaForFromAccount, - KLast: ffmath.Multiply(finalPoolA, finalPoolB), - FeeRate: liquidityInfo.FeeRate, - TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, - TreasuryRate: liquidityInfo.TreasuryRate, - } - // treasury account - 
lpDeltaForTreasuryAccount, err = util.ComputeSLp(liquidityInfo.AssetA, liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) - if err != nil { - return nil, err - } - // set tx info - txInfo.KLast, err = util.CleanPackedAmount(ffmath.Multiply(finalPoolA, finalPoolB)) - if err != nil { - return nil, err - } - txInfo.TreasuryAmount = lpDeltaForTreasuryAccount - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId]) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - - // check lp amount - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.PairIndex].LpAmount.Cmp(txInfo.LpAmount) < 0 { - logx.Errorf("invalid lp amount") - return nil, errors.New("invalid lp amount") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeRemoveLiquidityMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset A - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetAId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetAId, txInfo.AssetAAmountDelta, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset B - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetBId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetBId, txInfo.AssetBAmountDelta, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset Gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account lp - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.PairIndex, ZeroBigInt, lpDeltaForFromAccount, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // treasury account - order++ - accountOrder++ - 
txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: GeneralAssetType, - AccountIndex: liquidityInfo.TreasuryAccountIndex, - AccountName: accountInfoMap[liquidityInfo.TreasuryAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.PairIndex, ZeroBigInt, lpDeltaForTreasuryAccount, ZeroBigInt, - ).String(), - Order: order, - AccountOrder: accountOrder, - }) - // pool account pool info - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: LiquidityAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: poolDeltaForToAccount.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // gas account asset Gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/swap.go b/common/zcrypto/txVerification/swap.go deleted file mode 100644 index dda2e1971..000000000 --- a/common/zcrypto/txVerification/swap.go +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" -) - -func VerifySwapTxInfo( - accountInfoMap map[int64]*AccountInfo, - liquidityInfo *LiquidityInfo, - txInfo *SwapTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId].Balance.Cmp(ZeroBigInt) <= 0 || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(ZeroBigInt) <= 0 || - liquidityInfo == nil || - !((liquidityInfo.AssetAId == txInfo.AssetAId && - liquidityInfo.AssetBId == txInfo.AssetBId) || - (liquidityInfo.AssetBId == txInfo.AssetAId && - liquidityInfo.AssetAId == txInfo.AssetBId)) || - txInfo.AssetAAmount.Cmp(ZeroBigInt) < 0 || - txInfo.AssetBMinAmount.Cmp(ZeroBigInt) < 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify delta amount - if txInfo.AssetBAmountDelta.Cmp(txInfo.AssetBMinAmount) < 0 { - logx.Error("invalid swap amount") - return nil, errors.New("invalid swap amount") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - var ( - //assetDeltaForTreasuryAccount *big.Int - assetDeltaMap = make(map[int64]map[int64]*big.Int) - poolDeltaForToAccount *LiquidityInfo - ) - // init delta map - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset A - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetAId] = ffmath.Neg(txInfo.AssetAAmount) - // from account asset B - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetBId] = txInfo.AssetBAmountDelta - // from account asset Gas - if assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - } else { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Sub( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - if txInfo.AssetAAmount.Cmp(assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetAId]) < 0 { - logx.Error("invalid treasury amount") - return nil, errors.New("invalid treasury amount") - } - // to account pool - poolAssetADelta := txInfo.AssetAAmount - poolAssetBDelta := ffmath.Neg(txInfo.AssetBAmountDelta) - if txInfo.AssetAId == liquidityInfo.AssetAId { - poolDeltaForToAccount = &LiquidityInfo{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetA: poolAssetADelta, - 
AssetBId: txInfo.AssetBId, - AssetB: poolAssetBDelta, - LpAmount: ZeroBigInt, - KLast: ZeroBigInt, - FeeRate: liquidityInfo.FeeRate, - TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, - TreasuryRate: liquidityInfo.TreasuryRate, - } - } else if txInfo.AssetAId == liquidityInfo.AssetBId { - poolDeltaForToAccount = &LiquidityInfo{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetBId, - AssetA: poolAssetBDelta, - AssetBId: txInfo.AssetAId, - AssetB: poolAssetADelta, - LpAmount: ZeroBigInt, - KLast: ZeroBigInt, - FeeRate: liquidityInfo.FeeRate, - TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, - TreasuryRate: liquidityInfo.TreasuryRate, - } - } else { - logx.Error("invalid pool") - return nil, errors.New("invalid pool") - } - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetAId].Balance.Cmp( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetAId]) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.AssetAId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.AssetAId) - } - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId]) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeSwapMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset A - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetAId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetAId, ffmath.Neg(txInfo.AssetAAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset B - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetBId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetBId, txInfo.AssetBAmountDelta, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset Gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // pool info - order++ - txDetails = append(txDetails, 
&MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: LiquidityAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: poolDeltaForToAccount.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // gas account asset Gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/transfer.go b/common/zcrypto/txVerification/transfer.go deleted file mode 100644 index 223620c69..000000000 --- a/common/zcrypto/txVerification/transfer.go +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" -) - -func VerifyTransferTxInfo( - accountInfoMap map[int64]*AccountInfo, - txInfo *TransferTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.ToAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId].Balance.Cmp(ZeroBigInt) <= 0 || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(ZeroBigInt) <= 0 || - txInfo.AssetAmount.Cmp(ZeroBigInt) < 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - // init delta map - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - ) - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.ToAccountIndex] == nil { - assetDeltaMap[txInfo.ToAccountIndex] = make(map[int64]*big.Int) - } - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // compute 
deltas - // from account asset A - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetId] = ffmath.Neg(txInfo.AssetAmount) - // from account asset Gas - if assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] != nil { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Sub(assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId], txInfo.GasFeeAssetAmount) - } else { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - } - // check if from account has enough assetABalance - // asset A - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetId])) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.AssetId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.AssetId) - } - // asset Gas - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp( - new(big.Int).Abs(assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(txInfo.CallData)) - callDataHash := hFunc.Sum(nil) - txInfo.CallDataHash = callDataHash - msgHash, err := legendTxTypes.ComputeTransferMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute message hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset A - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetId, ffmath.Neg(txInfo.AssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - order++ - // from account asset gas - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // to account asset a - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.ToAccountIndex, - AccountName: accountInfoMap[txInfo.ToAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetId, txInfo.AssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return 
txDetails, nil -} diff --git a/common/zcrypto/txVerification/transferNft.go b/common/zcrypto/txVerification/transferNft.go deleted file mode 100644 index b938de591..000000000 --- a/common/zcrypto/txVerification/transferNft.go +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" -) - -/* - VerifyTransferNftTx: - accounts order is: - - FromAccount - - Assets - - AssetGas - - GasAccount - - Assets - - AssetGas -*/ -func VerifyTransferNftTxInfo( - accountInfoMap map[int64]*AccountInfo, - nftInfo *NftInfo, - txInfo *TransferNftTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - nftInfo == nil || - nftInfo.OwnerAccountIndex != txInfo.FromAccountIndex || - nftInfo.NftIndex != txInfo.NftIndex || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - // set tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - newNftInfo *NftInfo - ) - // init delta map - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // to account nft info - newNftInfo = &NftInfo{ - NftIndex: nftInfo.NftIndex, - CreatorAccountIndex: nftInfo.CreatorAccountIndex, - OwnerAccountIndex: txInfo.ToAccountIndex, - NftContentHash: nftInfo.NftContentHash, - NftL1TokenId: nftInfo.NftL1TokenId, - NftL1Address: nftInfo.NftL1Address, - CreatorTreasuryRate: nftInfo.CreatorTreasuryRate, - CollectionId: nftInfo.CollectionId, - } - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - 
assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeTransferNftMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset gas - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // to account empty delta - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.ToAccountIndex, - AccountName: accountInfoMap[txInfo.ToAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ZeroBigInt, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // to account nft delta - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.NftIndex, - AssetType: NftAssetType, - AccountIndex: txInfo.ToAccountIndex, - AccountName: accountInfoMap[txInfo.ToAccountIndex].AccountName, - BalanceDelta: newNftInfo.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/withdraw.go b/common/zcrypto/txVerification/withdraw.go deleted file mode 100644 index 0dd487d77..000000000 --- a/common/zcrypto/txVerification/withdraw.go +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" -) - -func VerifyWithdrawTxInfo( - accountInfoMap map[int64]*AccountInfo, - txInfo *WithdrawTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.FromAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId] == nil || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId].Balance.Cmp(ZeroBigInt) <= 0 || - accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - txInfo.AssetAmount.Cmp(ZeroBigInt) < 0 || - txInfo.GasFeeAssetAmount.Cmp(ZeroBigInt) < 0 { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.FromAccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.FromAccountIndex].Nonce) - } - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - ) - // init delta map - assetDeltaMap[txInfo.FromAccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset A - assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetId] = ffmath.Neg(txInfo.AssetAmount) - // from account asset Gas - if assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - } else { - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId] = ffmath.Sub( - assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.AssetId].Balance.Cmp(new(big.Int).Abs(assetDeltaMap[txInfo.FromAccountIndex][txInfo.AssetId])) < 0 { - logx.Errorf("not enough balance of asset %d", txInfo.AssetId) - return nil, fmt.Errorf("not enough balance of asset %d", txInfo.AssetId) - } - if accountInfoMap[txInfo.FromAccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(new(big.Int).Abs(assetDeltaMap[txInfo.FromAccountIndex][txInfo.GasFeeAssetId])) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeWithdrawMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, 
accountInfoMap[txInfo.FromAccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset A - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.AssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.AssetId, ffmath.Neg(txInfo.AssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // from account asset gas - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.FromAccountIndex, - AccountName: accountInfoMap[txInfo.FromAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/common/zcrypto/txVerification/withdrawNft.go b/common/zcrypto/txVerification/withdrawNft.go deleted file mode 100644 index 502822cb4..000000000 --- a/common/zcrypto/txVerification/withdrawNft.go +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package txVerification - -import ( - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" -) - -func VerifyWithdrawNftTxInfo( - accountInfoMap map[int64]*AccountInfo, - nftInfo *NftInfo, - txInfo *WithdrawNftTxInfo, -) (txDetails []*MempoolTxDetail, err error) { - // verify params - if accountInfoMap[txInfo.AccountIndex] == nil || - accountInfoMap[txInfo.CreatorAccountIndex] == nil || - accountInfoMap[txInfo.GasAccountIndex] == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo == nil || - accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId] == nil || - nftInfo == nil || - nftInfo.OwnerAccountIndex != txInfo.AccountIndex || - nftInfo.NftIndex != txInfo.NftIndex || - nftInfo.NftContentHash != common.Bytes2Hex(txInfo.NftContentHash) { - logx.Error("invalid params") - return nil, errors.New("invalid params") - } - // verify nonce - if txInfo.Nonce != accountInfoMap[txInfo.AccountIndex].Nonce { - logx.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - return nil, fmt.Errorf("invalid nonce, actual: %d, expected: %d", - txInfo.Nonce, accountInfoMap[txInfo.AccountIndex].Nonce) - } - // set tx info - var ( - assetDeltaMap = make(map[int64]map[int64]*big.Int) - newNftInfo *NftInfo - ) - // init delta map - assetDeltaMap[txInfo.AccountIndex] = make(map[int64]*big.Int) - if assetDeltaMap[txInfo.GasAccountIndex] == nil { - assetDeltaMap[txInfo.GasAccountIndex] = make(map[int64]*big.Int) - } - // from account asset Gas - assetDeltaMap[txInfo.AccountIndex][txInfo.GasFeeAssetId] = ffmath.Neg(txInfo.GasFeeAssetAmount) - // to account nft info - newNftInfo = commonAsset.EmptyNftInfo(txInfo.NftIndex) - // gas account asset Gas - if assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] == nil { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = txInfo.GasFeeAssetAmount - } else { - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId] = ffmath.Add( - assetDeltaMap[txInfo.GasAccountIndex][txInfo.GasFeeAssetId], - txInfo.GasFeeAssetAmount, - ) - } - // check balance - if accountInfoMap[txInfo.AccountIndex].AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { - logx.Errorf("not enough balance of gas") - return nil, errors.New("not enough balance of gas") - } - // compute hash - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeWithdrawNftMsgHash(txInfo, hFunc) - if err != nil { - logx.Errorf("unable to compute tx hash: %s", err.Error()) - return nil, errors.New("internal error") - } - // verify signature - if err := VerifySignature(txInfo.Sig, msgHash, accountInfoMap[txInfo.AccountIndex].PublicKey); err != nil { - return nil, err - } - // compute tx details - // from account asset gas - order := int64(0) - accountOrder := int64(0) - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfoMap[txInfo.AccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - 
}) - // nft delta - order++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.NftIndex, - AssetType: NftAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: newNftInfo.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // creator account zero delta - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.CreatorAccountIndex, - AccountName: accountInfoMap[txInfo.CreatorAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, big.NewInt(0), big.NewInt(0), big.NewInt(0)).String(), - Order: order, - AccountOrder: accountOrder, - }) - // gas account asset gas - order++ - accountOrder++ - txDetails = append(txDetails, &MempoolTxDetail{ - AssetId: txInfo.GasFeeAssetId, - AssetType: GeneralAssetType, - AccountIndex: txInfo.GasAccountIndex, - AccountName: accountInfoMap[txInfo.GasAccountIndex].AccountName, - BalanceDelta: commonAsset.ConstructAccountAsset( - txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, ZeroBigInt, ZeroBigInt).String(), - Order: order, - AccountOrder: accountOrder, - }) - return txDetails, nil -} diff --git a/configyaml/app.yaml.example b/configyaml/app.yaml.example deleted file mode 100644 index 01a0f8a7a..000000000 --- a/configyaml/app.yaml.example +++ /dev/null @@ -1,25 +0,0 @@ -Name: appService-api -Host: 0.0.0.0 -Port: 8888 - -Prometheus: - Host: 0.0.0.0 - Port: 9091 - Path: /metrics - -Postgres: - DataSource: host=34.122.163.215 user=postgres password=zkbasTest dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: redis:6379 - Type: node - -GlobalRpc: - Target: k8s://default/globalrpc-svc:8080 - -LogConf: - ServiceName: appservice - Mode: console - Path: ./log/appService - StackCooldownMillis: 500 - Level: error \ No newline at end of file diff --git a/configyaml/committer.yaml.example b/configyaml/committer.yaml.example deleted file mode 100644 index 49dc6f378..000000000 --- a/configyaml/committer.yaml.example +++ /dev/null @@ -1,17 +0,0 @@ -Name: committer.cronjob - - -Prometheus: - Host: 0.0.0.0 - Port: 9091 - Path: /metrics - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 10.70.61.91:6379 - Type: node - -TreeDB: - Driver: memorydb \ No newline at end of file diff --git a/configyaml/globalrpc.yaml.example b/configyaml/globalrpc.yaml.example deleted file mode 100644 index 7270315fa..000000000 --- a/configyaml/globalrpc.yaml.example +++ /dev/null @@ -1,21 +0,0 @@ -Name: global.rpc -ListenOn: 127.0.0.1:8080 - - -Prometheus: - Host: 0.0.0.0 - Port: 9091 - Path: /metrics - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: redis:6379 - Type: node - -LogConf: - ServiceName: global.rpc - Mode: console - Path: ./log/globalrpc - StackCooldownMillis: 500 diff --git a/configyaml/monitor.yaml.example b/configyaml/monitor.yaml.example deleted file mode 100644 index 0eece5e40..000000000 --- a/configyaml/monitor.yaml.example +++ /dev/null @@ -1,28 +0,0 @@ -Name: monitor.cronjob - - -Prometheus: - Host: 0.0.0.0 - Port: 9091 - Path: /metrics - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: redis:6379 - Type: node - -ChainConfig: - NetworkRPCSysConfigName: 
"BscTestNetworkRpc" - # NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" - GovernanceContractAddrSysConfigName: "GovernanceContract" - StartL1BlockHeight: 26770000 - # StartL1BlockHeight: 0 - PendingBlocksCount: 0 - MaxHandledBlocksCount: 100000 - - - - diff --git a/configyaml/witnessgenerator.yaml.example b/configyaml/witnessgenerator.yaml.example deleted file mode 100644 index 6cca28ce0..000000000 --- a/configyaml/witnessgenerator.yaml.example +++ /dev/null @@ -1,11 +0,0 @@ -Name: witnessGenerator.cronjob - -Postgres: - DataSource: host=34.122.163.215 user=postgres password=zkbasTest dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Type: node - -TreeDB: - Driver: memorydb \ No newline at end of file diff --git a/core/block_processor.go b/core/block_processor.go new file mode 100644 index 000000000..7558b1b3f --- /dev/null +++ b/core/block_processor.go @@ -0,0 +1,67 @@ +package core + +import ( + "fmt" + + "github.com/bnb-chain/zkbas/core/executor" + "github.com/bnb-chain/zkbas/dao/tx" +) + +type Processor interface { + Process(tx *tx.Tx) error +} + +type CommitProcessor struct { + bc *BlockChain +} + +func NewCommitProcessor(bc *BlockChain) Processor { + return &CommitProcessor{ + bc: bc, + } +} + +func (p *CommitProcessor) Process(tx *tx.Tx) error { + p.bc.setCurrentBlockTimeStamp() + defer p.bc.resetCurrentBlockTimeStamp() + + executor, err := executor.NewTxExecutor(p.bc, tx) + if err != nil { + return fmt.Errorf("new tx executor failed") + } + + err = executor.Prepare() + if err != nil { + return err + } + err = executor.VerifyInputs() + if err != nil { + return err + } + txDetails, err := executor.GenerateTxDetails() + if err != nil { + return err + } + tx.TxDetails = txDetails + err = executor.ApplyTransaction() + if err != nil { + panic(err) + } + err = executor.GeneratePubData() + if err != nil { + panic(err) + } + err = executor.UpdateTrees() + if err != nil { + panic(err) + } + tx, err = executor.GetExecutedTx() + if err != nil { + panic(err) + } + + p.bc.Statedb.Txs = append(p.bc.Statedb.Txs, tx) + p.bc.Statedb.StateRoot = tx.StateRoot + + return nil +} diff --git a/core/blockchain.go b/core/blockchain.go new file mode 100644 index 000000000..0281aa489 --- /dev/null +++ b/core/blockchain.go @@ -0,0 +1,288 @@ +package core + +import ( + "encoding/json" + "errors" + "fmt" + "time" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/cache" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/common/chain" + sdb "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/compressedblock" + "github.com/bnb-chain/zkbas/dao/dbcache" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/tree" +) + +type ChainConfig struct { + Postgres struct { + DataSource string + } + CacheRedis cache.CacheConf + TreeDB struct { + Driver tree.Driver + //nolint:staticcheck + LevelDBOption tree.LevelDBOption `json:",optional"` + //nolint:staticcheck + RedisDBOption tree.RedisDBOption `json:",optional"` + } +} + +type BlockChain struct { + *sdb.ChainDB + Statedb *sdb.StateDB // Cache for current block changes. 
+ + chainConfig *ChainConfig + dryRun bool //dryRun mode is used for verifying user inputs, is not for execution + + currentBlock *block.Block + processor Processor +} + +func NewBlockChain(config *ChainConfig, moduleName string) (*BlockChain, error) { + db, err := gorm.Open(postgres.Open(config.Postgres.DataSource)) + if err != nil { + logx.Error("gorm connect db failed: ", err) + return nil, err + } + bc := &BlockChain{ + ChainDB: sdb.NewChainDB(db), + chainConfig: config, + } + + curHeight, err := bc.BlockModel.GetCurrentHeight() + if err != nil { + logx.Error("get current block failed: ", err) + return nil, err + } + + bc.currentBlock, err = bc.BlockModel.GetBlockByHeight(curHeight) + if err != nil { + return nil, err + } + if bc.currentBlock.BlockStatus == block.StatusProposing { + curHeight-- + } + redisCache := dbcache.NewRedisCache(config.CacheRedis[0].Host, config.CacheRedis[0].Pass, 15*time.Minute) + treeCtx := &tree.Context{ + Name: moduleName, + Driver: config.TreeDB.Driver, + LevelDBOption: &config.TreeDB.LevelDBOption, + RedisDBOption: &config.TreeDB.RedisDBOption, + } + bc.Statedb, err = sdb.NewStateDB(treeCtx, bc.ChainDB, redisCache, bc.currentBlock.StateRoot, curHeight) + if err != nil { + return nil, err + } + bc.processor = NewCommitProcessor(bc) + return bc, nil +} + +// NewBlockChainForDryRun - for dry run mode, we can reuse existing models for quick creation +// , e.g., for sending tx, we can create blockchain for each request quickly +func NewBlockChainForDryRun(accountModel account.AccountModel, liquidityModel liquidity.LiquidityModel, + nftModel nft.L2NftModel, mempoolModel mempool.MempoolModel, redisCache dbcache.Cache) *BlockChain { + chainDb := &sdb.ChainDB{ + AccountModel: accountModel, + LiquidityModel: liquidityModel, + L2NftModel: nftModel, + MempoolModel: mempoolModel, + } + bc := &BlockChain{ + ChainDB: chainDb, + dryRun: true, + Statedb: sdb.NewStateDBForDryRun(redisCache, chainDb), + } + return bc +} + +func (bc *BlockChain) ApplyTransaction(tx *tx.Tx) error { + return bc.processor.Process(tx) +} + +func (bc *BlockChain) ProposeNewBlock() (*block.Block, error) { + newBlock := &block.Block{ + Model: gorm.Model{ + // The block timestamp will be set when the first transaction executed. 
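+			// setCurrentBlockTimeStamp fills it in when the first tx of the block is
+			// executed, and resetCurrentBlockTimeStamp zeroes it again if the block
+			// ends up empty (see the helpers at the bottom of this file).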
+ CreatedAt: time.Time{}, + }, + BlockHeight: bc.currentBlock.BlockHeight + 1, + StateRoot: bc.currentBlock.StateRoot, + BlockStatus: block.StatusProposing, + } + + bc.currentBlock = newBlock + bc.Statedb.PurgeCache(bc.currentBlock.StateRoot) + return newBlock, nil +} + +func (bc *BlockChain) CurrentBlock() *block.Block { + return bc.currentBlock +} + +func (bc *BlockChain) CommitNewBlock(blockSize int, createdAt int64) (*block.BlockStates, error) { + newBlock, compressedBlock, err := bc.commitNewBlock(blockSize, createdAt) + if err != nil { + return nil, err + } + + currentHeight := bc.currentBlock.BlockHeight + err = tree.CommitTrees(uint64(currentHeight), bc.Statedb.AccountTree, &bc.Statedb.AccountAssetTrees, bc.Statedb.LiquidityTree, bc.Statedb.NftTree) + if err != nil { + return nil, err + } + + pendingNewAccount, pendingUpdateAccount, pendingNewAccountHistory, err := bc.Statedb.GetPendingAccount(currentHeight) + if err != nil { + return nil, err + } + + pendingNewLiquidity, pendingUpdateLiquidity, pendingNewLiquidityHistory, err := bc.Statedb.GetPendingLiquidity(currentHeight) + if err != nil { + return nil, err + } + + pendingNewNft, pendingUpdateNft, pendingNewNftHistory, err := bc.Statedb.GetPendingNft(currentHeight) + if err != nil { + return nil, err + } + + return &block.BlockStates{ + Block: newBlock, + CompressedBlock: compressedBlock, + PendingNewAccount: pendingNewAccount, + PendingUpdateAccount: pendingUpdateAccount, + PendingNewAccountHistory: pendingNewAccountHistory, + PendingNewLiquidity: pendingNewLiquidity, + PendingUpdateLiquidity: pendingUpdateLiquidity, + PendingNewLiquidityHistory: pendingNewLiquidityHistory, + PendingNewNft: pendingNewNft, + PendingUpdateNft: pendingUpdateNft, + PendingNewNftHistory: pendingNewNftHistory, + }, nil +} + +func (bc *BlockChain) commitNewBlock(blockSize int, createdAt int64) (*block.Block, *compressedblock.CompressedBlock, error) { + s := bc.Statedb + if blockSize < len(s.Txs) { + return nil, nil, errors.New("block size too small") + } + + newBlock := bc.currentBlock + if newBlock.BlockStatus != block.StatusProposing { + newBlock = &block.Block{ + Model: gorm.Model{ + CreatedAt: time.UnixMilli(createdAt), + }, + BlockHeight: bc.currentBlock.BlockHeight + 1, + StateRoot: bc.currentBlock.StateRoot, + BlockStatus: block.StatusProposing, + } + } + + // Align pub data. 
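+	// AlignPubData presumably pads s.PubData out to the capacity of a
+	// blockSize-tx block, so the commitment below always covers a fixed-length
+	// byte string regardless of how many txs were actually packed. Rough sketch
+	// of the assumed behaviour (names are illustrative, not the real statedb code):
+	//
+	//	want := blockSize * pubDataBytesPerTx // hypothetical per-tx constant
+	//	for len(s.PubData) < want {
+	//		s.PubData = append(s.PubData, emptyTxPubData...)
+	//	}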
+ s.AlignPubData(blockSize) + + commitment := chain.CreateBlockCommitment(newBlock.BlockHeight, newBlock.CreatedAt.UnixMilli(), + common.FromHex(newBlock.StateRoot), common.FromHex(s.StateRoot), + s.PubData, int64(len(s.PubDataOffset))) + + newBlock.BlockSize = uint16(blockSize) + newBlock.BlockCommitment = commitment + newBlock.StateRoot = s.StateRoot + newBlock.PriorityOperations = s.PriorityOperations + newBlock.PendingOnChainOperationsHash = common.Bytes2Hex(s.PendingOnChainOperationsHash) + newBlock.Txs = s.Txs + newBlock.BlockStatus = block.StatusPending + if len(s.PendingOnChainOperationsPubData) > 0 { + onChainOperationsPubDataBytes, err := json.Marshal(s.PendingOnChainOperationsPubData) + if err != nil { + return nil, nil, fmt.Errorf("marshal pending onChain operation pubData failed: %v", err) + } + newBlock.PendingOnChainOperationsPubData = string(onChainOperationsPubDataBytes) + } + + offsetBytes, err := json.Marshal(s.PubDataOffset) + if err != nil { + return nil, nil, fmt.Errorf("marshal pubData offset failed: %v", err) + } + newCompressedBlock := &compressedblock.CompressedBlock{ + BlockSize: uint16(blockSize), + BlockHeight: newBlock.BlockHeight, + StateRoot: newBlock.StateRoot, + PublicData: common.Bytes2Hex(s.PubData), + Timestamp: newBlock.CreatedAt.UnixMilli(), + PublicDataOffsets: string(offsetBytes), + } + + bc.currentBlock = newBlock + return newBlock, newCompressedBlock, nil +} + +func (bc *BlockChain) VerifyExpiredAt(expiredAt int64) error { + if !bc.dryRun { + if expiredAt < bc.currentBlock.CreatedAt.UnixMilli() { + return errors.New("invalid ExpiredAt") + } + } else { + if expiredAt < time.Now().UnixMilli() { + return errors.New("invalid ExpiredAt") + } + } + return nil +} + +func (bc *BlockChain) VerifyNonce(accountIndex int64, nonce int64) error { + if !bc.dryRun { + expectNonce, err := bc.Statedb.GetCommittedNonce(accountIndex) + if err != nil { + return err + } + if nonce != expectNonce { + return errors.New("invalid Nonce") + } + } else { + pendingNonce, err := bc.Statedb.GetPendingNonce(accountIndex) + if err != nil { + return err + } + if pendingNonce != nonce { + return errors.New("invalid Nonce") + } + } + return nil +} + +func (bc *BlockChain) StateDB() *sdb.StateDB { + return bc.Statedb +} + +func (bc *BlockChain) DB() *sdb.ChainDB { + return bc.ChainDB +} + +func (bc *BlockChain) setCurrentBlockTimeStamp() { + if bc.currentBlock.CreatedAt.IsZero() && len(bc.Statedb.Txs) == 0 { + creatAt := time.Now().UnixMilli() + bc.currentBlock.CreatedAt = time.UnixMilli(creatAt) + } +} + +func (bc *BlockChain) resetCurrentBlockTimeStamp() { + if len(bc.Statedb.Txs) > 0 { + return + } + + bc.currentBlock.CreatedAt = time.Time{} +} diff --git a/core/executor/add_liquidity_executor.go b/core/executor/add_liquidity_executor.go new file mode 100644 index 000000000..dc2dd3b40 --- /dev/null +++ b/core/executor/add_liquidity_executor.go @@ -0,0 +1,572 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + 
"github.com/bnb-chain/zkbas/types" +) + +type AddLiquidityExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.AddLiquidityTxInfo + + newPoolInfo *types.LiquidityInfo + lpDeltaForFromAccount *big.Int +} + +func NewAddLiquidityExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseAddLiquidityTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &AddLiquidityExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *AddLiquidityExecutor) Prepare() error { + txInfo := e.txInfo + + err := e.bc.StateDB().PrepareLiquidity(txInfo.PairIndex) + if err != nil { + logx.Errorf("prepare liquidity failed: %s", err.Error()) + return errors.New("internal error") + } + + liquidityModel := e.bc.StateDB().LiquidityMap[txInfo.PairIndex] + + accounts := []int64{txInfo.FromAccountIndex, liquidityModel.TreasuryAccountIndex, txInfo.GasAccountIndex} + assets := []int64{liquidityModel.AssetAId, liquidityModel.AssetBId, txInfo.AssetAId, txInfo.AssetBId, txInfo.PairIndex, txInfo.GasFeeAssetId} + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + // add details to tx info + err = e.fillTxInfo() + if err != nil { + return err + } + + return nil +} + +func (e *AddLiquidityExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + if txInfo.GasFeeAssetId == txInfo.AssetAId { + deltaBalance := ffmath.Add(txInfo.AssetAAmount, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(deltaBalance) < 0 { + return errors.New("invalid asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetBId].Balance.Cmp(txInfo.AssetBAmount) < 0 { + return errors.New("invalid asset amount") + } + } else if txInfo.GasFeeAssetId == txInfo.AssetBId { + deltaBalance := ffmath.Add(txInfo.AssetBAmount, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.AssetBId].Balance.Cmp(deltaBalance) < 0 { + return errors.New("invalid asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(txInfo.AssetAAmount) < 0 { + return errors.New("invalid asset amount") + } + } else { + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("invalid gas asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(txInfo.AssetAAmount) < 0 { + return errors.New("invalid asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetBId].Balance.Cmp(txInfo.AssetBAmount) < 0 { + return errors.New("invalid asset amount") + } + } + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return err + } + + if liquidityInfo.AssetA == nil || liquidityInfo.AssetB == nil { + return errors.New("invalid liquidity") + } + + return nil +} + +func (e *AddLiquidityExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + liquidityModel := 
bc.StateDB().LiquidityMap[txInfo.PairIndex] + treasuryAccount := bc.StateDB().AccountMap[liquidityModel.TreasuryAccountIndex] + + fromAccount.AssetInfo[txInfo.AssetAId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmount) + fromAccount.AssetInfo[txInfo.AssetBId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmount) + fromAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(fromAccount.AssetInfo[txInfo.PairIndex].LpAmount, e.lpDeltaForFromAccount) + treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.TreasuryAmount) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + bc.StateDB().LiquidityMap[txInfo.PairIndex] = &liquidity.Liquidity{ + Model: liquidityModel.Model, + PairIndex: e.newPoolInfo.PairIndex, + AssetAId: liquidityModel.AssetAId, + AssetA: e.newPoolInfo.AssetA.String(), + AssetBId: liquidityModel.AssetBId, + AssetB: e.newPoolInfo.AssetB.String(), + LpAmount: e.newPoolInfo.LpAmount.String(), + KLast: e.newPoolInfo.KLast.String(), + FeeRate: e.newPoolInfo.FeeRate, + TreasuryAccountIndex: e.newPoolInfo.TreasuryAccountIndex, + TreasuryRate: e.newPoolInfo.TreasuryRate, + } + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[treasuryAccount.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateLiquidityIndexMap[txInfo.PairIndex] = statedb.StateCachePending + return nil +} + +func (e *AddLiquidityExecutor) fillTxInfo() error { + bc := e.bc + txInfo := e.txInfo + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return err + } + + if liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 { + txInfo.LpAmount, err = chain.ComputeEmptyLpAmount(txInfo.AssetAAmount, txInfo.AssetBAmount) + if err != nil { + logx.Errorf("[ComputeEmptyLpAmount] : %v", err) + return err + } + } else { + txInfo.LpAmount, err = chain.ComputeLpAmount(liquidityInfo, txInfo.AssetAAmount) + if err != nil { + return err + } + } + + txInfo.AssetAId = liquidityInfo.AssetAId + txInfo.AssetBId = liquidityInfo.AssetBId + + lpDeltaForTreasuryAccount, err := chain.ComputeSLp(liquidityInfo.AssetA, + liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) + if err != nil { + logx.Errorf("[ComputeSLp] err: %v", err) + return err + } + + // pool account pool info + finalPoolA := ffmath.Add(liquidityInfo.AssetA, txInfo.AssetAAmount) + finalPoolB := ffmath.Add(liquidityInfo.AssetB, txInfo.AssetBAmount) + + txInfo.TreasuryAmount = lpDeltaForTreasuryAccount + txInfo.KLast, err = common2.CleanPackedAmount(ffmath.Multiply(finalPoolA, finalPoolB)) + if err != nil { + return err + } + + txInfo.AssetAId = liquidityModel.AssetAId + txInfo.AssetBId = liquidityModel.AssetBId + + return nil +} + +func (e *AddLiquidityExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + 
buf.WriteByte(uint8(types.TxTypeAddLiquidity)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.PairIndex))) + packedAssetAAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetAAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetAAmountBytes) + packedAssetBAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetBAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetBAmountBytes) + LpAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.LpAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(LpAmountBytes) + KLastBytes, err := common2.AmountToPackedAmountBytes(txInfo.KLast) + if err != nil { + logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(KLastBytes) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + treasuryAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.TreasuryAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(treasuryAmountBytes) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) 
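+	// The add-liquidity tx therefore occupies six fixed-size chunks: the first
+	// carries the tx type, account/pair indexes and the packed A/B/LP/kLast
+	// amounts, the second carries the treasury amount plus the gas
+	// account/asset/fee, and the remaining four are zero padding so every tx
+	// contributes the same amount of pub data.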
+ return nil +} + +func (e *AddLiquidityExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + accounts := []int64{txInfo.FromAccountIndex, liquidityModel.TreasuryAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetAId, txInfo.AssetBId, txInfo.PairIndex, txInfo.GasFeeAssetId} + + err := bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + return err + } + + err = bc.StateDB().UpdateLiquidityTree(txInfo.PairIndex) + if err != nil { + return err + } + + return nil +} + +func (e *AddLiquidityExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *AddLiquidityExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + liquidityModel := e.bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return nil, err + } + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex, liquidityInfo.TreasuryAccountIndex}) + if err != nil { + return nil, err + } + + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + treasuryAccount := copiedAccounts[liquidityInfo.TreasuryAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + // from account asset A + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetAId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetAId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetAId, + ffmath.Neg(txInfo.AssetAAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetAId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmount) + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset a balance") + } + + // from account asset B + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetBId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetBId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetBId, + ffmath.Neg(txInfo.AssetBAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + + fromAccount.AssetInfo[txInfo.AssetBId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmount) + if fromAccount.AssetInfo[txInfo.AssetBId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset b balance") + } + + // from account asset gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + 
AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // from account lp + poolLp := ffmath.Sub(liquidityInfo.LpAmount, txInfo.TreasuryAmount) + var lpDeltaForFromAccount *big.Int + if liquidityInfo.AssetA.Cmp(types.ZeroBigInt) == 0 { + lpDeltaForFromAccount, err = common2.CleanPackedAmount(new(big.Int).Sqrt(ffmath.Multiply(txInfo.AssetAAmount, txInfo.AssetBAmount))) + if err != nil { + logx.Errorf("unable to compute lp delta: %s", err.Error()) + return nil, err + } + } else { + lpDeltaForFromAccount, err = common2.CleanPackedAmount(ffmath.Div(ffmath.Multiply(txInfo.AssetAAmount, poolLp), liquidityInfo.AssetA)) + if err != nil { + logx.Errorf(" unable to compute lp delta: %s", err.Error()) + return nil, err + } + } + + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.PairIndex].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.PairIndex, + types.ZeroBigInt, + lpDeltaForFromAccount, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + e.lpDeltaForFromAccount = lpDeltaForFromAccount + fromAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(fromAccount.AssetInfo[txInfo.PairIndex].LpAmount, lpDeltaForFromAccount) + + // pool info + basePool, err := types.ConstructLiquidityInfo( + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].PairIndex, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetAId, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetA, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetBId, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetB, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].LpAmount, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].KLast, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].FeeRate, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].TreasuryAccountIndex, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].TreasuryRate, + ) + if err != nil { + return nil, err + } + + finalPoolA := ffmath.Add(liquidityInfo.AssetA, txInfo.AssetAAmount) + finalPoolB := ffmath.Add(liquidityInfo.AssetB, txInfo.AssetBAmount) + poolDeltaForToAccount := &types.LiquidityInfo{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetA: txInfo.AssetAAmount, + AssetBId: txInfo.AssetBId, + AssetB: txInfo.AssetAAmount, + LpAmount: lpDeltaForFromAccount, + KLast: ffmath.Multiply(finalPoolA, finalPoolB), + FeeRate: liquidityInfo.FeeRate, + TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, + TreasuryRate: liquidityInfo.TreasuryRate, + } + newPool, err := chain.ComputeNewBalance(types.LiquidityAssetType, basePool.String(), poolDeltaForToAccount.String()) + if err != nil { + return nil, err + } + 
+ newPoolInfo, err := types.ParseLiquidityInfo(newPool) + if err != nil { + return nil, err + } + e.newPoolInfo = newPoolInfo + + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.LiquidityAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: basePool.String(), + BalanceDelta: poolDeltaForToAccount.String(), + Order: order, + Nonce: 0, + AccountOrder: types.NilAccountOrder, + CollectionNonce: 0, + }) + + // treasury account + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.FungibleAssetType, + AccountIndex: treasuryAccount.AccountIndex, + AccountName: treasuryAccount.AccountName, + Balance: treasuryAccount.AssetInfo[txInfo.PairIndex].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.PairIndex, types.ZeroBigInt, txInfo.TreasuryAmount, types.ZeroBigInt, + ).String(), + Order: order, + Nonce: treasuryAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: treasuryAccount.CollectionNonce, + }) + treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.TreasuryAmount) + + // gas account asset gas + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *AddLiquidityExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeAddLiquidityMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: e.txInfo.PairIndex, + AssetId: types.NilAssetId, + TxAmount: e.txInfo.LpAmount.String(), + Memo: "", + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/atomic_match_executor.go b/core/executor/atomic_match_executor.go new file mode 100644 index 000000000..309c0bc52 --- /dev/null +++ b/core/executor/atomic_match_executor.go @@ -0,0 +1,519 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type AtomicMatchExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.AtomicMatchTxInfo + + buyOfferAssetId int64 + 
buyOfferIndex int64 + sellOfferAssetId int64 + sellOfferIndex int64 + + isFromBuyer bool // True when the sender's account is the same to buyer's account. + isAssetGas bool // True when the gas asset is the same to the buyer's asset. +} + +func NewAtomicMatchExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseAtomicMatchTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse atomic match tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &AtomicMatchExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *AtomicMatchExecutor) Prepare() error { + txInfo := e.txInfo + + e.buyOfferAssetId = txInfo.BuyOffer.OfferId / OfferPerAsset + e.buyOfferIndex = txInfo.BuyOffer.OfferId % OfferPerAsset + e.sellOfferAssetId = txInfo.SellOffer.OfferId / OfferPerAsset + e.sellOfferIndex = txInfo.SellOffer.OfferId % OfferPerAsset + + // Prepare seller's asset and nft, if the buyer's asset or nft isn't the same, it will be failed in the verify step. + err := e.bc.StateDB().PrepareNft(txInfo.SellOffer.NftIndex) + if err != nil { + logx.Errorf("prepare nft failed") + return errors.New("internal error") + } + + matchNft := e.bc.StateDB().NftMap[txInfo.SellOffer.NftIndex] + e.isFromBuyer = true + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex, txInfo.SellOffer.AccountIndex, matchNft.CreatorAccountIndex} + if txInfo.AccountIndex != txInfo.BuyOffer.AccountIndex { + e.isFromBuyer = false + accounts = append(accounts, txInfo.BuyOffer.AccountIndex) + } + e.isAssetGas = true + assets := []int64{txInfo.GasFeeAssetId, e.buyOfferAssetId, e.sellOfferAssetId} + if txInfo.GasFeeAssetId != txInfo.SellOffer.AssetId { + e.isAssetGas = false + assets = append(assets, txInfo.SellOffer.AssetId) + } + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + // Set the right treasury and creator treasury amount. + txInfo.TreasuryAmount = ffmath.Div(ffmath.Multiply(txInfo.SellOffer.AssetAmount, big.NewInt(txInfo.SellOffer.TreasuryRate)), big.NewInt(TenThousand)) + txInfo.CreatorAmount = ffmath.Div(ffmath.Multiply(txInfo.SellOffer.AssetAmount, big.NewInt(matchNft.CreatorTreasuryRate)), big.NewInt(TenThousand)) + + return nil +} + +func (e *AtomicMatchExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + if txInfo.BuyOffer.Type != types.BuyOfferType || + txInfo.SellOffer.Type != types.SellOfferType { + return errors.New("invalid offer type") + } + if txInfo.BuyOffer.AccountIndex == txInfo.SellOffer.AccountIndex { + return errors.New("same buyer and seller") + } + if txInfo.SellOffer.NftIndex != txInfo.BuyOffer.NftIndex || + txInfo.SellOffer.AssetId != txInfo.BuyOffer.AssetId || + txInfo.SellOffer.AssetAmount.String() != txInfo.BuyOffer.AssetAmount.String() || + txInfo.SellOffer.TreasuryRate != txInfo.BuyOffer.TreasuryRate { + return errors.New("buy offer mismatches sell offer") + } + + // Check offer expired time. 
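+	// Offer validity is two-fold: each offer carries its own deadline (checked
+	// here), and a single-use flag checked further down as a bit in the owner's
+	// per-asset OfferCanceledOrFinalized bitmap, where OfferId / OfferPerAsset
+	// selects the asset slot and OfferId % OfferPerAsset the bit. With
+	// OfferPerAsset = 128 (see base_executor.go), offer id 300 is bit 44 of slot 2.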
+ if err := e.bc.VerifyExpiredAt(txInfo.BuyOffer.ExpiredAt); err != nil { + return errors.New("invalid BuyOffer.ExpiredAt") + } + if err := e.bc.VerifyExpiredAt(txInfo.SellOffer.ExpiredAt); err != nil { + return errors.New("invalid SellOffer.ExpiredAt") + } + + fromAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + buyAccount := bc.StateDB().AccountMap[txInfo.BuyOffer.AccountIndex] + sellAccount := bc.StateDB().AccountMap[txInfo.SellOffer.AccountIndex] + + // Check sender's gas balance and buyer's asset balance. + if e.isFromBuyer && e.isAssetGas { + totalBalance := ffmath.Add(txInfo.GasFeeAssetAmount, txInfo.BuyOffer.AssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(totalBalance) < 0 { + return errors.New("sender balance is not enough") + } + } else { + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("sender balance is not enough") + } + + if buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance.Cmp(txInfo.BuyOffer.AssetAmount) < 0 { + return errors.New("buy balance is not enough") + } + } + + // Check offer canceled or finalized. + sellOffer := bc.StateDB().AccountMap[txInfo.SellOffer.AccountIndex].AssetInfo[e.sellOfferAssetId].OfferCanceledOrFinalized + if sellOffer.Bit(int(e.sellOfferIndex)) == 1 { + return errors.New("sell offer canceled or finalized") + } + buyOffer := bc.StateDB().AccountMap[txInfo.BuyOffer.AccountIndex].AssetInfo[e.buyOfferAssetId].OfferCanceledOrFinalized + if buyOffer.Bit(int(e.buyOfferIndex)) == 1 { + return errors.New("buy offer canceled or finalized") + } + + // Check the seller is the owner of the nft. + if bc.StateDB().NftMap[txInfo.SellOffer.NftIndex].OwnerAccountIndex != txInfo.SellOffer.AccountIndex { + return errors.New("seller is not owner") + } + + // Verify offer signature. 
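+	// BaseExecutor.VerifyInputs has already checked the sender's signature on the
+	// outer tx; here the buyer's and seller's signatures on their own offers are
+	// verified against their registered public keys, since either party can
+	// differ from the account submitting the match.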
+ err = txInfo.BuyOffer.VerifySignature(buyAccount.PublicKey) + if err != nil { + return err + } + err = txInfo.SellOffer.VerifySignature(sellAccount.PublicKey) + if err != nil { + return err + } + + return nil +} + +func (e *AtomicMatchExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + matchNft := bc.StateDB().NftMap[txInfo.SellOffer.NftIndex] + fromAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + buyAccount := bc.StateDB().AccountMap[txInfo.BuyOffer.AccountIndex] + sellAccount := bc.StateDB().AccountMap[txInfo.SellOffer.AccountIndex] + creatorAccount := bc.StateDB().AccountMap[matchNft.CreatorAccountIndex] + + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Sub(buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.BuyOffer.AssetAmount) + sellAccount.AssetInfo[txInfo.SellOffer.AssetId].Balance = ffmath.Add(sellAccount.AssetInfo[txInfo.SellOffer.AssetId].Balance, ffmath.Sub( + txInfo.BuyOffer.AssetAmount, ffmath.Add(txInfo.TreasuryAmount, txInfo.CreatorAmount))) + creatorAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Add(creatorAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.CreatorAmount) + gasAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.TreasuryAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + sellOffer := sellAccount.AssetInfo[e.sellOfferAssetId].OfferCanceledOrFinalized + sellOffer = new(big.Int).SetBit(sellOffer, int(e.sellOfferIndex), 1) + sellAccount.AssetInfo[e.sellOfferAssetId].OfferCanceledOrFinalized = sellOffer + buyOffer := buyAccount.AssetInfo[e.buyOfferAssetId].OfferCanceledOrFinalized + buyOffer = new(big.Int).SetBit(buyOffer, int(e.buyOfferIndex), 1) + buyAccount.AssetInfo[e.buyOfferAssetId].OfferCanceledOrFinalized = buyOffer + + // Change new owner. 
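+	// From this point the NFT belongs to the buyer; the pending nft map entry
+	// below ensures the updated owner is persisted together with the block.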
+ matchNft.OwnerAccountIndex = txInfo.BuyOffer.AccountIndex + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.BuyOffer.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.SellOffer.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[matchNft.CreatorAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateNftIndexMap[txInfo.SellOffer.NftIndex] = statedb.StateCachePending + + return nil +} + +func (e *AtomicMatchExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeAtomicMatch)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.BuyOffer.AccountIndex))) + buf.Write(common2.Uint24ToBytes(txInfo.BuyOffer.OfferId)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.SellOffer.AccountIndex))) + buf.Write(common2.Uint24ToBytes(txInfo.SellOffer.OfferId)) + buf.Write(common2.Uint40ToBytes(txInfo.BuyOffer.NftIndex)) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.SellOffer.AssetId))) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + packedAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.BuyOffer.AssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAmountBytes) + creatorAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.CreatorAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(creatorAmountBytes) + treasuryAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.TreasuryAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(treasuryAmountBytes) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) 
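+	// stateCache.PubData accumulates across every tx in the block; it is later
+	// aligned and hashed into the block commitment in BlockChain.commitNewBlock.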
+ return nil +} + +func (e *AtomicMatchExecutor) UpdateTrees() error { + txInfo := e.txInfo + + matchNft := e.bc.StateDB().NftMap[txInfo.SellOffer.NftIndex] + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex, txInfo.SellOffer.AccountIndex, matchNft.CreatorAccountIndex} + if !e.isFromBuyer { + accounts = append(accounts, txInfo.BuyOffer.AccountIndex) + } + assets := []int64{txInfo.GasFeeAssetId, e.buyOfferAssetId, e.sellOfferAssetId} + if !e.isAssetGas { + assets = append(assets, txInfo.SellOffer.AssetId) + } + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + err = e.bc.StateDB().UpdateNftTree(txInfo.SellOffer.NftIndex) + if err != nil { + logx.Errorf("update nft tree error, err: %s", err.Error()) + return err + } + return nil +} + +func (e *AtomicMatchExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *AtomicMatchExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + bc := e.bc + txInfo := e.txInfo + matchNft := bc.StateDB().NftMap[txInfo.SellOffer.NftIndex] + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.AccountIndex, txInfo.GasAccountIndex, + txInfo.SellOffer.AccountIndex, txInfo.BuyOffer.AccountIndex, matchNft.CreatorAccountIndex}) + if err != nil { + return nil, err + } + fromAccount := copiedAccounts[txInfo.AccountIndex] + buyAccount := copiedAccounts[txInfo.BuyOffer.AccountIndex] + sellAccount := copiedAccounts[txInfo.SellOffer.AccountIndex] + creatorAccount := copiedAccounts[matchNft.CreatorAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 9) + + // from account gas asset + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + + // buyer asset A + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.BuyOffer.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.BuyOffer.AccountIndex, + AccountName: buyAccount.AccountName, + Balance: buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.BuyOffer.AssetId, ffmath.Neg(txInfo.BuyOffer.AssetAmount), types.ZeroBigInt, types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: buyAccount.Nonce, + CollectionNonce: buyAccount.CollectionNonce, + }) + buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Sub(buyAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.BuyOffer.AssetAmount) + + // buy offer + order++ + buyOffer := 
buyAccount.AssetInfo[e.buyOfferAssetId].OfferCanceledOrFinalized + buyOffer = new(big.Int).SetBit(buyOffer, int(e.buyOfferIndex), 1) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: e.buyOfferAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.BuyOffer.AccountIndex, + AccountName: buyAccount.AccountName, + Balance: buyAccount.AssetInfo[e.buyOfferAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + e.buyOfferAssetId, types.ZeroBigInt, types.ZeroBigInt, buyOffer).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: buyAccount.Nonce, + CollectionNonce: buyAccount.CollectionNonce, + }) + buyAccount.AssetInfo[e.buyOfferAssetId].OfferCanceledOrFinalized = buyOffer + + // seller asset A + order++ + accountOrder++ + sellDeltaAmount := ffmath.Sub(txInfo.SellOffer.AssetAmount, ffmath.Add(txInfo.TreasuryAmount, txInfo.CreatorAmount)) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.SellOffer.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.SellOffer.AccountIndex, + AccountName: sellAccount.AccountName, + Balance: sellAccount.AssetInfo[txInfo.SellOffer.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.SellOffer.AssetId, sellDeltaAmount, types.ZeroBigInt, types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: sellAccount.Nonce, + CollectionNonce: sellAccount.CollectionNonce, + }) + sellAccount.AssetInfo[txInfo.SellOffer.AssetId].Balance = ffmath.Add(sellAccount.AssetInfo[txInfo.SellOffer.AssetId].Balance, sellDeltaAmount) + + // sell offer + order++ + sellOffer := sellAccount.AssetInfo[e.sellOfferAssetId].OfferCanceledOrFinalized + sellOffer = new(big.Int).SetBit(sellOffer, int(e.sellOfferIndex), 1) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: e.sellOfferAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.SellOffer.AccountIndex, + AccountName: sellAccount.AccountName, + Balance: sellAccount.AssetInfo[e.sellOfferAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + e.sellOfferAssetId, types.ZeroBigInt, types.ZeroBigInt, sellOffer).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: sellAccount.Nonce, + CollectionNonce: sellAccount.CollectionNonce, + }) + sellAccount.AssetInfo[e.sellOfferAssetId].OfferCanceledOrFinalized = sellOffer + + // creator fee + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.BuyOffer.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: matchNft.CreatorAccountIndex, + AccountName: creatorAccount.AccountName, + Balance: creatorAccount.AssetInfo[txInfo.BuyOffer.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.BuyOffer.AssetId, txInfo.CreatorAmount, types.ZeroBigInt, types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: creatorAccount.Nonce, + CollectionNonce: creatorAccount.CollectionNonce, + }) + creatorAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Add(creatorAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.CreatorAmount) + + // nft info + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: matchNft.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: types.ConstructNftInfo(matchNft.NftIndex, matchNft.CreatorAccountIndex, matchNft.OwnerAccountIndex, + matchNft.NftContentHash, matchNft.NftL1TokenId, matchNft.NftL1Address, matchNft.CreatorTreasuryRate, 
matchNft.CollectionId).String(), + BalanceDelta: types.ConstructNftInfo(matchNft.NftIndex, matchNft.CreatorAccountIndex, txInfo.BuyOffer.AccountIndex, + matchNft.NftContentHash, matchNft.NftL1TokenId, matchNft.NftL1Address, matchNft.CreatorTreasuryRate, matchNft.CollectionId).String(), + Order: order, + AccountOrder: types.NilAccountOrder, + Nonce: 0, + CollectionNonce: 0, + }) + + // gas account asset A - treasury fee + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.BuyOffer.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.BuyOffer.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.BuyOffer.AssetId, txInfo.TreasuryAmount, types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + gasAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.BuyOffer.AssetId].Balance, txInfo.TreasuryAmount) + + // gas account asset gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + + return txDetails, nil +} + +func (e *AtomicMatchExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeAtomicMatchMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: e.txInfo.BuyOffer.AssetId, + TxAmount: e.txInfo.BuyOffer.AssetAmount.String(), + Memo: "", + AccountIndex: e.txInfo.AccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/base_executor.go b/core/executor/base_executor.go new file mode 100644 index 000000000..958835ebf --- /dev/null +++ b/core/executor/base_executor.go @@ -0,0 +1,73 @@ +package executor + +import ( + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +const ( + OfferPerAsset = 128 + TenThousand = 10000 +) + +type BaseExecutor struct { + bc IBlockchain + tx *tx.Tx + iTxInfo legendTxTypes.TxInfo +} + +func (e *BaseExecutor) Prepare() error { + return nil +} + +func (e *BaseExecutor) VerifyInputs() error { + txInfo := e.iTxInfo + + err := txInfo.Validate() + if err != nil { + return err + } + err = e.bc.VerifyExpiredAt(txInfo.GetExpiredAt()) + if err != nil { + return err + } + + from := txInfo.GetFromAccountIndex() + if from != types.NilTxAccountIndex { + err = 
e.bc.VerifyNonce(from, txInfo.GetNonce()) + if err != nil { + return err + } + + err = txInfo.VerifySignature(e.bc.StateDB().AccountMap[from].PublicKey) + if err != nil { + return err + } + } + + return nil +} + +func (e *BaseExecutor) ApplyTransaction() error { + return nil +} + +func (e *BaseExecutor) GeneratePubData() error { + return nil +} + +func (e *BaseExecutor) UpdateTrees() error { + return nil +} + +func (e *BaseExecutor) GetExecutedTx() (*tx.Tx, error) { + e.tx.BlockHeight = e.bc.CurrentBlock().BlockHeight + e.tx.StateRoot = e.bc.StateDB().GetStateRoot() + e.tx.TxStatus = tx.StatusPending + return e.tx, nil +} + +func (e *BaseExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + return nil, nil +} diff --git a/core/executor/cancel_offer_executor.go b/core/executor/cancel_offer_executor.go new file mode 100644 index 000000000..c8e406569 --- /dev/null +++ b/core/executor/cancel_offer_executor.go @@ -0,0 +1,274 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type CancelOfferExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.CancelOfferTxInfo +} + +func NewCancelOfferExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseCancelOfferTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &CancelOfferExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *CancelOfferExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex} + offerAssetId := txInfo.OfferId / OfferPerAsset + assets := []int64{offerAssetId, txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + return nil +} + +func (e *CancelOfferExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("balance is not enough") + } + + return nil +} + +func (e *CancelOfferExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + fromAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + offerAssetId := txInfo.OfferId / OfferPerAsset + offerIndex := txInfo.OfferId % OfferPerAsset + oOffer := 
fromAccount.AssetInfo[offerAssetId].OfferCanceledOrFinalized + nOffer := new(big.Int).SetBit(oOffer, int(offerIndex), 1) + fromAccount.AssetInfo[offerAssetId].OfferCanceledOrFinalized = nOffer + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + + return nil +} + +func (e *CancelOfferExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeCancelOffer)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint24ToBytes(txInfo.OfferId)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *CancelOfferExecutor) UpdateTrees() error { + txInfo := e.txInfo + + offerAssetId := txInfo.OfferId / OfferPerAsset + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex} + assets := []int64{offerAssetId, txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + return nil +} + +func (e *CancelOfferExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *CancelOfferExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.AccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + fromAccount := copiedAccounts[txInfo.AccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account gas asset + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: fromAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if 
fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // from account offer id + offerAssetId := txInfo.OfferId / OfferPerAsset + offerIndex := txInfo.OfferId % OfferPerAsset + oldOffer := fromAccount.AssetInfo[offerAssetId].OfferCanceledOrFinalized + // verify whether account offer id is valid for use + if oldOffer.Bit(int(offerIndex)) == 1 { + logx.Errorf("account %d offer index %d is already in use", txInfo.AccountIndex, offerIndex) + return nil, errors.New("unexpected err") + } + nOffer := new(big.Int).SetBit(oldOffer, int(offerIndex), 1) + + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: offerAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[offerAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + offerAssetId, + types.ZeroBigInt, + types.ZeroBigInt, + nOffer, + ).String(), + Order: order, + Nonce: fromAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[offerAssetId].OfferCanceledOrFinalized = nOffer + + // gas account gas asset + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: gasAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *CancelOfferExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeCancelOfferMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: "", + Memo: "", + AccountIndex: e.txInfo.AccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/create_collection_executor.go b/core/executor/create_collection_executor.go new file mode 100644 index 000000000..1f358ad46 --- /dev/null +++ b/core/executor/create_collection_executor.go @@ -0,0 +1,258 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + "strconv" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type CreateCollectionExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.CreateCollectionTxInfo +} + +func NewCreateCollectionExecutor(bc 
IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseCreateCollectionTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &CreateCollectionExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *CreateCollectionExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + // add collection nonce to tx info + txInfo.CollectionId = fromAccount.CollectionNonce + + return nil +} + +func (e *CreateCollectionExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("balance is not enough") + } + + return nil +} + +func (e *CreateCollectionExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + fromAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + // apply changes + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + fromAccount.CollectionNonce++ + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + return nil +} + +func (e *CreateCollectionExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeCreateCollection)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CollectionId))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) 
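+ // The whole CreateCollection payload (tx type, account index, collection id, gas account, fee asset id, packed fee) fits in the first chunk; the remaining five chunks are empty padding.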
+ return nil +} + +func (e *CreateCollectionExecutor) UpdateTrees() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + return nil +} + +func (e *CreateCollectionExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.CollectionId = e.txInfo.CollectionId + + return e.BaseExecutor.GetExecutedTx() +} + +func (e *CreateCollectionExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.AccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + + fromAccount := copiedAccounts[txInfo.AccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account collection nonce + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: types.NilAssetId, + AssetType: types.CollectionNonceAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: strconv.FormatInt(fromAccount.CollectionNonce, 10), + BalanceDelta: strconv.FormatInt(fromAccount.CollectionNonce+1, 10), + Order: order, + Nonce: fromAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.CollectionNonce = fromAccount.CollectionNonce + 1 + + // from account gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: fromAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // gas account gas asset + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: gasAccount.Nonce, + AccountOrder: accountOrder, + }) + return txDetails, nil +} + +func (e *CreateCollectionExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeCreateCollectionMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + 
GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: "", + Memo: "", + AccountIndex: e.txInfo.AccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/create_pair_executor.go b/core/executor/create_pair_executor.go new file mode 100644 index 000000000..9ba7002fc --- /dev/null +++ b/core/executor/create_pair_executor.go @@ -0,0 +1,169 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/sqlx" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type CreatePairExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.CreatePairTxInfo +} + +func NewCreatePairExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseCreatePairTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse create pair tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &CreatePairExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *CreatePairExecutor) Prepare() error { + return nil +} + +func (e *CreatePairExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + _, err := bc.DB().LiquidityModel.GetLiquidityByPairIndex(txInfo.PairIndex) + if err != sqlx.ErrNotFound { + return errors.New("invalid pair index, already registered") + } + + for index := range bc.StateDB().PendingNewLiquidityIndexMap { + if txInfo.PairIndex == index { + return errors.New("invalid pair index, already registered") + } + } + + return nil +} + +func (e *CreatePairExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + newLiquidity := &liquidity.Liquidity{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetA: types.ZeroBigIntString, + AssetBId: txInfo.AssetBId, + AssetB: types.ZeroBigIntString, + LpAmount: types.ZeroBigIntString, + KLast: types.ZeroBigIntString, + TreasuryAccountIndex: txInfo.TreasuryAccountIndex, + FeeRate: txInfo.FeeRate, + TreasuryRate: txInfo.TreasuryRate, + } + bc.StateDB().LiquidityMap[txInfo.PairIndex] = newLiquidity + + stateCache := e.bc.StateDB() + stateCache.PendingNewLiquidityIndexMap[txInfo.PairIndex] = statedb.StateCachePending + return nil +} + +func (e *CreatePairExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeCreatePair)) + buf.Write(common.Uint16ToBytes(uint16(txInfo.PairIndex))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.AssetAId))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.AssetBId))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.FeeRate))) + buf.Write(common.Uint32ToBytes(uint32(txInfo.TreasuryAccountIndex))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.TreasuryRate))) + chunk := common.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + 
buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *CreatePairExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + return bc.StateDB().UpdateLiquidityTree(txInfo.PairIndex) +} + +func (e *CreatePairExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.PairIndex = e.txInfo.PairIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *CreatePairExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + baseLiquidity := types.EmptyLiquidityInfo(txInfo.PairIndex) + deltaLiquidity := &types.LiquidityInfo{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetA: big.NewInt(0), + AssetBId: txInfo.AssetBId, + AssetB: big.NewInt(0), + LpAmount: big.NewInt(0), + KLast: big.NewInt(0), + FeeRate: txInfo.FeeRate, + TreasuryAccountIndex: txInfo.TreasuryAccountIndex, + TreasuryRate: txInfo.TreasuryRate, + } + + txDetail := &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.LiquidityAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: baseLiquidity.String(), + BalanceDelta: deltaLiquidity.String(), + Order: 0, + AccountOrder: types.NilAccountOrder, + Nonce: types.NilNonce, + CollectionNonce: types.NilNonce, + } + + return []*tx.TxDetail{txDetail}, nil +} + +func (e *CreatePairExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/deposit_executor.go b/core/executor/deposit_executor.go new file mode 100644 index 000000000..894ab4098 --- /dev/null +++ b/core/executor/deposit_executor.go @@ -0,0 +1,175 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type DepositExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.DepositTxInfo +} + +func NewDepositExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseDepositTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse deposit tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &DepositExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *DepositExecutor) Prepare() error { + bc := e.bc + txInfo := e.txInfo + + // The account index from txInfo isn't true, find account by account name hash. 
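+ // If the account is not yet persisted, fall back to accounts created earlier in this block that are still pending.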
+ accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) + account, err := bc.DB().AccountModel.GetAccountByNameHash(accountNameHash) + if err != nil { + for index := range bc.StateDB().PendingNewAccountIndexMap { + if accountNameHash == bc.StateDB().AccountMap[index].AccountNameHash { + account, err = chain.FromFormatAccountInfo(bc.StateDB().AccountMap[index]) + break + } + } + + if err != nil { + return errors.New("invalid account name hash") + } + } + + // Set the right account index. + txInfo.AccountIndex = account.AccountIndex + + accounts := []int64{txInfo.AccountIndex} + assets := []int64{txInfo.AssetId} + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return err + } + + return nil +} + +func (e *DepositExecutor) VerifyInputs() error { + txInfo := e.txInfo + + if txInfo.AssetAmount.Cmp(types.ZeroBigInt) < 0 { + return errors.New("invalid asset amount") + } + + return nil +} + +func (e *DepositExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + depositAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + depositAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Add(depositAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + return nil +} + +func (e *DepositExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeDeposit)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetId))) + buf.Write(common2.Uint128ToBytes(txInfo.AssetAmount)) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PubData = append(stateCache.PubData, pubData...) 
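+ // Deposits originate on L1: the priority-operation counter is bumped and the pubdata offset recorded so the operation can be matched against its L1 priority request.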
+ return nil +} + +func (e *DepositExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + accounts := []int64{txInfo.AccountIndex} + assets := []int64{txInfo.AssetId} + return bc.StateDB().UpdateAccountTree(accounts, assets) +} + +func (e *DepositExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.AssetId = e.txInfo.AssetId + e.tx.TxAmount = e.txInfo.AssetAmount.String() + e.tx.AccountIndex = e.txInfo.AccountIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *DepositExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + depositAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + baseBalance := depositAccount.AssetInfo[txInfo.AssetId] + deltaBalance := &types.AccountAsset{ + AssetId: txInfo.AssetId, + Balance: txInfo.AssetAmount, + LpAmount: big.NewInt(0), + OfferCanceledOrFinalized: big.NewInt(0), + } + txDetail := &tx.TxDetail{ + AssetId: txInfo.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: depositAccount.AccountName, + Balance: baseBalance.String(), + BalanceDelta: deltaBalance.String(), + Order: 0, + AccountOrder: 0, + Nonce: depositAccount.Nonce, + CollectionNonce: depositAccount.CollectionNonce, + } + return []*tx.TxDetail{txDetail}, nil +} + +func (e *DepositExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/deposit_nft_executor.go b/core/executor/deposit_nft_executor.go new file mode 100644 index 000000000..503fc25fa --- /dev/null +++ b/core/executor/deposit_nft_executor.go @@ -0,0 +1,244 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type DepositNftExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.DepositNftTxInfo + + isNewNft bool +} + +func NewDepositNftExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseDepositNftTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse deposit nft tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &DepositNftExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *DepositNftExecutor) Prepare() error { + bc := e.bc + txInfo := e.txInfo + + // The account index from txInfo isn't true, find account by account name hash. + accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) + account, err := bc.DB().AccountModel.GetAccountByNameHash(accountNameHash) + if err != nil { + for index := range bc.StateDB().PendingNewAccountIndexMap { + if accountNameHash == bc.StateDB().AccountMap[index].AccountNameHash { + account, err = chain.FromFormatAccountInfo(bc.StateDB().AccountMap[index]) + break + } + } + + if err != nil { + return errors.New("invalid account name hash") + } + } + + // Set the right account index. 
+ txInfo.AccountIndex = account.AccountIndex + + accounts := []int64{txInfo.AccountIndex, txInfo.CreatorAccountIndex} + assets := []int64{0} // Just used for generate an empty tx detail. + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return err + } + + // Check if it is a new nft, or it is a nft previously withdraw from layer2. + if txInfo.NftIndex == 0 && txInfo.CollectionId == 0 && txInfo.CreatorAccountIndex == 0 && txInfo.CreatorTreasuryRate == 0 { + e.isNewNft = true + // Set new nft index for new nft. + txInfo.NftIndex = bc.StateDB().GetNextNftIndex() + } else { + err = e.bc.StateDB().PrepareNft(txInfo.NftIndex) + if err != nil { + logx.Errorf("prepare nft failed") + return err + } + } + + return nil +} + +func (e *DepositNftExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + if e.isNewNft { + if bc.StateDB().NftMap[txInfo.NftIndex] != nil { + return errors.New("invalid nft index, already exist") + } + } else { + if bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex != types.NilAccountIndex { + return errors.New("invalid nft index, already exist") + } + } + + return nil +} + +func (e *DepositNftExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + bc.StateDB().NftMap[txInfo.NftIndex] = &nft.L2Nft{ + NftIndex: txInfo.NftIndex, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + OwnerAccountIndex: txInfo.AccountIndex, + NftContentHash: common.Bytes2Hex(txInfo.NftContentHash), + NftL1Address: txInfo.NftL1Address, + NftL1TokenId: txInfo.NftL1TokenId.String(), + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + CollectionId: txInfo.CollectionId, + } + + stateCache := e.bc.StateDB() + if e.isNewNft { + stateCache.PendingNewNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + } else { + stateCache.PendingUpdateNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + } + + return nil +} + +func (e *DepositNftExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeDepositNft)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint40ToBytes(txInfo.NftIndex)) + buf.Write(common2.AddressStrToBytes(txInfo.NftL1Address)) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(common2.Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CollectionId))) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) + buf.Write(common2.Uint256ToBytes(txInfo.NftL1TokenId)) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PubData = append(stateCache.PubData, pubData...) 
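+ // The nft deposit pubdata spans several chunks: owner/nft index/L1 address, creator data, content hash, L1 token id and the account name hash; the recorded offset ties it to the L1 priority request.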
+ return nil +} + +func (e *DepositNftExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + + return bc.StateDB().UpdateNftTree(txInfo.NftIndex) +} + +func (e *DepositNftExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.NftIndex = e.txInfo.NftIndex + e.tx.AccountIndex = e.txInfo.AccountIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *DepositNftExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + depositAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + txDetails := make([]*tx.TxDetail, 0, 2) + + // user info + accountOrder := int64(0) + order := int64(0) + baseBalance := depositAccount.AssetInfo[0] + deltaBalance := &types.AccountAsset{ + AssetId: 0, + Balance: big.NewInt(0), + LpAmount: big.NewInt(0), + OfferCanceledOrFinalized: big.NewInt(0), + } + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: 0, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: depositAccount.AccountName, + Balance: baseBalance.String(), + BalanceDelta: deltaBalance.String(), + AccountOrder: accountOrder, + Order: order, + Nonce: depositAccount.Nonce, + CollectionNonce: depositAccount.CollectionNonce, + }) + // nft info + order++ + baseNft := types.EmptyNftInfo(txInfo.NftIndex) + newNft := types.ConstructNftInfo( + txInfo.NftIndex, + txInfo.CreatorAccountIndex, + txInfo.AccountIndex, + common.Bytes2Hex(txInfo.NftContentHash), + txInfo.NftL1TokenId.String(), + txInfo.NftL1Address, + txInfo.CreatorTreasuryRate, + txInfo.CollectionId, + ) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: depositAccount.AccountName, + Balance: baseNft.String(), + BalanceDelta: newNft.String(), + AccountOrder: types.NilAccountOrder, + Order: order, + Nonce: depositAccount.Nonce, + CollectionNonce: depositAccount.CollectionNonce, + }) + + return txDetails, nil +} + +func (e *DepositNftExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/executor.go b/core/executor/executor.go new file mode 100644 index 000000000..5994bc14c --- /dev/null +++ b/core/executor/executor.go @@ -0,0 +1,73 @@ +package executor + +import ( + "errors" + + sdb "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type IBlockchain interface { + VerifyExpiredAt(expiredAt int64) error + VerifyNonce(accountIndex int64, nonce int64) error + StateDB() *sdb.StateDB + DB() *sdb.ChainDB + CurrentBlock() *block.Block +} + +type TxExecutor interface { + Prepare() error + VerifyInputs() error + ApplyTransaction() error + GeneratePubData() error + UpdateTrees() error + GetExecutedTx() (*tx.Tx, error) + GenerateTxDetails() ([]*tx.TxDetail, error) + GenerateMempoolTx() (*mempool.MempoolTx, error) +} + +func NewTxExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + switch tx.TxType { + case types.TxTypeRegisterZns: + return NewRegisterZnsExecutor(bc, tx) + case types.TxTypeCreatePair: + return NewCreatePairExecutor(bc, tx) + case types.TxTypeUpdatePairRate: + return NewUpdatePairRateExecutor(bc, tx) + case types.TxTypeDeposit: + return 
NewDepositExecutor(bc, tx) + case types.TxTypeDepositNft: + return NewDepositNftExecutor(bc, tx) + case types.TxTypeTransfer: + return NewTransferExecutor(bc, tx) + case types.TxTypeSwap: + return NewSwapExecutor(bc, tx) + case types.TxTypeAddLiquidity: + return NewAddLiquidityExecutor(bc, tx) + case types.TxTypeRemoveLiquidity: + return NewRemoveLiquidityExecutor(bc, tx) + case types.TxTypeWithdraw: + return NewWithdrawExecutor(bc, tx) + case types.TxTypeCreateCollection: + return NewCreateCollectionExecutor(bc, tx) + case types.TxTypeMintNft: + return NewMintNftExecutor(bc, tx) + case types.TxTypeTransferNft: + return NewTransferNftExecutor(bc, tx) + case types.TxTypeAtomicMatch: + return NewAtomicMatchExecutor(bc, tx) + case types.TxTypeCancelOffer: + return NewCancelOfferExecutor(bc, tx) + case types.TxTypeWithdrawNft: + return NewWithdrawNftExecutor(bc, tx) + case types.TxTypeFullExit: + return NewFullExitExecutor(bc, tx) + case types.TxTypeFullExitNft: + return NewFullExitNftExecutor(bc, tx) + } + + return nil, errors.New("unsupported tx type") +} diff --git a/core/executor/full_exit_executor.go b/core/executor/full_exit_executor.go new file mode 100644 index 000000000..f494d97a1 --- /dev/null +++ b/core/executor/full_exit_executor.go @@ -0,0 +1,176 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type FullExitExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.FullExitTxInfo +} + +func NewFullExitExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseFullExitTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse full exit tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &FullExitExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *FullExitExecutor) Prepare() error { + bc := e.bc + txInfo := e.txInfo + + // The account index from txInfo isn't true, find account by account name hash. + accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) + account, err := bc.DB().AccountModel.GetAccountByNameHash(accountNameHash) + if err != nil { + for index := range bc.StateDB().PendingNewAccountIndexMap { + if accountNameHash == bc.StateDB().AccountMap[index].AccountNameHash { + account, err = chain.FromFormatAccountInfo(bc.StateDB().AccountMap[index]) + break + } + } + + if err != nil { + return errors.New("invalid account name hash") + } + } + + // Set the right account index. + txInfo.AccountIndex = account.AccountIndex + + accounts := []int64{txInfo.AccountIndex} + assets := []int64{txInfo.AssetId} + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + // Set the right asset amount. 
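+ // A full exit always withdraws the account's entire balance of the asset, so the amount is taken from L2 state rather than from the L1 request.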
+ txInfo.AssetAmount = bc.StateDB().AccountMap[txInfo.AccountIndex].AssetInfo[txInfo.AssetId].Balance + + return nil +} + +func (e *FullExitExecutor) VerifyInputs() error { + return nil +} + +func (e *FullExitExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + exitAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + exitAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Sub(exitAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + + if txInfo.AssetAmount.Cmp(types.ZeroBigInt) != 0 { + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + } + return nil +} + +func (e *FullExitExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeFullExit)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetId))) + buf.Write(common2.Uint128ToBytes(txInfo.AssetAmount)) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PendingOnChainOperationsPubData = append(stateCache.PendingOnChainOperationsPubData, pubData) + stateCache.PendingOnChainOperationsHash = common2.ConcatKeccakHash(stateCache.PendingOnChainOperationsHash, pubData) + stateCache.PubData = append(stateCache.PubData, pubData...) 
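+ // Besides the usual priority bookkeeping, a full exit must also be processed on L1, so its pubdata is queued as a pending on-chain operation and folded into the rolling keccak hash.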
+ return nil +} + +func (e *FullExitExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + accounts := []int64{txInfo.AccountIndex} + assets := []int64{txInfo.AssetId} + return bc.StateDB().UpdateAccountTree(accounts, assets) +} + +func (e *FullExitExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.AssetId = e.txInfo.AssetId + e.tx.TxAmount = e.txInfo.AssetAmount.String() + e.tx.AccountIndex = e.txInfo.AccountIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *FullExitExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + exitAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + baseBalance := exitAccount.AssetInfo[txInfo.AssetId] + deltaBalance := &types.AccountAsset{ + AssetId: txInfo.AssetId, + Balance: ffmath.Neg(txInfo.AssetAmount), + LpAmount: big.NewInt(0), + OfferCanceledOrFinalized: big.NewInt(0), + } + txDetail := &tx.TxDetail{ + AssetId: txInfo.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: exitAccount.AccountName, + Balance: baseBalance.String(), + BalanceDelta: deltaBalance.String(), + Order: 0, + AccountOrder: 0, + Nonce: exitAccount.Nonce, + CollectionNonce: exitAccount.CollectionNonce, + } + return []*tx.TxDetail{txDetail}, nil +} + +func (e *FullExitExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/full_exit_nft_executor.go b/core/executor/full_exit_nft_executor.go new file mode 100644 index 000000000..3be646770 --- /dev/null +++ b/core/executor/full_exit_nft_executor.go @@ -0,0 +1,288 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type FullExitNftExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.FullExitNftTxInfo + + exitNft *nft.L2Nft +} + +func NewFullExitNftExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseFullExitNftTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse full exit nft tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &FullExitNftExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *FullExitNftExecutor) Prepare() error { + bc := e.bc + txInfo := e.txInfo + + // The account index from txInfo isn't true, find account by account name hash. + accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) + account, err := bc.DB().AccountModel.GetAccountByNameHash(accountNameHash) + if err != nil { + for index := range bc.StateDB().PendingNewAccountIndexMap { + if accountNameHash == bc.StateDB().AccountMap[index].AccountNameHash { + account, err = chain.FromFormatAccountInfo(bc.StateDB().AccountMap[index]) + break + } + } + + if err != nil { + return errors.New("invalid account name hash") + } + } + + // Set the right account index. 
+ txInfo.AccountIndex = account.AccountIndex + + // Default withdraw an empty nft. + // Case1: the nft index isn't exist. + // Case2: the account isn't the owner of the nft. + emptyNftInfo := types.EmptyNftInfo(txInfo.NftIndex) + exitNft := &nft.L2Nft{ + NftIndex: emptyNftInfo.NftIndex, + CreatorAccountIndex: emptyNftInfo.CreatorAccountIndex, + OwnerAccountIndex: emptyNftInfo.OwnerAccountIndex, + NftContentHash: emptyNftInfo.NftContentHash, + NftL1Address: emptyNftInfo.NftL1Address, + NftL1TokenId: emptyNftInfo.NftL1TokenId, + CreatorTreasuryRate: emptyNftInfo.CreatorTreasuryRate, + CollectionId: emptyNftInfo.CollectionId, + } + err = e.bc.StateDB().PrepareNft(txInfo.NftIndex) + if err == nil && bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex == account.AccountIndex { + // Set the right nft if the owner is correct. + exitNft = bc.StateDB().NftMap[txInfo.NftIndex] + } + + accounts := []int64{account.AccountIndex} + if exitNft.CreatorAccountIndex != types.NilAccountIndex { + accounts = append(accounts, exitNft.CreatorAccountIndex) + } + assets := []int64{0} // Just used for generate an empty tx detail. + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + // Set the right tx info. + txInfo.CreatorAccountIndex = exitNft.CreatorAccountIndex + txInfo.CreatorTreasuryRate = exitNft.CreatorTreasuryRate + txInfo.CreatorAccountNameHash = common.FromHex(types.NilAccountNameHash) + if exitNft.CreatorAccountIndex != types.NilAccountIndex { + txInfo.CreatorAccountNameHash = common.FromHex(bc.StateDB().AccountMap[exitNft.CreatorAccountIndex].AccountNameHash) + } + txInfo.NftL1Address = exitNft.NftL1Address + txInfo.NftL1TokenId, _ = new(big.Int).SetString(exitNft.NftL1TokenId, 10) + txInfo.NftContentHash = common.FromHex(exitNft.NftContentHash) + txInfo.CollectionId = exitNft.CollectionId + + e.exitNft = exitNft + return nil +} + +func (e *FullExitNftExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + if bc.StateDB().NftMap[txInfo.NftIndex] == nil || txInfo.AccountIndex != bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex { + // The check is not fully enough, just avoid explicit error. + if !bytes.Equal(txInfo.NftContentHash, common.FromHex(types.NilNftContentHash)) { + return errors.New("invalid nft content hash") + } + } else { + // The check is not fully enough, just avoid explicit error. + if !bytes.Equal(txInfo.NftContentHash, common.FromHex(bc.StateDB().NftMap[txInfo.NftIndex].NftContentHash)) { + return errors.New("invalid nft content hash") + } + } + + return nil +} + +func (e *FullExitNftExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + if bc.StateDB().NftMap[txInfo.NftIndex] == nil || txInfo.AccountIndex != bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex { + // Do nothing. + return nil + } + + // Set nft to empty nft. 
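+ // The exited nft leaves L2: its slot is overwritten with the canonical empty nft leaf.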
+ emptyNftInfo := types.EmptyNftInfo(txInfo.NftIndex) + emptyNft := &nft.L2Nft{ + NftIndex: emptyNftInfo.NftIndex, + CreatorAccountIndex: emptyNftInfo.CreatorAccountIndex, + OwnerAccountIndex: emptyNftInfo.OwnerAccountIndex, + NftContentHash: emptyNftInfo.NftContentHash, + NftL1Address: emptyNftInfo.NftL1Address, + NftL1TokenId: emptyNftInfo.NftL1TokenId, + CreatorTreasuryRate: emptyNftInfo.CreatorTreasuryRate, + CollectionId: emptyNftInfo.CollectionId, + } + bc.StateDB().NftMap[txInfo.NftIndex] = emptyNft + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + return nil +} + +func (e *FullExitNftExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeFullExitNft)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) + buf.Write(common2.Uint40ToBytes(txInfo.NftIndex)) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CollectionId))) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(common2.AddressStrToBytes(txInfo.NftL1Address)) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.CreatorAccountNameHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) + buf.Write(common2.Uint256ToBytes(txInfo.NftL1TokenId)) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PendingOnChainOperationsPubData = append(stateCache.PendingOnChainOperationsPubData, pubData) + stateCache.PendingOnChainOperationsHash = common2.ConcatKeccakHash(stateCache.PendingOnChainOperationsHash, pubData) + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *FullExitNftExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + + if bc.StateDB().NftMap[txInfo.NftIndex] == nil { + // Do nothing when nft doesn't exist. 
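+ // The nft was never loaded into state and ApplyTransaction changed nothing, so there is no pending change to write to the nft tree.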
+ return nil + } + + return bc.StateDB().UpdateNftTree(txInfo.NftIndex) +} + +func (e *FullExitNftExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.NftIndex = e.txInfo.NftIndex + e.tx.AccountIndex = e.txInfo.AccountIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *FullExitNftExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + bc := e.bc + txInfo := e.txInfo + exitAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + txDetails := make([]*tx.TxDetail, 0, 2) + + // user info + accountOrder := int64(0) + order := int64(0) + baseBalance := exitAccount.AssetInfo[0] + emptyDelta := &types.AccountAsset{ + AssetId: 0, + Balance: big.NewInt(0), + LpAmount: big.NewInt(0), + OfferCanceledOrFinalized: big.NewInt(0), + } + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: 0, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: exitAccount.AccountName, + Balance: baseBalance.String(), + BalanceDelta: emptyDelta.String(), + AccountOrder: accountOrder, + Order: order, + Nonce: exitAccount.Nonce, + CollectionNonce: exitAccount.CollectionNonce, + }) + // nft info + order++ + emptyNft := types.EmptyNftInfo(txInfo.NftIndex) + baseNft := emptyNft + newNft := emptyNft + if bc.StateDB().NftMap[txInfo.NftIndex] != nil { + baseNft = types.ConstructNftInfo( + bc.StateDB().NftMap[txInfo.NftIndex].NftIndex, + bc.StateDB().NftMap[txInfo.NftIndex].CreatorAccountIndex, + bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex, + bc.StateDB().NftMap[txInfo.NftIndex].NftContentHash, + bc.StateDB().NftMap[txInfo.NftIndex].NftL1TokenId, + bc.StateDB().NftMap[txInfo.NftIndex].NftL1Address, + bc.StateDB().NftMap[txInfo.NftIndex].CreatorTreasuryRate, + bc.StateDB().NftMap[txInfo.NftIndex].CollectionId, + ) + if txInfo.AccountIndex != bc.StateDB().NftMap[txInfo.NftIndex].OwnerAccountIndex { + newNft = baseNft + } + } + + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: exitAccount.AccountName, + Balance: baseNft.String(), + BalanceDelta: newNft.String(), + AccountOrder: types.NilAccountOrder, + Order: order, + Nonce: exitAccount.Nonce, + CollectionNonce: exitAccount.CollectionNonce, + }) + + return txDetails, nil +} + +func (e *FullExitNftExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/mint_nft_executor.go b/core/executor/mint_nft_executor.go new file mode 100644 index 000000000..c06d40145 --- /dev/null +++ b/core/executor/mint_nft_executor.go @@ -0,0 +1,319 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type MintNftExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.MintNftTxInfo +} + +func NewMintNftExecutor(bc IBlockchain, tx 
*tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseMintNftTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &MintNftExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *MintNftExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.CreatorAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + // add nft index to tx info + nextNftIndex := e.bc.StateDB().GetNextNftIndex() + txInfo.NftIndex = nextNftIndex + + return nil +} + +func (e *MintNftExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + creatorAccount := e.bc.StateDB().AccountMap[txInfo.CreatorAccountIndex] + if creatorAccount.CollectionNonce <= txInfo.NftCollectionId { + return errors.New("nft collection id is not less than account collection nonce") + } + if creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("balance is not enough") + } + + toAccount := e.bc.StateDB().AccountMap[txInfo.ToAccountIndex] + if txInfo.ToAccountNameHash != toAccount.AccountNameHash { + return errors.New("invalid ToAccountNameHash") + } + + return nil +} + +func (e *MintNftExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + creatorAccount := bc.StateDB().AccountMap[txInfo.CreatorAccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + creatorAccount.Nonce++ + + bc.StateDB().NftMap[txInfo.NftIndex] = &nft.L2Nft{ + NftIndex: txInfo.NftIndex, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + OwnerAccountIndex: txInfo.ToAccountIndex, + NftContentHash: txInfo.NftContentHash, + NftL1Address: types.NilL1Address, + NftL1TokenId: types.NilL1TokenId, + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + CollectionId: txInfo.NftCollectionId, + } + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.CreatorAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingNewNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + return nil +} + +func (e *MintNftExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeMintNft)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.ToAccountIndex))) + buf.Write(common2.Uint40ToBytes(txInfo.NftIndex)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("[ConvertTxToDepositPubData] unable to convert amount to packed fee amount: %s", err.Error()) + 
return err + } + buf.Write(packedFeeBytes) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.NftCollectionId))) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize(common.FromHex(txInfo.NftContentHash))) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *MintNftExecutor) UpdateTrees() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.CreatorAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + err = e.bc.StateDB().UpdateNftTree(txInfo.NftIndex) + if err != nil { + logx.Errorf("update nft tree error, err: %s", err.Error()) + return err + } + return nil +} + +func (e *MintNftExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.NftIndex = e.txInfo.NftIndex + + return e.BaseExecutor.GetExecutedTx() +} + +func (e *MintNftExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.CreatorAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + + creatorAccount := copiedAccounts[txInfo.CreatorAccountIndex] + toAccount := copiedAccounts[txInfo.ToAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account gas asset + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.CreatorAccountIndex, + AccountName: creatorAccount.AccountName, + Balance: creatorAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: creatorAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: creatorAccount.CollectionNonce, + }) + creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if creatorAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // to account empty delta + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.ToAccountIndex, + AccountName: toAccount.AccountName, + Balance: toAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + types.ZeroBigInt, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + 
Nonce: toAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: toAccount.CollectionNonce, + }) + + // to account nft delta + oldNftInfo := types.EmptyNftInfo(txInfo.NftIndex) + newNftInfo := &types.NftInfo{ + NftIndex: txInfo.NftIndex, + CreatorAccountIndex: txInfo.CreatorAccountIndex, + OwnerAccountIndex: txInfo.ToAccountIndex, + NftContentHash: txInfo.NftContentHash, + NftL1TokenId: types.NilL1TokenId, + NftL1Address: types.NilL1Address, + CreatorTreasuryRate: txInfo.CreatorTreasuryRate, + CollectionId: txInfo.NftCollectionId, + } + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: txInfo.ToAccountIndex, + AccountName: toAccount.AccountName, + Balance: oldNftInfo.String(), + BalanceDelta: newNftInfo.String(), + Order: order, + Nonce: toAccount.Nonce, + AccountOrder: types.NilAccountOrder, + CollectionNonce: toAccount.CollectionNonce, + }) + + // gas account gas asset + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: gasAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *MintNftExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeMintNftMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: "", + Memo: "", + AccountIndex: e.txInfo.CreatorAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/register_zns_executor.go b/core/executor/register_zns_executor.go new file mode 100644 index 000000000..c81ae0756 --- /dev/null +++ b/core/executor/register_zns_executor.go @@ -0,0 +1,161 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/sqlx" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +type RegisterZnsExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.RegisterZnsTxInfo +} + +func NewRegisterZnsExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseRegisterZnsTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse register tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &RegisterZnsExecutor{ + BaseExecutor: 
BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *RegisterZnsExecutor) Prepare() error { + return nil +} + +func (e *RegisterZnsExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + _, err := bc.DB().AccountModel.GetAccountByName(txInfo.AccountName) + if err != sqlx.ErrNotFound { + return errors.New("invalid account name, already registered") + } + + for index := range bc.StateDB().PendingNewAccountIndexMap { + if txInfo.AccountName == bc.StateDB().AccountMap[index].AccountName { + return errors.New("invalid account name, already registered") + } + } + + if txInfo.AccountIndex != bc.StateDB().GetNextAccountIndex() { + return errors.New("invalid account index") + } + + return nil +} + +func (e *RegisterZnsExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + var err error + + newAccount := &account.Account{ + AccountIndex: txInfo.AccountIndex, + AccountName: txInfo.AccountName, + PublicKey: txInfo.PubKey, + AccountNameHash: common.Bytes2Hex(txInfo.AccountNameHash), + L1Address: e.tx.NativeAddress, + Nonce: types.NilNonce, + CollectionNonce: types.NilNonce, + AssetInfo: types.NilAssetInfo, + AssetRoot: common.Bytes2Hex(tree.NilAccountAssetRoot), + Status: account.AccountStatusConfirmed, + } + bc.StateDB().AccountMap[txInfo.AccountIndex], err = chain.ToFormatAccountInfo(newAccount) + if err != nil { + return err + } + + stateCache := e.bc.StateDB() + stateCache.PendingNewAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + return nil +} + +func (e *RegisterZnsExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeRegisterZns)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize(common2.AccountNameToBytes32(txInfo.AccountName))) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.AccountNameHash)) + pk, err := common2.ParsePubKey(txInfo.PubKey) + if err != nil { + logx.Errorf("unable to parse pub key: %s", err.Error()) + return err + } + // because we can get Y from X, so we only need to store X is enough + buf.Write(common2.PrefixPaddingBufToChunkSize(pk.A.X.Marshal())) + buf.Write(common2.PrefixPaddingBufToChunkSize(pk.A.Y.Marshal())) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PubData = append(stateCache.PubData, pubData...) 
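+	// Note: unlike the user-initiated executors in this change, RegisterZNS records a pubdata offset and bumps the priority-operation counter above when it appends its pubdata.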
+ return nil +} + +func (e *RegisterZnsExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + accounts := []int64{txInfo.AccountIndex} + + emptyAssetTree, err := tree.NewEmptyAccountAssetTree(bc.StateDB().TreeCtx, txInfo.AccountIndex, uint64(bc.CurrentBlock().BlockHeight)) + if err != nil { + logx.Errorf("new empty account asset tree failed: %s", err.Error()) + return err + } + bc.StateDB().AccountAssetTrees = append(bc.StateDB().AccountAssetTrees, emptyAssetTree) + + return bc.StateDB().UpdateAccountTree(accounts, nil) +} + +func (e *RegisterZnsExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.AccountIndex = e.txInfo.AccountIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *RegisterZnsExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + return nil, nil +} + +func (e *RegisterZnsExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/remove_liquidity_executor.go b/core/executor/remove_liquidity_executor.go new file mode 100644 index 000000000..69f764e19 --- /dev/null +++ b/core/executor/remove_liquidity_executor.go @@ -0,0 +1,536 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type RemoveLiquidityExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.RemoveLiquidityTxInfo + + newPoolInfo *types.LiquidityInfo +} + +func NewRemoveLiquidityExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseRemoveLiquidityTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &RemoveLiquidityExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *RemoveLiquidityExecutor) Prepare() error { + txInfo := e.txInfo + + err := e.bc.StateDB().PrepareLiquidity(txInfo.PairIndex) + if err != nil { + logx.Errorf("prepare liquidity failed: %s", err.Error()) + return err + } + + liquidityModel := e.bc.StateDB().LiquidityMap[txInfo.PairIndex] + + accounts := []int64{txInfo.FromAccountIndex, liquidityModel.TreasuryAccountIndex, txInfo.GasAccountIndex} + assets := []int64{liquidityModel.AssetAId, liquidityModel.AssetBId, txInfo.AssetAId, txInfo.AssetBId, txInfo.PairIndex, txInfo.GasFeeAssetId} + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return err + } + + err = e.fillTxInfo() + if err != nil { + return err + } + + return nil +} + +func (e *RemoveLiquidityExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := 
bc.StateDB().AccountMap[txInfo.FromAccountIndex] + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("invalid gas asset amount") + } + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return err + } + if liquidityInfo.AssetA == nil || liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 || + liquidityInfo.AssetB == nil || liquidityInfo.AssetB.Cmp(big.NewInt(0)) == 0 || + liquidityInfo.LpAmount == nil || liquidityInfo.LpAmount.Cmp(big.NewInt(0)) == 0 { + return errors.New("invalid pool liquidity") + } + + return nil +} + +func (e *RemoveLiquidityExecutor) fillTxInfo() error { + bc := e.bc + txInfo := e.txInfo + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return err + } + + assetAAmount, assetBAmount, err := chain.ComputeRemoveLiquidityAmount(liquidityInfo, txInfo.LpAmount) + if err != nil { + return err + } + + if assetAAmount.Cmp(txInfo.AssetAMinAmount) < 0 || assetBAmount.Cmp(txInfo.AssetBMinAmount) < 0 { + return errors.New("invalid asset min amount") + } + + if fromAccount.AssetInfo[txInfo.PairIndex].LpAmount.Cmp(txInfo.LpAmount) < 0 { + return errors.New("invalid lp amount") + } + + txInfo.AssetAAmountDelta = assetAAmount + txInfo.AssetBAmountDelta = assetBAmount + txInfo.AssetAId = liquidityInfo.AssetAId + txInfo.AssetBId = liquidityInfo.AssetBId + + poolAssetADelta := ffmath.Neg(txInfo.AssetAAmountDelta) + poolAssetBDelta := ffmath.Neg(txInfo.AssetBAmountDelta) + finalPoolA := ffmath.Add(liquidityInfo.AssetA, poolAssetADelta) + finalPoolB := ffmath.Add(liquidityInfo.AssetB, poolAssetBDelta) + lpDeltaForTreasuryAccount, err := chain.ComputeSLp(liquidityInfo.AssetA, liquidityInfo.AssetB, liquidityInfo.KLast, liquidityInfo.FeeRate, liquidityInfo.TreasuryRate) + if err != nil { + return err + } + + // set tx info + txInfo.KLast, err = common2.CleanPackedAmount(ffmath.Multiply(finalPoolA, finalPoolB)) + if err != nil { + return err + } + txInfo.TreasuryAmount = lpDeltaForTreasuryAccount + + return nil +} + +func (e *RemoveLiquidityExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + fromAccountInfo := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + gasAccountInfo := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + treasuryAccount := bc.StateDB().AccountMap[liquidityModel.TreasuryAccountIndex] + + fromAccountInfo.AssetInfo[txInfo.AssetAId].Balance = ffmath.Add(fromAccountInfo.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmountDelta) + fromAccountInfo.AssetInfo[txInfo.AssetBId].Balance = ffmath.Add(fromAccountInfo.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmountDelta) + fromAccountInfo.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Sub(fromAccountInfo.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.LpAmount) + treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.TreasuryAmount) + fromAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + 
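+	// Credit the collected gas fee to the gas account below; the new pool state written into LiquidityMap relies on e.newPoolInfo, which is computed in GenerateTxDetails.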
gasAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccountInfo.Nonce++ + + bc.StateDB().LiquidityMap[txInfo.PairIndex] = &liquidity.Liquidity{ + Model: liquidityModel.Model, + PairIndex: e.newPoolInfo.PairIndex, + AssetAId: liquidityModel.AssetAId, + AssetA: e.newPoolInfo.AssetA.String(), + AssetBId: liquidityModel.AssetBId, + AssetB: e.newPoolInfo.AssetB.String(), + LpAmount: e.newPoolInfo.LpAmount.String(), + KLast: e.newPoolInfo.KLast.String(), + FeeRate: e.newPoolInfo.FeeRate, + TreasuryAccountIndex: e.newPoolInfo.TreasuryAccountIndex, + TreasuryRate: e.newPoolInfo.TreasuryRate, + } + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[treasuryAccount.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateLiquidityIndexMap[txInfo.PairIndex] = statedb.StateCachePending + return nil +} + +func (e *RemoveLiquidityExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeRemoveLiquidity)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.PairIndex))) + packedAssetAAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetAAmountDelta) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetAAmountBytes) + packedAssetBAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetBAmountDelta) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetBAmountBytes) + LpAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.LpAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(LpAmountBytes) + KLastBytes, err := common2.AmountToPackedAmountBytes(txInfo.KLast) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(KLastBytes) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + treasuryAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.TreasuryAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(treasuryAmountBytes) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) 
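+	// The remove-liquidity encoding spans two data chunks; the four empty prefix-padded chunks written above keep the tx at the same fixed number of pubdata chunks as the other tx types in this change.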
+ return nil +} + +func (e *RemoveLiquidityExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + accounts := []int64{txInfo.FromAccountIndex, liquidityModel.TreasuryAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetAId, txInfo.AssetBId, txInfo.PairIndex, txInfo.GasFeeAssetId} + + err := bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + return err + } + + err = bc.StateDB().UpdateLiquidityTree(txInfo.PairIndex) + if err != nil { + return err + } + + return nil +} + +func (e *RemoveLiquidityExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *RemoveLiquidityExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + liquidityModel := e.bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return nil, err + } + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex, liquidityInfo.TreasuryAccountIndex}) + if err != nil { + return nil, err + } + + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + treasuryAccount := copiedAccounts[liquidityInfo.TreasuryAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + // from account asset A + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetAId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetAId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetAId, + txInfo.AssetAAmountDelta, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetAId].Balance = ffmath.Add(fromAccount.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmountDelta) + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset a balance") + } + + // from account asset B + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetBId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetBId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetBId, + txInfo.AssetBAmountDelta, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetBId].Balance = ffmath.Add(fromAccount.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmountDelta) + if fromAccount.AssetInfo[txInfo.AssetBId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset b balance") + } + + // from account asset gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + 
AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas asset balance") + } + + // from account lp + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.PairIndex].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.PairIndex, + types.ZeroBigInt, + ffmath.Neg(txInfo.LpAmount), + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Sub(fromAccount.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.LpAmount) + if fromAccount.AssetInfo[txInfo.PairIndex].LpAmount.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient lp amount") + } + + // treasury account + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.FungibleAssetType, + AccountIndex: treasuryAccount.AccountIndex, + AccountName: treasuryAccount.AccountName, + Balance: treasuryAccount.AssetInfo[txInfo.PairIndex].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.PairIndex, types.ZeroBigInt, txInfo.TreasuryAmount, types.ZeroBigInt, + ).String(), + Order: order, + Nonce: treasuryAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: treasuryAccount.CollectionNonce, + }) + treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount = ffmath.Add(treasuryAccount.AssetInfo[txInfo.PairIndex].LpAmount, txInfo.TreasuryAmount) + + // pool account info + basePool, err := types.ConstructLiquidityInfo( + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].PairIndex, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetAId, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetA, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetBId, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].AssetB, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].LpAmount, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].KLast, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].FeeRate, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].TreasuryAccountIndex, + e.bc.StateDB().LiquidityMap[txInfo.PairIndex].TreasuryRate, + ) + if err != nil { + return nil, err + } + + finalPoolA := ffmath.Add(liquidityInfo.AssetA, ffmath.Neg(txInfo.AssetAAmountDelta)) + finalPoolB := ffmath.Add(liquidityInfo.AssetB, ffmath.Neg(txInfo.AssetBAmountDelta)) + poolDeltaForToAccount := &types.LiquidityInfo{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetA: ffmath.Neg(txInfo.AssetAAmountDelta), + AssetBId: txInfo.AssetBId, + AssetB: ffmath.Neg(txInfo.AssetBAmountDelta), + LpAmount: ffmath.Neg(txInfo.LpAmount), + KLast: ffmath.Multiply(finalPoolA, finalPoolB), + FeeRate: liquidityInfo.FeeRate, + 
TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, + TreasuryRate: liquidityInfo.TreasuryRate, + } + newPool, err := chain.ComputeNewBalance(types.LiquidityAssetType, basePool.String(), poolDeltaForToAccount.String()) + if err != nil { + return nil, err + } + + newPoolInfo, err := types.ParseLiquidityInfo(newPool) + if err != nil { + return nil, err + } + e.newPoolInfo = newPoolInfo + if newPoolInfo.AssetA.Cmp(big.NewInt(0)) <= 0 || + newPoolInfo.AssetB.Cmp(big.NewInt(0)) <= 0 || + newPoolInfo.LpAmount.Cmp(big.NewInt(0)) < 0 || + newPoolInfo.KLast.Cmp(big.NewInt(0)) <= 0 { + return nil, errors.New("invalid new pool") + } + + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.LiquidityAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: basePool.String(), + BalanceDelta: poolDeltaForToAccount.String(), + Order: order, + Nonce: types.NilNonce, + AccountOrder: types.NilAccountOrder, + CollectionNonce: types.NilNonce, + }) + + // gas account asset gas + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *RemoveLiquidityExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeRemoveLiquidityMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: e.txInfo.PairIndex, + AssetId: types.NilAssetId, + TxAmount: e.txInfo.LpAmount.String(), + Memo: "", + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/swap_executor.go b/core/executor/swap_executor.go new file mode 100644 index 000000000..e37508449 --- /dev/null +++ b/core/executor/swap_executor.go @@ -0,0 +1,488 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type SwapExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.SwapTxInfo + + newPoolInfo *types.LiquidityInfo +} + +func NewSwapExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseSwapTxInfo(tx.TxInfo) + if err != nil { 
+ logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &SwapExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *SwapExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetAId, txInfo.AssetBId, txInfo.PairIndex, txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + err = e.bc.StateDB().PrepareLiquidity(txInfo.PairIndex) + if err != nil { + logx.Errorf("prepare liquidity failed: %s", err.Error()) + return errors.New("internal error") + } + + // check the other restrictions + err = e.fillTxInfo() + if err != nil { + return err + } + + return nil +} + +func (e *SwapExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + if txInfo.GasFeeAssetId != txInfo.AssetAId { + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("invalid gas asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(txInfo.AssetAAmount) < 0 { + return errors.New("invalid asset amount") + } + } else { + deltaBalance := ffmath.Add(txInfo.AssetAAmount, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(deltaBalance) < 0 { + return errors.New("invalid asset amount") + } + } + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return errors.New("internal error") + } + if !((liquidityModel.AssetAId == txInfo.AssetAId && liquidityModel.AssetBId == txInfo.AssetBId) || + (liquidityModel.AssetAId == txInfo.AssetBId && liquidityModel.AssetBId == txInfo.AssetAId)) { + return errors.New("invalid asset ids") + } + if liquidityInfo.AssetA == nil || liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 || + liquidityInfo.AssetB == nil || liquidityInfo.AssetB.Cmp(big.NewInt(0)) == 0 { + return errors.New("liquidity is empty") + } + + return nil +} + +func constructLiquidityInfo(liquidity *liquidity.Liquidity) (*types.LiquidityInfo, error) { + return types.ConstructLiquidityInfo( + liquidity.PairIndex, + liquidity.AssetAId, + liquidity.AssetA, + liquidity.AssetBId, + liquidity.AssetB, + liquidity.LpAmount, + liquidity.KLast, + liquidity.FeeRate, + liquidity.TreasuryAccountIndex, + liquidity.TreasuryRate, + ) +} + +func (e *SwapExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + // apply changes + fromAccountInfo := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + gasAccountInfo := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + fromAccountInfo.AssetInfo[txInfo.AssetAId].Balance = ffmath.Sub(fromAccountInfo.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmount) + fromAccountInfo.AssetInfo[txInfo.AssetBId].Balance = ffmath.Add(fromAccountInfo.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmountDelta) + fromAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance 
= ffmath.Add(gasAccountInfo.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccountInfo.Nonce++ + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + bc.StateDB().LiquidityMap[txInfo.PairIndex] = &liquidity.Liquidity{ + Model: liquidityModel.Model, + PairIndex: e.newPoolInfo.PairIndex, + AssetAId: liquidityModel.AssetAId, + AssetA: e.newPoolInfo.AssetA.String(), + AssetBId: liquidityModel.AssetBId, + AssetB: e.newPoolInfo.AssetB.String(), + LpAmount: e.newPoolInfo.LpAmount.String(), + KLast: e.newPoolInfo.KLast.String(), + FeeRate: e.newPoolInfo.FeeRate, + TreasuryAccountIndex: e.newPoolInfo.TreasuryAccountIndex, + TreasuryRate: e.newPoolInfo.TreasuryRate, + } + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateLiquidityIndexMap[txInfo.PairIndex] = statedb.StateCachePending + return nil +} + +func (e *SwapExecutor) fillTxInfo() error { + bc := e.bc + txInfo := e.txInfo + + liquidityModel := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return err + } + + // add details to tx info + var toDelta *big.Int + if liquidityInfo.AssetAId == txInfo.AssetAId && liquidityInfo.AssetBId == txInfo.AssetBId { + toDelta, _, err = chain.ComputeDelta( + liquidityInfo.AssetA, + liquidityInfo.AssetB, + liquidityInfo.AssetAId, + liquidityInfo.AssetBId, + txInfo.AssetAId, + true, + txInfo.AssetAAmount, + liquidityInfo.FeeRate, + ) + if err != nil { + return err + } + } else if liquidityInfo.AssetAId == txInfo.AssetBId && liquidityInfo.AssetBId == txInfo.AssetAId { + toDelta, _, err = chain.ComputeDelta( + liquidityInfo.AssetA, + liquidityInfo.AssetB, + liquidityInfo.AssetAId, + liquidityInfo.AssetBId, + txInfo.AssetBId, + true, + txInfo.AssetAAmount, + liquidityInfo.FeeRate, + ) + if err != nil { + return err + } + } + + if toDelta.Cmp(txInfo.AssetBMinAmount) < 0 { + return errors.New("invalid AssetBMinAmount") + } + txInfo.AssetBAmountDelta = toDelta + + return nil +} + +func (e *SwapExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeSwap)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.PairIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetAId))) + packedAssetAAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetAAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetAAmountBytes) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetBId))) + packedAssetBAmountDeltaBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetBAmountDelta) + if err != nil { + logx.Errorf("unable to convert amount to packed amount: %s", err.Error()) + return err + } + buf.Write(packedAssetBAmountDeltaBytes) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + 
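+	// The swap encoding fits in a single suffix-padded chunk; the writes below append empty prefix-padded chunks to reach the same fixed per-tx chunk count used by the other executors.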
buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *SwapExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetAId, txInfo.AssetBId, txInfo.GasFeeAssetId} + + err := bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + return err + } + + err = bc.StateDB().UpdateLiquidityTree(txInfo.PairIndex) + if err != nil { + return err + } + + return nil +} + +func (e *SwapExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *SwapExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + liquidityModel := e.bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidityInfo, err := constructLiquidityInfo(liquidityModel) + if err != nil { + logx.Errorf("construct liquidity info error, err: %v", err) + return nil, err + } + + txDetails := make([]*tx.TxDetail, 0, 4) + // from account asset A + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetAId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetAId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetAId, + ffmath.Neg(txInfo.AssetAAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetAId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetAId].Balance, txInfo.AssetAAmount) + if fromAccount.AssetInfo[txInfo.AssetAId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset a balance") + } + + // from account asset B + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetBId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetBId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetBId, + txInfo.AssetBAmountDelta, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetBId].Balance = ffmath.Add(fromAccount.AssetInfo[txInfo.AssetBId].Balance, txInfo.AssetBAmountDelta) + + // from account asset gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + 
AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // pool info + var poolDelta *types.LiquidityInfo + poolAssetBDelta := ffmath.Neg(txInfo.AssetBAmountDelta) + if txInfo.AssetAId == liquidityInfo.AssetAId { + poolDelta = &types.LiquidityInfo{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetAId, + AssetA: txInfo.AssetAAmount, + AssetBId: txInfo.AssetBId, + AssetB: poolAssetBDelta, + LpAmount: types.ZeroBigInt, + KLast: types.ZeroBigInt, + FeeRate: liquidityInfo.FeeRate, + TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, + TreasuryRate: liquidityInfo.TreasuryRate, + } + } else if txInfo.AssetAId == liquidityInfo.AssetBId { + poolDelta = &types.LiquidityInfo{ + PairIndex: txInfo.PairIndex, + AssetAId: txInfo.AssetBId, + AssetA: poolAssetBDelta, + AssetBId: txInfo.AssetAId, + AssetB: txInfo.AssetAAmount, + LpAmount: types.ZeroBigInt, + KLast: types.ZeroBigInt, + FeeRate: liquidityInfo.FeeRate, + TreasuryAccountIndex: liquidityInfo.TreasuryAccountIndex, + TreasuryRate: liquidityInfo.TreasuryRate, + } + } + + newPool, err := chain.ComputeNewBalance( + types.LiquidityAssetType, liquidityInfo.String(), poolDelta.String()) + if err != nil { + return nil, err + } + + nPoolInfo, err := types.ParseLiquidityInfo(newPool) + if err != nil { + return nil, err + } + e.newPoolInfo = nPoolInfo + + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.LiquidityAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: liquidityInfo.String(), + BalanceDelta: poolDelta.String(), + Order: order, + Nonce: 0, + AccountOrder: types.NilAccountOrder, + CollectionNonce: 0, + }) + + // gas account asset gas + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *SwapExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeSwapMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: e.txInfo.PairIndex, + AssetId: types.NilAssetId, + TxAmount: e.txInfo.AssetAAmount.String(), + Memo: 
"", + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/transfer_executor.go b/core/executor/transfer_executor.go new file mode 100644 index 000000000..dca3482e2 --- /dev/null +++ b/core/executor/transfer_executor.go @@ -0,0 +1,272 @@ +package executor + +import ( + "bytes" + "encoding/json" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type TransferExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.TransferTxInfo +} + +func NewTransferExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseTransferTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &TransferExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *TransferExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetId, txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + return nil +} + +func (e *TransferExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + toAccount := bc.StateDB().AccountMap[txInfo.ToAccountIndex] + if txInfo.ToAccountNameHash != toAccount.AccountNameHash { + return errors.New("invalid to account name hash") + } + if txInfo.GasFeeAssetId != txInfo.AssetId { + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("invalid gas asset amount") + } + if fromAccount.AssetInfo[txInfo.AssetId].Balance.Cmp(txInfo.AssetAmount) < 0 { + return errors.New("invalid asset amount") + } + } else { + deltaBalance := ffmath.Add(txInfo.AssetAmount, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.AssetId].Balance.Cmp(deltaBalance) < 0 { + return errors.New("invalid asset amount") + } + } + + return nil +} + +func (e *TransferExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + toAccount := bc.StateDB().AccountMap[txInfo.ToAccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + fromAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + toAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Add(toAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) 
+ gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.ToAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + return nil +} + +func (e *TransferExecutor) GeneratePubData() error { + txInfo := e.txInfo + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeTransfer)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.ToAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetId))) + packedAmountBytes, err := common2.AmountToPackedAmountBytes(txInfo.AssetAmount) + if err != nil { + return err + } + buf.Write(packedAmountBytes) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + return err + } + buf.Write(packedFeeBytes) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.CallDataHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *TransferExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + accounts := []int64{txInfo.FromAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetId, txInfo.GasFeeAssetId} + return bc.StateDB().UpdateAccountTree(accounts, assets) +} + +func (e *TransferExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *TransferExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex, txInfo.ToAccountIndex}) + if err != nil { + return nil, err + } + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + toAccount := copiedAccounts[txInfo.ToAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account asset A + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetId, ffmath.Neg(txInfo.AssetAmount), types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetId].Balance = 
ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + // from account asset gas + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + // to account asset a + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.ToAccountIndex, + AccountName: toAccount.AccountName, + Balance: toAccount.AssetInfo[txInfo.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetId, txInfo.AssetAmount, types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: toAccount.Nonce, + CollectionNonce: toAccount.CollectionNonce, + }) + toAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Add(toAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + // gas account asset gas + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + + return txDetails, nil +} + +func (e *TransferExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeTransferMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: e.txInfo.AssetAmount.String(), + Memo: e.txInfo.Memo, + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/transfer_nft_executor.go b/core/executor/transfer_nft_executor.go new file mode 100644 index 000000000..306abaff9 --- /dev/null +++ b/core/executor/transfer_nft_executor.go @@ -0,0 +1,315 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + 
"github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type TransferNftExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.TransferNftTxInfo +} + +func NewTransferNftExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseTransferNftTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &TransferNftExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *TransferNftExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + err = e.bc.StateDB().PrepareNft(txInfo.NftIndex) + if err != nil { + logx.Errorf("prepare nft failed") + return errors.New("internal error") + } + + return nil +} + +func (e *TransferNftExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.FromAccountIndex] + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("balance is not enough") + } + + toAccount := e.bc.StateDB().AccountMap[txInfo.ToAccountIndex] + if txInfo.ToAccountNameHash != toAccount.AccountNameHash { + return errors.New("invalid ToAccountNameHash") + } + + nft := e.bc.StateDB().NftMap[txInfo.NftIndex] + if nft.OwnerAccountIndex != txInfo.FromAccountIndex { + return errors.New("account is not owner of the nft") + } + + return nil +} + +func (e *TransferNftExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + nft := bc.StateDB().NftMap[txInfo.NftIndex] + + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + nft.OwnerAccountIndex = txInfo.ToAccountIndex + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + return nil +} + +func (e *TransferNftExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeTransferNft)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.ToAccountIndex))) + buf.Write(common2.Uint40ToBytes(txInfo.NftIndex)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := 
common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + return err + } + buf.Write(packedFeeBytes) + chunk := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.CallDataHash)) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubData = append(stateCache.PubData, pubData...) + return nil +} + +func (e *TransferNftExecutor) UpdateTrees() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + err = e.bc.StateDB().UpdateNftTree(txInfo.NftIndex) + if err != nil { + logx.Errorf("update nft tree error, err: %s", err.Error()) + return err + } + return nil +} + +func (e *TransferNftExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *TransferNftExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + nftModel := e.bc.StateDB().NftMap[txInfo.NftIndex] + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.ToAccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + toAccount := copiedAccounts[txInfo.ToAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account gas asset + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: fromAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // to account empty delta + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.ToAccountIndex, + AccountName: toAccount.AccountName, + Balance: toAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + types.ZeroBigInt, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: toAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: toAccount.CollectionNonce, 
+ }) + + // to account nft delta + oldNftInfo := &types.NftInfo{ + NftIndex: nftModel.NftIndex, + CreatorAccountIndex: nftModel.CreatorAccountIndex, + OwnerAccountIndex: nftModel.OwnerAccountIndex, + NftContentHash: nftModel.NftContentHash, + NftL1TokenId: nftModel.NftL1TokenId, + NftL1Address: nftModel.NftL1Address, + CreatorTreasuryRate: nftModel.CreatorTreasuryRate, + CollectionId: nftModel.CollectionId, + } + newNftInfo := &types.NftInfo{ + NftIndex: nftModel.NftIndex, + CreatorAccountIndex: nftModel.CreatorAccountIndex, + OwnerAccountIndex: txInfo.ToAccountIndex, + NftContentHash: nftModel.NftContentHash, + NftL1TokenId: nftModel.NftL1TokenId, + NftL1Address: nftModel.NftL1Address, + CreatorTreasuryRate: nftModel.CreatorTreasuryRate, + CollectionId: nftModel.CollectionId, + } + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: txInfo.ToAccountIndex, + AccountName: toAccount.AccountName, + Balance: oldNftInfo.String(), + BalanceDelta: newNftInfo.String(), + Order: order, + Nonce: toAccount.Nonce, + AccountOrder: types.NilAccountOrder, + CollectionNonce: toAccount.CollectionNonce, + }) + + // gas account gas asset + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + Nonce: gasAccount.Nonce, + AccountOrder: accountOrder, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *TransferNftExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeTransferNftMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: e.txInfo.NftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: types.NilAssetAmountStr, + Memo: "", + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/update_pair_rate_executor.go b/core/executor/update_pair_rate_executor.go new file mode 100644 index 000000000..dea6d07d3 --- /dev/null +++ b/core/executor/update_pair_rate_executor.go @@ -0,0 +1,176 @@ +package executor + +import ( + "bytes" + "encoding/json" + "errors" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type UpdatePairRateExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.UpdatePairRateTxInfo +} + +func NewUpdatePairRateExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseUpdatePairRateTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse update pair rate tx failed: %s", err.Error()) + return nil, 
errors.New("invalid tx info") + } + + return &UpdatePairRateExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *UpdatePairRateExecutor) Prepare() error { + txInfo := e.txInfo + + err := e.bc.StateDB().PrepareLiquidity(txInfo.PairIndex) + if err != nil { + logx.Errorf("prepare liquidity failed: %s", err.Error()) + return err + } + + return nil +} + +func (e *UpdatePairRateExecutor) VerifyInputs() error { + bc := e.bc + txInfo := e.txInfo + liquidity := bc.StateDB().LiquidityMap[txInfo.PairIndex] + + if liquidity.FeeRate == txInfo.FeeRate && + liquidity.TreasuryAccountIndex == txInfo.TreasuryAccountIndex && + liquidity.TreasuryRate == txInfo.TreasuryRate { + return errors.New("invalid update, the same to old") + } + + return nil +} + +func (e *UpdatePairRateExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + liquidity := bc.StateDB().LiquidityMap[txInfo.PairIndex] + liquidity.FeeRate = txInfo.FeeRate + liquidity.TreasuryAccountIndex = txInfo.TreasuryAccountIndex + liquidity.TreasuryRate = txInfo.TreasuryRate + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateLiquidityIndexMap[txInfo.PairIndex] = statedb.StateCachePending + return nil +} + +func (e *UpdatePairRateExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeUpdatePairRate)) + buf.Write(common.Uint16ToBytes(uint16(txInfo.PairIndex))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.FeeRate))) + buf.Write(common.Uint32ToBytes(uint32(txInfo.TreasuryAccountIndex))) + buf.Write(common.Uint16ToBytes(uint16(txInfo.TreasuryRate))) + chunk := common.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PriorityOperations++ + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PubData = append(stateCache.PubData, pubData...) 
+ return nil +} + +func (e *UpdatePairRateExecutor) UpdateTrees() error { + bc := e.bc + txInfo := e.txInfo + return bc.StateDB().UpdateLiquidityTree(txInfo.PairIndex) +} + +func (e *UpdatePairRateExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + e.tx.PairIndex = e.txInfo.PairIndex + return e.BaseExecutor.GetExecutedTx() +} + +func (e *UpdatePairRateExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + bc := e.bc + txInfo := e.txInfo + liquidity := bc.StateDB().LiquidityMap[txInfo.PairIndex] + baseLiquidity, err := types.ConstructLiquidityInfo( + liquidity.PairIndex, + liquidity.AssetAId, + liquidity.AssetA, + liquidity.AssetBId, + liquidity.AssetB, + liquidity.LpAmount, + liquidity.KLast, + liquidity.FeeRate, + liquidity.TreasuryAccountIndex, + liquidity.TreasuryRate, + ) + if err != nil { + return nil, err + } + deltaLiquidity := &types.LiquidityInfo{ + PairIndex: baseLiquidity.PairIndex, + AssetAId: baseLiquidity.AssetAId, + AssetA: big.NewInt(0), + AssetBId: baseLiquidity.AssetBId, + AssetB: big.NewInt(0), + LpAmount: big.NewInt(0), + KLast: baseLiquidity.KLast, + FeeRate: txInfo.FeeRate, + TreasuryAccountIndex: txInfo.TreasuryAccountIndex, + TreasuryRate: txInfo.TreasuryRate, + } + + txDetail := &tx.TxDetail{ + AssetId: txInfo.PairIndex, + AssetType: types.LiquidityAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: baseLiquidity.String(), + BalanceDelta: deltaLiquidity.String(), + Order: 0, + AccountOrder: types.NilAccountOrder, + Nonce: types.NilNonce, + CollectionNonce: types.NilNonce, + } + + return []*tx.TxDetail{txDetail}, nil +} + +func (e *UpdatePairRateExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + return nil, nil +} diff --git a/core/executor/withdraw_executor.go b/core/executor/withdraw_executor.go new file mode 100644 index 000000000..33e3b1a2f --- /dev/null +++ b/core/executor/withdraw_executor.go @@ -0,0 +1,265 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type WithdrawExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.WithdrawTxInfo +} + +func NewWithdrawExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseWithdrawTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &WithdrawExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *WithdrawExecutor) Prepare() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetId, txInfo.GasFeeAssetId} + err := e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return err 
+ } + + return nil +} + +func (e *WithdrawExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.FromAccountIndex] + if txInfo.GasFeeAssetId != txInfo.AssetId { + if fromAccount.AssetInfo[txInfo.AssetId].Balance.Cmp(txInfo.AssetAmount) < 0 { + return errors.New("invalid asset amount") + } + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("invalid gas asset amount") + } + } else { + deltaBalance := ffmath.Add(txInfo.AssetAmount, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.AssetId].Balance.Cmp(deltaBalance) < 0 { + return errors.New("invalid asset amount") + } + } + + return nil +} + +func (e *WithdrawExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + fromAccount := bc.StateDB().AccountMap[txInfo.FromAccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + // apply changes + fromAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.FromAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + + return nil +} + +func (e *WithdrawExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeWithdraw)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.FromAccountIndex))) + buf.Write(common2.AddressStrToBytes(txInfo.ToAddress)) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.AssetId))) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(common2.Uint128ToBytes(txInfo.AssetAmount)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + buf.Write(common2.PrefixPaddingBufToChunkSize([]byte{})) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PendingOnChainOperationsPubData = append(stateCache.PendingOnChainOperationsPubData, pubData) + stateCache.PendingOnChainOperationsHash = common2.ConcatKeccakHash(stateCache.PendingOnChainOperationsHash, pubData) + stateCache.PubData = append(stateCache.PubData, pubData...) 
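+	// The withdraw pub data is stored both in the plain block pub data and in
+	// PendingOnChainOperationsPubData / PendingOnChainOperationsHash, since a
+	// withdrawal also has to be executed on L1 once the block is verified
+	// (inferred from the field naming; the L1 contract side is not part of this change).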
+ return nil +} + +func (e *WithdrawExecutor) UpdateTrees() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.AssetId, txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + return nil +} + +func (e *WithdrawExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *WithdrawExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.FromAccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + fromAccount := copiedAccounts[txInfo.FromAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 3) + // from account asset A + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.AssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.AssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.AssetId, ffmath.Neg(txInfo.AssetAmount), types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.AssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.AssetId].Balance, txInfo.AssetAmount) + if fromAccount.AssetInfo[txInfo.AssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient asset a balance") + } + + order++ + // from account asset gas + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.FromAccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, ffmath.Neg(txInfo.GasFeeAssetAmount), types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas balance") + } + + // gas account asset gas + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, txInfo.GasFeeAssetAmount, types.ZeroBigInt, types.ZeroBigInt).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *WithdrawExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, 
err := legendTxTypes.ComputeWithdrawMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: e.txInfo.AssetId, + TxAmount: e.txInfo.AssetAmount.String(), + Memo: "", + NativeAddress: e.txInfo.ToAddress, + AccountIndex: e.txInfo.FromAccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/executor/withdraw_nft_executor.go b/core/executor/withdraw_nft_executor.go new file mode 100644 index 000000000..ad0016b3f --- /dev/null +++ b/core/executor/withdraw_nft_executor.go @@ -0,0 +1,340 @@ +package executor + +import ( + "bytes" + "encoding/json" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/pkg/errors" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/core/statedb" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +type WithdrawNftExecutor struct { + BaseExecutor + + txInfo *legendTxTypes.WithdrawNftTxInfo +} + +func NewWithdrawNftExecutor(bc IBlockchain, tx *tx.Tx) (TxExecutor, error) { + txInfo, err := types.ParseWithdrawNftTxInfo(tx.TxInfo) + if err != nil { + logx.Errorf("parse transfer tx failed: %s", err.Error()) + return nil, errors.New("invalid tx info") + } + + return &WithdrawNftExecutor{ + BaseExecutor: BaseExecutor{ + bc: bc, + tx: tx, + iTxInfo: txInfo, + }, + txInfo: txInfo, + }, nil +} + +func (e *WithdrawNftExecutor) Prepare() error { + txInfo := e.txInfo + + err := e.bc.StateDB().PrepareNft(txInfo.NftIndex) + if err != nil { + logx.Errorf("prepare nft failed") + return errors.New("internal error") + } + nftInfo := e.bc.StateDB().NftMap[txInfo.NftIndex] + + accounts := []int64{txInfo.AccountIndex, nftInfo.CreatorAccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + err = e.bc.StateDB().PrepareAccountsAndAssets(accounts, assets) + if err != nil { + logx.Errorf("prepare accounts and assets failed: %s", err.Error()) + return errors.New("internal error") + } + + creatorAccount := e.bc.StateDB().AccountMap[nftInfo.CreatorAccountIndex] + + // add details to tx info + txInfo.CreatorAccountIndex = nftInfo.CreatorAccountIndex + txInfo.CreatorAccountNameHash = common.FromHex(creatorAccount.AccountNameHash) + txInfo.CreatorTreasuryRate = nftInfo.CreatorTreasuryRate + txInfo.NftContentHash = common.FromHex(nftInfo.NftContentHash) + txInfo.NftL1Address = nftInfo.NftL1Address + txInfo.NftL1TokenId, _ = new(big.Int).SetString(nftInfo.NftL1TokenId, 10) + txInfo.CollectionId = nftInfo.CollectionId + + return nil +} + +func (e *WithdrawNftExecutor) VerifyInputs() error { + txInfo := e.txInfo + + err := e.BaseExecutor.VerifyInputs() + if err != nil { + return err + } + + fromAccount := e.bc.StateDB().AccountMap[txInfo.AccountIndex] + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(txInfo.GasFeeAssetAmount) < 0 { + return errors.New("balance is 
not enough") + } + + nftInfo := e.bc.StateDB().NftMap[txInfo.NftIndex] + if nftInfo.OwnerAccountIndex != txInfo.AccountIndex { + return errors.New("account is not owner of the nft") + } + + return nil +} + +func (e *WithdrawNftExecutor) ApplyTransaction() error { + bc := e.bc + txInfo := e.txInfo + + oldNft := bc.StateDB().NftMap[txInfo.NftIndex] + fromAccount := bc.StateDB().AccountMap[txInfo.AccountIndex] + gasAccount := bc.StateDB().AccountMap[txInfo.GasAccountIndex] + + // apply changes + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Add(gasAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + fromAccount.Nonce++ + + newNftInfo := types.EmptyNftInfo(txInfo.NftIndex) + bc.StateDB().NftMap[txInfo.NftIndex] = &nft.L2Nft{ + Model: oldNft.Model, + NftIndex: newNftInfo.NftIndex, + CreatorAccountIndex: newNftInfo.CreatorAccountIndex, + OwnerAccountIndex: newNftInfo.OwnerAccountIndex, + NftContentHash: newNftInfo.NftContentHash, + NftL1Address: newNftInfo.NftL1Address, + NftL1TokenId: newNftInfo.NftL1TokenId, + CreatorTreasuryRate: newNftInfo.CreatorTreasuryRate, + CollectionId: newNftInfo.CollectionId, + } + + stateCache := e.bc.StateDB() + stateCache.PendingUpdateAccountIndexMap[txInfo.AccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateAccountIndexMap[txInfo.GasAccountIndex] = statedb.StateCachePending + stateCache.PendingUpdateNftIndexMap[txInfo.NftIndex] = statedb.StateCachePending + + return nil +} + +func (e *WithdrawNftExecutor) GeneratePubData() error { + txInfo := e.txInfo + + var buf bytes.Buffer + buf.WriteByte(uint8(types.TxTypeWithdrawNft)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.AccountIndex))) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.CreatorAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CreatorTreasuryRate))) + buf.Write(common2.Uint40ToBytes(txInfo.NftIndex)) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.CollectionId))) + chunk1 := common2.SuffixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(common2.AddressStrToBytes(txInfo.NftL1Address)) + chunk2 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(common2.AddressStrToBytes(txInfo.ToAddress)) + buf.Write(common2.Uint32ToBytes(uint32(txInfo.GasAccountIndex))) + buf.Write(common2.Uint16ToBytes(uint16(txInfo.GasFeeAssetId))) + packedFeeBytes, err := common2.FeeToPackedFeeBytes(txInfo.GasFeeAssetAmount) + if err != nil { + logx.Errorf("unable to convert amount to packed fee amount: %s", err.Error()) + return err + } + buf.Write(packedFeeBytes) + chunk3 := common2.PrefixPaddingBufToChunkSize(buf.Bytes()) + buf.Reset() + buf.Write(chunk1) + buf.Write(chunk2) + buf.Write(chunk3) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.NftContentHash)) + buf.Write(common2.Uint256ToBytes(txInfo.NftL1TokenId)) + buf.Write(common2.PrefixPaddingBufToChunkSize(txInfo.CreatorAccountNameHash)) + pubData := buf.Bytes() + + stateCache := e.bc.StateDB() + stateCache.PubDataOffset = append(stateCache.PubDataOffset, uint32(len(stateCache.PubData))) + stateCache.PendingOnChainOperationsPubData = append(stateCache.PendingOnChainOperationsPubData, pubData) + stateCache.PendingOnChainOperationsHash = common2.ConcatKeccakHash(stateCache.PendingOnChainOperationsHash, pubData) + stateCache.PubData = append(stateCache.PubData, pubData...) 
+ return nil +} + +func (e *WithdrawNftExecutor) UpdateTrees() error { + txInfo := e.txInfo + + accounts := []int64{txInfo.AccountIndex, txInfo.GasAccountIndex} + assets := []int64{txInfo.GasFeeAssetId} + + err := e.bc.StateDB().UpdateAccountTree(accounts, assets) + if err != nil { + logx.Errorf("update account tree error, err: %s", err.Error()) + return err + } + + err = e.bc.StateDB().UpdateNftTree(txInfo.NftIndex) + if err != nil { + logx.Errorf("update nft tree error, err: %s", err.Error()) + return err + } + return nil +} + +func (e *WithdrawNftExecutor) GetExecutedTx() (*tx.Tx, error) { + txInfoBytes, err := json.Marshal(e.txInfo) + if err != nil { + logx.Errorf("unable to marshal tx, err: %s", err.Error()) + return nil, errors.New("unmarshal tx failed") + } + + e.tx.TxInfo = string(txInfoBytes) + return e.BaseExecutor.GetExecutedTx() +} + +func (e *WithdrawNftExecutor) GenerateTxDetails() ([]*tx.TxDetail, error) { + txInfo := e.txInfo + nftModel := e.bc.StateDB().NftMap[txInfo.NftIndex] + + copiedAccounts, err := e.bc.StateDB().DeepCopyAccounts([]int64{txInfo.AccountIndex, txInfo.CreatorAccountIndex, txInfo.GasAccountIndex}) + if err != nil { + return nil, err + } + + fromAccount := copiedAccounts[txInfo.AccountIndex] + creatorAccount := copiedAccounts[txInfo.CreatorAccountIndex] + gasAccount := copiedAccounts[txInfo.GasAccountIndex] + + txDetails := make([]*tx.TxDetail, 0, 4) + + // from account gas asset + order := int64(0) + accountOrder := int64(0) + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.AccountIndex, + AccountName: fromAccount.AccountName, + Balance: fromAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + ffmath.Neg(txInfo.GasFeeAssetAmount), + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance = ffmath.Sub(fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance, txInfo.GasFeeAssetAmount) + if fromAccount.AssetInfo[txInfo.GasFeeAssetId].Balance.Cmp(big.NewInt(0)) < 0 { + return nil, errors.New("insufficient gas fee balance") + } + + // nft delta + order++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.NftIndex, + AssetType: types.NftAssetType, + AccountIndex: types.NilTxAccountIndex, + AccountName: types.NilAccountName, + Balance: types.ConstructNftInfo( + nftModel.NftIndex, + nftModel.CreatorAccountIndex, + nftModel.OwnerAccountIndex, + nftModel.NftContentHash, + nftModel.NftL1TokenId, + nftModel.NftL1Address, + nftModel.CreatorTreasuryRate, + nftModel.CollectionId, + ).String(), + BalanceDelta: types.EmptyNftInfo(txInfo.NftIndex).String(), + Order: order, + AccountOrder: types.NilAccountOrder, + Nonce: fromAccount.Nonce, + CollectionNonce: fromAccount.CollectionNonce, + }) + + // create account empty delta + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.CreatorAccountIndex, + AccountName: creatorAccount.AccountName, + Balance: creatorAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + types.ZeroBigInt, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: creatorAccount.Nonce, + 
CollectionNonce: creatorAccount.CollectionNonce, + }) + + // gas account gas asset + order++ + accountOrder++ + txDetails = append(txDetails, &tx.TxDetail{ + AssetId: txInfo.GasFeeAssetId, + AssetType: types.FungibleAssetType, + AccountIndex: txInfo.GasAccountIndex, + AccountName: gasAccount.AccountName, + Balance: gasAccount.AssetInfo[txInfo.GasFeeAssetId].String(), + BalanceDelta: types.ConstructAccountAsset( + txInfo.GasFeeAssetId, + txInfo.GasFeeAssetAmount, + types.ZeroBigInt, + types.ZeroBigInt, + ).String(), + Order: order, + AccountOrder: accountOrder, + Nonce: gasAccount.Nonce, + CollectionNonce: gasAccount.CollectionNonce, + }) + return txDetails, nil +} + +func (e *WithdrawNftExecutor) GenerateMempoolTx() (*mempool.MempoolTx, error) { + hash, err := legendTxTypes.ComputeWithdrawNftMsgHash(e.txInfo, mimc.NewMiMC()) + if err != nil { + return nil, err + } + txHash := common.Bytes2Hex(hash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + TxType: e.tx.TxType, + GasFeeAssetId: e.txInfo.GasFeeAssetId, + GasFee: e.txInfo.GasFeeAssetAmount.String(), + NftIndex: e.txInfo.NftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: types.NilAssetAmountStr, + Memo: "", + NativeAddress: e.txInfo.ToAddress, + AccountIndex: e.txInfo.AccountIndex, + Nonce: e.txInfo.Nonce, + ExpiredAt: e.txInfo.ExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + TxInfo: e.tx.TxInfo, + } + return mempoolTx, nil +} diff --git a/core/statedb/chaindb.go b/core/statedb/chaindb.go new file mode 100644 index 000000000..8e7491d3d --- /dev/null +++ b/core/statedb/chaindb.go @@ -0,0 +1,47 @@ +package statedb + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" +) + +type ChainDB struct { + // Block Chain data + BlockModel block.BlockModel + TxModel tx.TxModel + TxDetailModel tx.TxDetailModel + + // State DB + AccountModel account.AccountModel + AccountHistoryModel account.AccountHistoryModel + L2AssetInfoModel asset.AssetModel + LiquidityModel liquidity.LiquidityModel + LiquidityHistoryModel liquidity.LiquidityHistoryModel + L2NftModel nft.L2NftModel + L2NftHistoryModel nft.L2NftHistoryModel + MempoolModel mempool.MempoolModel +} + +func NewChainDB(db *gorm.DB) *ChainDB { + return &ChainDB{ + BlockModel: block.NewBlockModel(db), + TxModel: tx.NewTxModel(db), + TxDetailModel: tx.NewTxDetailModel(db), + + AccountModel: account.NewAccountModel(db), + AccountHistoryModel: account.NewAccountHistoryModel(db), + L2AssetInfoModel: asset.NewAssetModel(db), + LiquidityModel: liquidity.NewLiquidityModel(db), + LiquidityHistoryModel: liquidity.NewLiquidityHistoryModel(db), + L2NftModel: nft.NewL2NftModel(db), + L2NftHistoryModel: nft.NewL2NftHistoryModel(db), + MempoolModel: mempool.NewMempoolModel(db), + } +} diff --git a/core/statedb/state_cache.go b/core/statedb/state_cache.go new file mode 100644 index 000000000..26cd564ec --- /dev/null +++ b/core/statedb/state_cache.go @@ -0,0 +1,59 @@ +package statedb + +import ( + "github.com/ethereum/go-ethereum/common" + + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +const ( + _ = iota + StateCachePending + StateCacheCached +) + +type StateCache struct { + StateRoot 
string + // Updated in executor's GeneratePubData method. + PubData []byte + PriorityOperations int64 + PubDataOffset []uint32 + PendingOnChainOperationsPubData [][]byte + PendingOnChainOperationsHash []byte + Txs []*tx.Tx + + // Updated in executor's ApplyTransaction method. + PendingNewAccountIndexMap map[int64]int + PendingNewLiquidityIndexMap map[int64]int + PendingNewNftIndexMap map[int64]int + PendingUpdateAccountIndexMap map[int64]int + PendingUpdateLiquidityIndexMap map[int64]int + PendingUpdateNftIndexMap map[int64]int +} + +func NewStateCache(stateRoot string) *StateCache { + return &StateCache{ + StateRoot: stateRoot, + Txs: make([]*tx.Tx, 0), + + PendingNewAccountIndexMap: make(map[int64]int, 0), + PendingNewLiquidityIndexMap: make(map[int64]int, 0), + PendingNewNftIndexMap: make(map[int64]int, 0), + PendingUpdateAccountIndexMap: make(map[int64]int, 0), + PendingUpdateLiquidityIndexMap: make(map[int64]int, 0), + PendingUpdateNftIndexMap: make(map[int64]int, 0), + + PubData: make([]byte, 0), + PriorityOperations: 0, + PubDataOffset: make([]uint32, 0), + PendingOnChainOperationsPubData: make([][]byte, 0), + PendingOnChainOperationsHash: common.FromHex(types.EmptyStringKeccak), + } +} + +func (c *StateCache) AlignPubData(blockSize int) { + emptyPubdata := make([]byte, (blockSize-len(c.Txs))*32*std.PubDataSizePerTx) + c.PubData = append(c.PubData, emptyPubdata...) +} diff --git a/core/statedb/statedb.go b/core/statedb/statedb.go new file mode 100644 index 000000000..a08c87d22 --- /dev/null +++ b/core/statedb/statedb.go @@ -0,0 +1,574 @@ +package statedb + +import ( + "context" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + + bsmt "github.com/bnb-chain/zkbas-smt" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/dbcache" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +type StateDB struct { + dryRun bool + // State cache + *StateCache + chainDb *ChainDB + redisCache dbcache.Cache + + // Flat state + AccountMap map[int64]*types.AccountInfo + LiquidityMap map[int64]*liquidity.Liquidity + NftMap map[int64]*nft.L2Nft + + // Tree state + AccountTree bsmt.SparseMerkleTree + LiquidityTree bsmt.SparseMerkleTree + NftTree bsmt.SparseMerkleTree + AccountAssetTrees []bsmt.SparseMerkleTree + TreeCtx *tree.Context +} + +func NewStateDB(treeCtx *tree.Context, chainDb *ChainDB, redisCache dbcache.Cache, stateRoot string, curHeight int64) (*StateDB, error) { + err := tree.SetupTreeDB(treeCtx) + if err != nil { + logx.Error("setup tree db failed: ", err) + return nil, err + } + accountTree, accountAssetTrees, err := tree.InitAccountTree( + chainDb.AccountModel, + chainDb.AccountHistoryModel, + curHeight, + treeCtx, + ) + if err != nil { + logx.Error("dbinitializer account tree failed:", err) + return nil, err + } + liquidityTree, err := tree.InitLiquidityTree( + chainDb.LiquidityHistoryModel, + curHeight, + treeCtx, + ) + if err != nil { + logx.Error("dbinitializer liquidity tree failed:", err) + return nil, err + } + nftTree, err := tree.InitNftTree( + chainDb.L2NftHistoryModel, + curHeight, + treeCtx, + ) + if err != nil { + logx.Error("dbinitializer nft tree failed:", err) + return nil, err + } + return &StateDB{ + StateCache: NewStateCache(stateRoot), + chainDb: chainDb, + redisCache: redisCache, + 
AccountMap: make(map[int64]*types.AccountInfo), + LiquidityMap: make(map[int64]*liquidity.Liquidity), + NftMap: make(map[int64]*nft.L2Nft), + + AccountTree: accountTree, + LiquidityTree: liquidityTree, + NftTree: nftTree, + AccountAssetTrees: accountAssetTrees, + TreeCtx: treeCtx, + }, nil +} + +func NewStateDBForDryRun(redisCache dbcache.Cache, chainDb *ChainDB) *StateDB { + return &StateDB{ + dryRun: true, + redisCache: redisCache, + chainDb: chainDb, + AccountMap: make(map[int64]*types.AccountInfo), + LiquidityMap: make(map[int64]*liquidity.Liquidity), + NftMap: make(map[int64]*nft.L2Nft), + StateCache: NewStateCache(""), + } +} + +func (s *StateDB) GetAccount(accountIndex int64) interface{} { + // to save account to cache, we need to convert it + account, err := chain.FromFormatAccountInfo(s.AccountMap[accountIndex]) + if err != nil { + return nil + } + return account +} + +func (s *StateDB) GetLiquidity(pairIndex int64) interface{} { + return s.LiquidityMap[pairIndex] +} + +func (s *StateDB) GetNft(nftIndex int64) interface{} { + return s.NftMap[nftIndex] +} + +func (s *StateDB) syncPendingStateToRedis(pendingMap map[int64]int, getKey func(int64) string, getValue func(int64) interface{}) error { + for index, status := range pendingMap { + if status != StateCachePending { + continue + } + + err := s.redisCache.Set(context.Background(), getKey(index), getValue(index)) + if err != nil { + return fmt.Errorf("cache to redis failed: %v", err) + } + pendingMap[index] = StateCacheCached + } + + return nil +} + +func (s *StateDB) SyncStateCacheToRedis() error { + + // Sync new create to cache. + err := s.syncPendingStateToRedis(s.PendingNewAccountIndexMap, dbcache.AccountKeyByIndex, s.GetAccount) + if err != nil { + return err + } + err = s.syncPendingStateToRedis(s.PendingNewLiquidityIndexMap, dbcache.LiquidityKeyByIndex, s.GetLiquidity) + if err != nil { + return err + } + err = s.syncPendingStateToRedis(s.PendingNewNftIndexMap, dbcache.NftKeyByIndex, s.GetNft) + if err != nil { + return err + } + + // Sync pending update to cache. 
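+	// Entries flushed above were marked StateCacheCached inside syncPendingStateToRedis,
+	// so re-running this method after a partial failure only rewrites keys that are
+	// still pending.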
+ err = s.syncPendingStateToRedis(s.PendingUpdateAccountIndexMap, dbcache.AccountKeyByIndex, s.GetAccount) + if err != nil { + return err + } + err = s.syncPendingStateToRedis(s.PendingUpdateLiquidityIndexMap, dbcache.LiquidityKeyByIndex, s.GetLiquidity) + if err != nil { + return err + } + err = s.syncPendingStateToRedis(s.PendingUpdateNftIndexMap, dbcache.NftKeyByIndex, s.GetNft) + if err != nil { + return err + } + + return nil +} + +func (s *StateDB) PurgeCache(stateRoot string) { + s.StateCache = NewStateCache(stateRoot) +} + +func (s *StateDB) GetPendingAccount(blockHeight int64) ([]*account.Account, []*account.Account, []*account.AccountHistory, error) { + pendingNewAccount := make([]*account.Account, 0) + pendingUpdateAccount := make([]*account.Account, 0) + pendingNewAccountHistory := make([]*account.AccountHistory, 0) + + for index, status := range s.PendingNewAccountIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + newAccount, err := chain.FromFormatAccountInfo(s.AccountMap[index]) + if err != nil { + return nil, nil, nil, err + } + + pendingNewAccount = append(pendingNewAccount, newAccount) + pendingNewAccountHistory = append(pendingNewAccountHistory, &account.AccountHistory{ + AccountIndex: newAccount.AccountIndex, + Nonce: newAccount.Nonce, + CollectionNonce: newAccount.CollectionNonce, + AssetInfo: newAccount.AssetInfo, + AssetRoot: newAccount.AssetRoot, + L2BlockHeight: blockHeight, // TODO: ensure this should be the new block's height. + }) + } + + for index, status := range s.PendingUpdateAccountIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + if _, exist := s.PendingNewAccountIndexMap[index]; exist { + continue + } + + newAccount, err := chain.FromFormatAccountInfo(s.AccountMap[index]) + if err != nil { + return nil, nil, nil, err + } + pendingUpdateAccount = append(pendingUpdateAccount, newAccount) + pendingNewAccountHistory = append(pendingNewAccountHistory, &account.AccountHistory{ + AccountIndex: newAccount.AccountIndex, + Nonce: newAccount.Nonce, + CollectionNonce: newAccount.CollectionNonce, + AssetInfo: newAccount.AssetInfo, + AssetRoot: newAccount.AssetRoot, + L2BlockHeight: blockHeight, // TODO: ensure this should be the new block's height. 
+ }) + } + + return pendingNewAccount, pendingUpdateAccount, pendingNewAccountHistory, nil +} + +func (s *StateDB) GetPendingLiquidity(blockHeight int64) ([]*liquidity.Liquidity, []*liquidity.Liquidity, []*liquidity.LiquidityHistory, error) { + pendingNewLiquidity := make([]*liquidity.Liquidity, 0) + pendingUpdateLiquidity := make([]*liquidity.Liquidity, 0) + pendingNewLiquidityHistory := make([]*liquidity.LiquidityHistory, 0) + + for index, status := range s.PendingNewLiquidityIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + newLiquidity := s.LiquidityMap[index] + pendingNewLiquidity = append(pendingNewLiquidity, newLiquidity) + pendingNewLiquidityHistory = append(pendingNewLiquidityHistory, &liquidity.LiquidityHistory{ + PairIndex: newLiquidity.PairIndex, + AssetAId: newLiquidity.AssetAId, + AssetA: newLiquidity.AssetA, + AssetBId: newLiquidity.AssetBId, + AssetB: newLiquidity.AssetB, + LpAmount: newLiquidity.LpAmount, + KLast: newLiquidity.KLast, + FeeRate: newLiquidity.FeeRate, + TreasuryAccountIndex: newLiquidity.TreasuryAccountIndex, + TreasuryRate: newLiquidity.TreasuryRate, + L2BlockHeight: blockHeight, + }) + } + + for index, status := range s.PendingUpdateLiquidityIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + if _, exist := s.PendingNewLiquidityIndexMap[index]; exist { + continue + } + + newLiquidity := s.LiquidityMap[index] + pendingUpdateLiquidity = append(pendingUpdateLiquidity, newLiquidity) + pendingNewLiquidityHistory = append(pendingNewLiquidityHistory, &liquidity.LiquidityHistory{ + PairIndex: newLiquidity.PairIndex, + AssetAId: newLiquidity.AssetAId, + AssetA: newLiquidity.AssetA, + AssetBId: newLiquidity.AssetBId, + AssetB: newLiquidity.AssetB, + LpAmount: newLiquidity.LpAmount, + KLast: newLiquidity.KLast, + FeeRate: newLiquidity.FeeRate, + TreasuryAccountIndex: newLiquidity.TreasuryAccountIndex, + TreasuryRate: newLiquidity.TreasuryRate, + L2BlockHeight: blockHeight, + }) + } + + return pendingNewLiquidity, pendingUpdateLiquidity, pendingNewLiquidityHistory, nil +} + +func (s *StateDB) GetPendingNft(blockHeight int64) ([]*nft.L2Nft, []*nft.L2Nft, []*nft.L2NftHistory, error) { + pendingNewNft := make([]*nft.L2Nft, 0) + pendingUpdateNft := make([]*nft.L2Nft, 0) + pendingNewNftHistory := make([]*nft.L2NftHistory, 0) + + for index, status := range s.PendingNewNftIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + newNft := s.NftMap[index] + pendingNewNft = append(pendingNewNft, newNft) + pendingNewNftHistory = append(pendingNewNftHistory, &nft.L2NftHistory{ + NftIndex: newNft.NftIndex, + CreatorAccountIndex: newNft.CreatorAccountIndex, + OwnerAccountIndex: newNft.OwnerAccountIndex, + NftContentHash: newNft.NftContentHash, + NftL1Address: newNft.NftL1Address, + NftL1TokenId: newNft.NftL1TokenId, + CreatorTreasuryRate: newNft.CreatorTreasuryRate, + CollectionId: newNft.CollectionId, + L2BlockHeight: blockHeight, + }) + } + + for index, status := range s.PendingUpdateNftIndexMap { + if status < StateCachePending { + logx.Errorf("unexpected 0 status in Statedb cache") + continue + } + + if _, exist := s.PendingNewNftIndexMap[index]; exist { + continue + } + + newNft := s.NftMap[index] + pendingUpdateNft = append(pendingUpdateNft, newNft) + pendingNewNftHistory = append(pendingNewNftHistory, &nft.L2NftHistory{ + NftIndex: newNft.NftIndex, + CreatorAccountIndex: 
newNft.CreatorAccountIndex, + OwnerAccountIndex: newNft.OwnerAccountIndex, + NftContentHash: newNft.NftContentHash, + NftL1Address: newNft.NftL1Address, + NftL1TokenId: newNft.NftL1TokenId, + CreatorTreasuryRate: newNft.CreatorTreasuryRate, + CollectionId: newNft.CollectionId, + L2BlockHeight: blockHeight, + }) + } + + return pendingNewNft, pendingUpdateNft, pendingNewNftHistory, nil +} + +func (s *StateDB) DeepCopyAccounts(accountIds []int64) (map[int64]*types.AccountInfo, error) { + accounts := make(map[int64]*types.AccountInfo) + if len(accountIds) == 0 { + return accounts, nil + } + + for _, accountId := range accountIds { + if _, ok := accounts[accountId]; ok { + continue + } + + accountCopy, err := s.AccountMap[accountId].DeepCopy() + if err != nil { + return nil, err + } + accounts[accountId] = accountCopy + } + + return accounts, nil +} + +func (s *StateDB) PrepareAccountsAndAssets(accounts []int64, assets []int64) error { + for _, accountIndex := range accounts { + if s.dryRun { + account := &account.Account{} + redisAccount, err := s.redisCache.Get(context.Background(), dbcache.AccountKeyByIndex(accountIndex), account) + if err == nil && redisAccount != nil { + formatAccount, err := chain.ToFormatAccountInfo(account) + if err == nil { + s.AccountMap[accountIndex] = formatAccount + } + } + } + + if s.AccountMap[accountIndex] == nil { + accountInfo, err := s.chainDb.AccountModel.GetAccountByIndex(accountIndex) + if err != nil { + return err + } + s.AccountMap[accountIndex], err = chain.ToFormatAccountInfo(accountInfo) + if err != nil { + return fmt.Errorf("convert to format account info failed: %v", err) + } + } + if s.AccountMap[accountIndex].AssetInfo == nil { + s.AccountMap[accountIndex].AssetInfo = make(map[int64]*types.AccountAsset) + } + for _, assetId := range assets { + if s.AccountMap[accountIndex].AssetInfo[assetId] == nil { + s.AccountMap[accountIndex].AssetInfo[assetId] = &types.AccountAsset{ + AssetId: assetId, + Balance: types.ZeroBigInt, + LpAmount: types.ZeroBigInt, + OfferCanceledOrFinalized: types.ZeroBigInt, + } + } + } + } + + return nil +} + +func (s *StateDB) PrepareLiquidity(pairIndex int64) error { + if s.dryRun { + l := &liquidity.Liquidity{} + redisLiquidity, err := s.redisCache.Get(context.Background(), dbcache.LiquidityKeyByIndex(pairIndex), l) + if err == nil && redisLiquidity != nil { + s.LiquidityMap[pairIndex] = l + } + } + + if s.LiquidityMap[pairIndex] == nil { + liquidityInfo, err := s.chainDb.LiquidityModel.GetLiquidityByPairIndex(pairIndex) + if err != nil { + return err + } + s.LiquidityMap[pairIndex] = liquidityInfo + } + return nil +} + +func (s *StateDB) PrepareNft(nftIndex int64) error { + if s.dryRun { + n := &nft.L2Nft{} + redisNft, err := s.redisCache.Get(context.Background(), dbcache.NftKeyByIndex(nftIndex), n) + if err == nil && redisNft != nil { + s.NftMap[nftIndex] = n + } + } + + if s.NftMap[nftIndex] == nil { + nftAsset, err := s.chainDb.L2NftModel.GetNftAsset(nftIndex) + if err != nil { + return err + } + s.NftMap[nftIndex] = nftAsset + } + return nil +} + +func (s *StateDB) UpdateAccountTree(accounts []int64, assets []int64) error { + for _, accountIndex := range accounts { + for _, assetId := range assets { + assetLeaf, err := tree.ComputeAccountAssetLeafHash( + s.AccountMap[accountIndex].AssetInfo[assetId].Balance.String(), + s.AccountMap[accountIndex].AssetInfo[assetId].LpAmount.String(), + s.AccountMap[accountIndex].AssetInfo[assetId].OfferCanceledOrFinalized.String(), + ) + if err != nil { + return fmt.Errorf("compute new 
account asset leaf failed: %v", err) + } + err = s.AccountAssetTrees[accountIndex].Set(uint64(assetId), assetLeaf) + if err != nil { + return fmt.Errorf("update asset tree failed: %v", err) + } + } + + s.AccountMap[accountIndex].AssetRoot = common.Bytes2Hex(s.AccountAssetTrees[accountIndex].Root()) + nAccountLeafHash, err := tree.ComputeAccountLeafHash( + s.AccountMap[accountIndex].AccountNameHash, + s.AccountMap[accountIndex].PublicKey, + s.AccountMap[accountIndex].Nonce, + s.AccountMap[accountIndex].CollectionNonce, + s.AccountAssetTrees[accountIndex].Root(), + ) + if err != nil { + return fmt.Errorf("unable to compute account leaf: %v", err) + } + err = s.AccountTree.Set(uint64(accountIndex), nAccountLeafHash) + if err != nil { + return fmt.Errorf("unable to update account tree: %v", err) + } + } + + return nil +} + +func (s *StateDB) UpdateLiquidityTree(pairIndex int64) error { + nLiquidityAssetLeaf, err := tree.ComputeLiquidityAssetLeafHash( + s.LiquidityMap[pairIndex].AssetAId, + s.LiquidityMap[pairIndex].AssetA, + s.LiquidityMap[pairIndex].AssetBId, + s.LiquidityMap[pairIndex].AssetB, + s.LiquidityMap[pairIndex].LpAmount, + s.LiquidityMap[pairIndex].KLast, + s.LiquidityMap[pairIndex].FeeRate, + s.LiquidityMap[pairIndex].TreasuryAccountIndex, + s.LiquidityMap[pairIndex].TreasuryRate, + ) + if err != nil { + return fmt.Errorf("unable to compute liquidity leaf: %v", err) + } + err = s.LiquidityTree.Set(uint64(pairIndex), nLiquidityAssetLeaf) + if err != nil { + return fmt.Errorf("unable to update liquidity tree: %v", err) + } + + return nil +} + +func (s *StateDB) UpdateNftTree(nftIndex int64) error { + nftAssetLeaf, err := tree.ComputeNftAssetLeafHash( + s.NftMap[nftIndex].CreatorAccountIndex, + s.NftMap[nftIndex].OwnerAccountIndex, + s.NftMap[nftIndex].NftContentHash, + s.NftMap[nftIndex].NftL1Address, + s.NftMap[nftIndex].NftL1TokenId, + s.NftMap[nftIndex].CreatorTreasuryRate, + s.NftMap[nftIndex].CollectionId, + ) + if err != nil { + return fmt.Errorf("unable to compute nft leaf: %v", err) + } + err = s.NftTree.Set(uint64(nftIndex), nftAssetLeaf) + if err != nil { + return fmt.Errorf("unable to update nft tree: %v", err) + } + + return nil +} + +func (s *StateDB) GetStateRoot() string { + hFunc := mimc.NewMiMC() + hFunc.Write(s.AccountTree.Root()) + hFunc.Write(s.LiquidityTree.Root()) + hFunc.Write(s.NftTree.Root()) + return common.Bytes2Hex(hFunc.Sum(nil)) +} + +func (s *StateDB) GetCommittedNonce(accountIndex int64) (int64, error) { + if acc, exist := s.AccountMap[accountIndex]; exist { + return acc.Nonce, nil + } else { + return 0, fmt.Errorf("account does not exist") + } +} + +func (s *StateDB) GetPendingNonce(accountIndex int64) (int64, error) { + nonce, err := s.chainDb.MempoolModel.GetMaxNonceByAccountIndex(accountIndex) + if err == nil { + return nonce + 1, nil + } + account := &account.Account{} + redisAccount, err := s.redisCache.Get(context.Background(), dbcache.AccountKeyByIndex(accountIndex), account) + if err == nil && redisAccount != nil { + return account.Nonce, nil + } + dbAccount, err := s.chainDb.AccountModel.GetAccountByIndex(accountIndex) + if err == nil { + return dbAccount.Nonce, nil + } + return 0, err +} + +func (s *StateDB) GetNextAccountIndex() int64 { + return int64(len(s.AccountAssetTrees)) +} + +func (s *StateDB) GetNextNftIndex() int64 { + if len(s.PendingNewNftIndexMap) == 0 { + maxNftIndex, err := s.chainDb.L2NftModel.GetLatestNftIndex() + if err != nil { + panic("get latest nft index error: " + err.Error()) + } + return maxNftIndex + 1 + } + + 
maxNftIndex := int64(-1) + for index, status := range s.PendingNewNftIndexMap { + if status >= StateCachePending && index > maxNftIndex { + maxNftIndex = index + } + } + return maxNftIndex + 1 +} diff --git a/dao/account/account.go b/dao/account/account.go new file mode 100644 index 000000000..6833b6eec --- /dev/null +++ b/dao/account/account.go @@ -0,0 +1,161 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package account + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + AccountTableName = `account` +) + +const ( + AccountStatusPending = iota + AccountStatusConfirmed + AccountStatusVerified +) + +type ( + AccountModel interface { + CreateAccountTable() error + DropAccountTable() error + GetAccountByIndex(accountIndex int64) (account *Account, err error) + GetConfirmedAccountByIndex(accountIndex int64) (account *Account, err error) + GetAccountByPk(pk string) (account *Account, err error) + GetAccountByName(name string) (account *Account, err error) + GetAccountByNameHash(nameHash string) (account *Account, err error) + GetAccountsList(limit int, offset int64) (accounts []*Account, err error) + GetAccountsTotalCount() (count int64, err error) + } + + defaultAccountModel struct { + table string + DB *gorm.DB + } + + /* + always keep the latest data of committer + */ + Account struct { + gorm.Model + AccountIndex int64 `gorm:"uniqueIndex"` + AccountName string `gorm:"uniqueIndex"` + PublicKey string `gorm:"uniqueIndex"` + AccountNameHash string `gorm:"uniqueIndex"` + L1Address string + Nonce int64 + CollectionNonce int64 + // map[int64]*AccountAsset + AssetInfo string + AssetRoot string + // 0 - registered, not committer 1 - committer + Status int + } +) + +func NewAccountModel(db *gorm.DB) AccountModel { + return &defaultAccountModel{ + table: AccountTableName, + DB: db, + } +} + +func (*Account) TableName() string { + return AccountTableName +} + +func (m *defaultAccountModel) CreateAccountTable() error { + return m.DB.AutoMigrate(Account{}) +} + +func (m *defaultAccountModel) DropAccountTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultAccountModel) GetAccountByIndex(accountIndex int64) (account *Account, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Find(&account) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return account, nil +} + +func (m *defaultAccountModel) GetAccountByPk(pk string) (account *Account, err error) { + dbTx := m.DB.Table(m.table).Where("public_key = ?", pk).Find(&account) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return account, nil +} + +func (m *defaultAccountModel) GetAccountByName(accountName string) (account *Account, err error) { + dbTx := m.DB.Table(m.table).Where("account_name = ?", 
accountName).Find(&account) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return account, nil +} + +func (m *defaultAccountModel) GetAccountByNameHash(accountNameHash string) (account *Account, err error) { + dbTx := m.DB.Table(m.table).Where("account_name_hash = ?", accountNameHash).Find(&account) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return account, nil +} + +func (m *defaultAccountModel) GetAccountsList(limit int, offset int64) (accounts []*Account, err error) { + dbTx := m.DB.Table(m.table).Limit(limit).Offset(int(offset)).Order("account_index desc").Find(&accounts) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return accounts, nil +} + +func (m *defaultAccountModel) GetAccountsTotalCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultAccountModel) GetConfirmedAccountByIndex(accountIndex int64) (account *Account, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ? and status = ?", accountIndex, AccountStatusConfirmed).Find(&account) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return account, nil +} diff --git a/dao/account/account_history.go b/dao/account/account_history.go new file mode 100644 index 000000000..c8cc96968 --- /dev/null +++ b/dao/account/account_history.go @@ -0,0 +1,112 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package account + +import ( + "errors" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + AccountHistoryTableName = `account_history` +) + +type ( + AccountHistoryModel interface { + CreateAccountHistoryTable() error + DropAccountHistoryTable() error + GetValidAccounts(height int64, limit int, offset int) (rowsAffected int64, accounts []*AccountHistory, err error) + GetValidAccountCount(height int64) (accounts int64, err error) + } + + defaultAccountHistoryModel struct { + table string + DB *gorm.DB + } + + AccountHistory struct { + gorm.Model + AccountIndex int64 `gorm:"index"` + Nonce int64 + CollectionNonce int64 + AssetInfo string + AssetRoot string + L2BlockHeight int64 + } +) + +func NewAccountHistoryModel(db *gorm.DB) AccountHistoryModel { + return &defaultAccountHistoryModel{ + table: AccountHistoryTableName, + DB: db, + } +} + +func (*AccountHistory) TableName() string { + return AccountHistoryTableName +} + +func (m *defaultAccountHistoryModel) CreateAccountHistoryTable() error { + return m.DB.AutoMigrate(AccountHistory{}) +} + +func (m *defaultAccountHistoryModel) DropAccountHistoryTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultAccountHistoryModel) CreateNewAccount(nAccount *AccountHistory) (err error) { + dbTx := m.DB.Table(m.table).Create(&nAccount) + if dbTx.Error != nil { + return types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return errors.New("create new account no rows affected") + } + + return nil +} + +func (m *defaultAccountHistoryModel) GetValidAccounts(height int64, limit int, offset int) (rowsAffected int64, accounts []*AccountHistory, err error) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("account_index = a.account_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height AND l2_block_height != -1", height) + + dbTx := m.DB.Table(m.table+" as a").Select("*"). + Where("NOT EXISTS (?) AND l2_block_height <= ? AND l2_block_height != -1", subQuery, height). + Limit(limit).Offset(offset). + Order("account_index") + + if dbTx.Find(&accounts).Error != nil { + return 0, nil, types.DbErrSqlOperation + } + return dbTx.RowsAffected, accounts, nil + +} + +func (m *defaultAccountHistoryModel) GetValidAccountCount(height int64) (count int64, err error) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("account_index = a.account_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height AND l2_block_height != -1", height) + + dbTx := m.DB.Table(m.table+" as a"). + Where("NOT EXISTS (?) AND l2_block_height <= ? AND l2_block_height != -1", subQuery, height) + + if dbTx.Count(&count).Error != nil { + return 0, types.DbErrSqlOperation + } + return count, nil +} diff --git a/dao/asset/asset.go b/dao/asset/asset.go new file mode 100644 index 000000000..9f0da6cb7 --- /dev/null +++ b/dao/asset/asset.go @@ -0,0 +1,167 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package asset + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + AssetTableName = `asset` + + StatusActive uint32 = 0 + StatusInactive uint32 = 1 + + IsGasAsset = 1 +) + +type ( + AssetModel interface { + CreateAssetTable() error + DropAssetTable() error + CreateAssetsInBatch(assets []*Asset) (rowsAffected int64, err error) + GetAssetsTotalCount() (count int64, err error) + GetAssetsList(limit int64, offset int64) (assets []*Asset, err error) + GetAssetById(assetId int64) (asset *Asset, err error) + GetAssetBySymbol(symbol string) (asset *Asset, err error) + GetAssetByAddress(address string) (asset *Asset, err error) + GetGasAssets() (assets []*Asset, err error) + GetMaxId() (max int64, err error) + } + + defaultAssetModel struct { + table string + DB *gorm.DB + } + + Asset struct { + gorm.Model + AssetId uint32 `gorm:"uniqueIndex"` + AssetName string + AssetSymbol string + L1Address string + Decimals uint32 + Status uint32 + IsGasAsset uint32 + } +) + +func (*Asset) TableName() string { + return AssetTableName +} + +func NewAssetModel(db *gorm.DB) AssetModel { + return &defaultAssetModel{ + table: AssetTableName, + DB: db, + } +} + +func (m *defaultAssetModel) CreateAssetTable() error { + return m.DB.AutoMigrate(Asset{}) +} + +func (m *defaultAssetModel) DropAssetTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultAssetModel) GetAssetsTotalCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) + if dbTx.Error != nil { + return 0, dbTx.Error + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultAssetModel) GetAssetsList(limit int64, offset int64) (res []*Asset, err error) { + dbTx := m.DB.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("id asc").Find(&res) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return res, nil +} + +func (m *defaultAssetModel) CreateAssetsInBatch(l2Assets []*Asset) (rowsAffected int64, err error) { + dbTx := m.DB.Table(m.table).CreateInBatches(l2Assets, len(l2Assets)) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return 0, nil + } + return dbTx.RowsAffected, nil +} + +func (m *defaultAssetModel) GetAssetById(assetId int64) (res *Asset, err error) { + dbTx := m.DB.Table(m.table).Where("asset_id = ?", assetId).Find(&res) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return res, nil +} + +func (m *defaultAssetModel) GetAssetBySymbol(symbol string) (res *Asset, err error) { + dbTx := m.DB.Table(m.table).Where("asset_symbol = ?", symbol).Find(&res) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return res, nil +} + +func (m *defaultAssetModel) GetAssetByAddress(address string) (asset *Asset, err error) { + dbTx := m.DB.Table(m.table).Where("asset_address = ?", address).Find(&asset) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return asset, nil +} + +func (m *defaultAssetModel) GetGasAssets() (assets []*Asset, err error) { + dbTx := m.DB.Table(m.table).Where("is_gas_asset = ?", IsGasAsset).Find(&assets) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if 
dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return assets, nil +} + +func (m *defaultAssetModel) GetMaxId() (max int64, err error) { + dbTx := m.DB.Table(m.table).Select("id").Order("id desc").Limit(1).Find(&max) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return max, nil +} diff --git a/dao/block/block.go b/dao/block/block.go new file mode 100644 index 000000000..a30716b95 --- /dev/null +++ b/dao/block/block.go @@ -0,0 +1,488 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package block + +import ( + "errors" + "sort" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/compressedblock" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/types" +) + +const ( + _ = iota + StatusProposing + StatusPending + StatusCommitted + StatusVerifiedAndExecuted +) + +const ( + BlockTableName = `block` +) + +type ( + BlockModel interface { + CreateBlockTable() error + DropBlockTable() error + GetBlocksList(limit int64, offset int64) (blocks []*Block, err error) + GetBlocksBetween(start int64, end int64) (blocks []*Block, err error) + GetBlockByHeight(blockHeight int64) (block *Block, err error) + GetBlockByHeightWithoutTx(blockHeight int64) (block *Block, err error) + GetCommittedBlocksCount() (count int64, err error) + GetVerifiedBlocksCount() (count int64, err error) + GetLatestVerifiedHeight() (height int64, err error) + GetBlockByCommitment(blockCommitment string) (block *Block, err error) + GetCommittedBlocksBetween(start, end int64) (blocks []*Block, err error) + GetBlocksTotalCount() (count int64, err error) + CreateGenesisBlock(block *Block) error + GetCurrentHeight() (blockHeight int64, err error) + CreateNewBlock(oBlock *Block) (err error) + CreateCompressedBlock(pendingMempoolTxs []*mempool.MempoolTx, blockStates *BlockStates) error + } + + defaultBlockModel struct { + table string + DB *gorm.DB + } + + Block struct { + gorm.Model + BlockSize uint16 + // pubdata + BlockCommitment string + BlockHeight int64 `gorm:"uniqueIndex"` + StateRoot string + PriorityOperations int64 + PendingOnChainOperationsHash string + PendingOnChainOperationsPubData string + CommittedTxHash string + CommittedAt int64 + VerifiedTxHash string + VerifiedAt int64 + Txs []*tx.Tx `gorm:"foreignKey:BlockId"` + BlockStatus int64 + } + + BlockStates struct { + Block *Block + CompressedBlock *compressedblock.CompressedBlock + + PendingNewAccount []*account.Account + PendingUpdateAccount []*account.Account + PendingNewAccountHistory []*account.AccountHistory + PendingNewLiquidity []*liquidity.Liquidity + PendingUpdateLiquidity []*liquidity.Liquidity + PendingNewLiquidityHistory []*liquidity.LiquidityHistory + PendingNewNft []*nft.L2Nft + PendingUpdateNft []*nft.L2Nft + 
PendingNewNftHistory []*nft.L2NftHistory + } +) + +func NewBlockModel(db *gorm.DB) BlockModel { + return &defaultBlockModel{ + table: BlockTableName, + DB: db, + } +} + +func (*Block) TableName() string { + return BlockTableName +} + +func (m *defaultBlockModel) CreateBlockTable() error { + return m.DB.AutoMigrate(Block{}) +} + +func (m *defaultBlockModel) DropBlockTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultBlockModel) GetBlocksList(limit int64, offset int64) (blocks []*Block, err error) { + var ( + txForeignKeyColumn = `Txs` + ) + + dbTx := m.DB.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("block_height desc").Find(&blocks) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + + for _, block := range blocks { + err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) + if err != nil { + return nil, types.DbErrSqlOperation + } + sort.Slice(block.Txs, func(i, j int) bool { + return block.Txs[i].TxIndex < block.Txs[j].TxIndex + }) + } + + return blocks, nil +} + +func (m *defaultBlockModel) GetBlocksBetween(start int64, end int64) (blocks []*Block, err error) { + var ( + txForeignKeyColumn = `Txs` + txDetailsForeignKeyColumn = `TxDetails` + ) + dbTx := m.DB.Table(m.table).Where("block_height >= ? AND block_height <= ?", start, end). + Order("block_height"). + Find(&blocks) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + + for index, block := range blocks { + // If the last block is proposing, skip it. + if index == len(blocks)-1 && block.BlockStatus <= StatusProposing { + blocks = blocks[:len(blocks)-1] + break + } + + err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) + if err != nil { + return nil, types.DbErrSqlOperation + } + sort.Slice(block.Txs, func(i, j int) bool { + return block.Txs[i].TxIndex < block.Txs[j].TxIndex + }) + + for _, txInfo := range block.Txs { + err = m.DB.Model(&txInfo).Association(txDetailsForeignKeyColumn).Find(&txInfo.TxDetails) + if err != nil { + return nil, types.DbErrSqlOperation + } + sort.Slice(txInfo.TxDetails, func(i, j int) bool { + return txInfo.TxDetails[i].Order < txInfo.TxDetails[j].Order + }) + } + } + return blocks, nil +} + +func (m *defaultBlockModel) GetBlockByCommitment(blockCommitment string) (block *Block, err error) { + var ( + txForeignKeyColumn = `Txs` + ) + dbTx := m.DB.Table(m.table).Where("block_commitment = ?", blockCommitment).Find(&block) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) + sort.Slice(block.Txs, func(i, j int) bool { + return block.Txs[i].TxIndex < block.Txs[j].TxIndex + }) + if err != nil { + return nil, types.DbErrSqlOperation + } + return block, nil +} + +func (m *defaultBlockModel) GetBlockByHeight(blockHeight int64) (block *Block, err error) { + var ( + txForeignKeyColumn = `Txs` + ) + dbTx := m.DB.Table(m.table).Where("block_height = ?", blockHeight).Find(&block) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + err = m.DB.Model(&block).Association(txForeignKeyColumn).Find(&block.Txs) + sort.Slice(block.Txs, func(i, j int) bool { + return block.Txs[i].TxIndex < block.Txs[j].TxIndex + }) + if err 
!= nil { + return nil, types.DbErrSqlOperation + } + + return block, nil +} + +func (m *defaultBlockModel) GetBlockByHeightWithoutTx(blockHeight int64) (block *Block, err error) { + dbTx := m.DB.Table(m.table).Where("block_height = ?", blockHeight).Find(&block) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return block, nil +} + +func (m *defaultBlockModel) GetCommittedBlocksCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("block_status >= ? and deleted_at is NULL", StatusCommitted).Count(&count) + if dbTx.Error != nil { + if dbTx.Error == types.DbErrNotFound { + return 0, nil + } + return 0, types.DbErrSqlOperation + } + + return count, nil +} + +func (m *defaultBlockModel) GetVerifiedBlocksCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("block_status = ? and deleted_at is NULL", StatusVerifiedAndExecuted).Count(&count) + if dbTx.Error != nil { + if dbTx.Error == types.DbErrNotFound { + return 0, nil + } + return 0, types.DbErrSqlOperation + } + return count, nil +} + +func (m *defaultBlockModel) CreateGenesisBlock(block *Block) error { + dbTx := m.DB.Table(m.table).Omit("BlockDetails").Omit("Txs").Create(block) + + if dbTx.Error != nil { + return types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return types.DbErrFailToCreateBlock + } + return nil +} + +func (m *defaultBlockModel) GetCurrentHeight() (blockHeight int64, err error) { + dbTx := m.DB.Table(m.table).Select("block_height").Order("block_height desc").Limit(1).Find(&blockHeight) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return blockHeight, nil +} + +func (m *defaultBlockModel) GetBlocksTotalCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) + if dbTx.Error != nil { + return 0, dbTx.Error + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +type BlockStatusInfo struct { + BlockStatus int64 + CommittedAt int64 + VerifiedAt int64 +} + +func (m *defaultBlockModel) CreateCompressedBlock(pendingMempoolTxs []*mempool.MempoolTx, blockStates *BlockStates) error { + return m.DB.Transaction(func(tx *gorm.DB) error { // transact + // update mempool + for _, mempoolTx := range pendingMempoolTxs { + dbTx := tx.Table(mempool.MempoolTableName).Where("id = ?", mempoolTx.ID). + Select("*"). + Updates(&mempoolTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("no new mempoolTx") + } + } + // create block + if blockStates.Block != nil { + dbTx := tx.Table(m.table).Where("id = ?", blockStates.Block.ID). + Select("*"). 
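+ // Select("*") makes GORM write every column on Updates, zero values included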
+ Updates(&blockStates.Block) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("invalid block info") + } + } + // create block for commit + if blockStates.CompressedBlock != nil { + dbTx := tx.Table(compressedblock.CompressedBlockTableName).Create(blockStates.CompressedBlock) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("invalid block for commit info") + } + } + // create new account + if len(blockStates.PendingNewAccount) != 0 { + dbTx := tx.Table(account.AccountTableName).CreateInBatches(blockStates.PendingNewAccount, len(blockStates.PendingNewAccount)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewAccount)) { + return errors.New("unable to create new account") + } + } + // update account + for _, pendingAccount := range blockStates.PendingUpdateAccount { + dbTx := tx.Table(account.AccountTableName).Where("account_index = ?", pendingAccount.AccountIndex). + Select("*"). + Updates(&pendingAccount) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("no updated account") + } + } + // create new account history + if len(blockStates.PendingNewAccountHistory) != 0 { + dbTx := tx.Table(account.AccountHistoryTableName).CreateInBatches(blockStates.PendingNewAccountHistory, len(blockStates.PendingNewAccountHistory)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewAccountHistory)) { + return errors.New("unable to create new account history") + } + } + // create new liquidity + if len(blockStates.PendingNewLiquidity) != 0 { + dbTx := tx.Table(liquidity.LiquidityTable).CreateInBatches(blockStates.PendingNewLiquidity, len(blockStates.PendingNewLiquidity)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewLiquidity)) { + return errors.New("unable to create new liquidity") + } + } + // update liquidity + for _, pendingLiquidity := range blockStates.PendingUpdateLiquidity { + dbTx := tx.Table(liquidity.LiquidityTable).Where("pair_index = ?", pendingLiquidity.PairIndex). + Select("*"). + Updates(&pendingLiquidity) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("no updated liquidity") + } + } + // create new liquidity history + if len(blockStates.PendingNewLiquidityHistory) != 0 { + dbTx := tx.Table(liquidity.LiquidityHistoryTable).CreateInBatches(blockStates.PendingNewLiquidityHistory, len(blockStates.PendingNewLiquidityHistory)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewLiquidityHistory)) { + return errors.New("unable to create new liquidity history") + } + } + // create new nft + if len(blockStates.PendingNewNft) != 0 { + dbTx := tx.Table(nft.L2NftTableName).CreateInBatches(blockStates.PendingNewNft, len(blockStates.PendingNewNft)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewNft)) { + return errors.New("unable to create new nft") + } + } + // update nft + for _, pendingNft := range blockStates.PendingUpdateNft { + dbTx := tx.Table(nft.L2NftTableName).Where("nft_index = ?", pendingNft.NftIndex). + Select("*"). 
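+ // overwrite the full nft row matched by nft_index (zero values are persisted too)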
+ Updates(&pendingNft) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("no updated nft") + } + } + // new nft history + if len(blockStates.PendingNewNftHistory) != 0 { + dbTx := tx.Table(nft.L2NftHistoryTableName).CreateInBatches(blockStates.PendingNewNftHistory, len(blockStates.PendingNewNftHistory)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(blockStates.PendingNewNftHistory)) { + return errors.New("unable to create new nft history") + } + } + return nil + }) +} + +func (m *defaultBlockModel) CreateNewBlock(oBlock *Block) (err error) { + if oBlock == nil { + return errors.New("nil block") + } + if oBlock.BlockStatus != StatusProposing { + return errors.New("new block status isn't proposing") + } + + return m.DB.Transaction(func(tx *gorm.DB) error { // transact + dbTx := tx.Table(m.table).Create(oBlock) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + if err != nil { + return err + } + return errors.New("invalid block info") + } + + return nil + }) +} + +func (m *defaultBlockModel) GetCommittedBlocksBetween(start, end int64) (blocks []*Block, err error) { + dbTx := m.DB.Table(m.table).Where("block_status = ? AND block_height >= ? AND block_height <= ?", StatusCommitted, start, end). + Order("block_height"). + Find(&blocks) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return blocks, nil +} + +func (m *defaultBlockModel) GetLatestVerifiedHeight() (height int64, err error) { + block := &Block{} + dbTx := m.DB.Table(m.table).Where("block_status = ?", StatusVerifiedAndExecuted). + Order("block_height DESC"). + Limit(1). + First(&block) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return block.BlockHeight, nil +} diff --git a/dao/blockwitness/block_witness.go b/dao/blockwitness/block_witness.go new file mode 100644 index 000000000..0fbad1be7 --- /dev/null +++ b/dao/blockwitness/block_witness.go @@ -0,0 +1,126 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package blockwitness + +import ( + "fmt" + "time" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +type ( + BlockWitnessModel interface { + CreateBlockWitnessTable() error + DropBlockWitnessTable() error + GetLatestBlockWitnessHeight() (blockNumber int64, err error) + GetBlockWitnessByNumber(height int64) (witness *BlockWitness, err error) + UpdateBlockWitnessStatus(witness *BlockWitness, status int64) error + GetLatestBlockWitness() (witness *BlockWitness, err error) + CreateBlockWitness(witness *BlockWitness) error + } + + defaultBlockWitnessModel struct { + table string + DB *gorm.DB + } + + BlockWitness struct { + gorm.Model + Height int64 `gorm:"index:idx_height,unique"` + WitnessData string + Status int64 + } +) + +func NewBlockWitnessModel(db *gorm.DB) BlockWitnessModel { + return &defaultBlockWitnessModel{ + table: TableName, + DB: db, + } +} + +func (*BlockWitness) TableName() string { + return TableName +} + +func (m *defaultBlockWitnessModel) CreateBlockWitnessTable() error { + return m.DB.AutoMigrate(BlockWitness{}) +} + +func (m *defaultBlockWitnessModel) DropBlockWitnessTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultBlockWitnessModel) GetLatestBlockWitnessHeight() (blockNumber int64, err error) { + var row *BlockWitness + dbTx := m.DB.Table(m.table).Order("height desc").Limit(1).Find(&row) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return row.Height, nil +} + +func (m *defaultBlockWitnessModel) GetLatestBlockWitness() (witness *BlockWitness, err error) { + dbTx := m.DB.Table(m.table).Where("status = ?", StatusPublished).Order("height asc").Limit(1).Find(&witness) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return witness, nil +} + +func (m *defaultBlockWitnessModel) GetBlockWitnessByNumber(height int64) (witness *BlockWitness, err error) { + dbTx := m.DB.Table(m.table).Where("height = ?", height).Limit(1).Find(&witness) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return witness, nil +} + +func (m *defaultBlockWitnessModel) CreateBlockWitness(witness *BlockWitness) error { + if witness.Height > 1 { + _, err := m.GetBlockWitnessByNumber(witness.Height - 1) + if err != nil { + return fmt.Errorf("previous witness does not exist") + } + } + + dbTx := m.DB.Table(m.table).Create(witness) + if dbTx.Error != nil { + return types.DbErrSqlOperation + } + return nil +} + +func (m *defaultBlockWitnessModel) UpdateBlockWitnessStatus(witness *BlockWitness, status int64) error { + witness.Status = status + witness.UpdatedAt = time.Now() + dbTx := m.DB.Table(m.table).Save(witness) + if dbTx.Error != nil { + return types.DbErrSqlOperation + } + return nil +} diff --git a/common/model/blockForProof/constant.go b/dao/blockwitness/constant.go similarity index 84% rename from common/model/blockForProof/constant.go rename to dao/blockwitness/constant.go index b858d219e..16dea6d4b 100644 --- a/common/model/blockForProof/constant.go +++ b/dao/blockwitness/constant.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,14 +15,13 @@ * */ -package blockForProof +package blockwitness const ( StatusPublished = iota StatusReceived - StatusVerified ) const ( - BlockForProofTableName = `block_for_proof` + TableName = `block_witness` ) diff --git a/dao/compressedblock/compressed_block.go b/dao/compressedblock/compressed_block.go new file mode 100644 index 000000000..51391455c --- /dev/null +++ b/dao/compressedblock/compressed_block.go @@ -0,0 +1,80 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package compressedblock + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + CompressedBlockTableName = `compressed_block` +) + +type ( + CompressedBlockModel interface { + CreateCompressedBlockTable() error + DropCompressedBlockTable() error + GetCompressedBlockBetween(start, end int64) (blocksForCommit []*CompressedBlock, err error) + } + + defaultCompressedBlockModel struct { + table string + DB *gorm.DB + } + + CompressedBlock struct { + gorm.Model + BlockSize uint16 + BlockHeight int64 + StateRoot string + PublicData string + Timestamp int64 + PublicDataOffsets string + } +) + +func NewCompressedBlockModel(db *gorm.DB) CompressedBlockModel { + return &defaultCompressedBlockModel{ + table: CompressedBlockTableName, + DB: db, + } +} + +func (*CompressedBlock) TableName() string { + return CompressedBlockTableName +} + +func (m *defaultCompressedBlockModel) CreateCompressedBlockTable() error { + return m.DB.AutoMigrate(CompressedBlock{}) +} + +func (m *defaultCompressedBlockModel) DropCompressedBlockTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultCompressedBlockModel) GetCompressedBlockBetween(start, end int64) (blocksForCommit []*CompressedBlock, err error) { + dbTx := m.DB.Table(m.table).Where("block_height >= ? 
AND block_height <= ?", start, end).Find(&blocksForCommit) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return blocksForCommit, nil +} diff --git a/dao/dbcache/cache.go b/dao/dbcache/cache.go new file mode 100644 index 000000000..5fe3b7ce9 --- /dev/null +++ b/dao/dbcache/cache.go @@ -0,0 +1,33 @@ +package dbcache + +import ( + "context" + "fmt" +) + +type QueryFunc func() (interface{}, error) + +type Cache interface { + GetWithSet(ctx context.Context, key string, value interface{}, query QueryFunc) (interface{}, error) + Get(ctx context.Context, key string, value interface{}) (interface{}, error) + Set(ctx context.Context, key string, value interface{}) error + Delete(ctx context.Context, key string) error +} + +const ( + AccountKeyPrefix = "cache:account_" + LiquidityKeyPrefix = "cache:liquidity_" + NftKeyPrefix = "cache:nft_" +) + +func AccountKeyByIndex(accountIndex int64) string { + return AccountKeyPrefix + fmt.Sprintf("%d", accountIndex) +} + +func LiquidityKeyByIndex(pairIndex int64) string { + return LiquidityKeyPrefix + fmt.Sprintf("%d", pairIndex) +} + +func NftKeyByIndex(nftIndex int64) string { + return NftKeyPrefix + fmt.Sprintf("%d", nftIndex) +} diff --git a/dao/dbcache/redis_cache.go b/dao/dbcache/redis_cache.go new file mode 100644 index 000000000..ff4159667 --- /dev/null +++ b/dao/dbcache/redis_cache.go @@ -0,0 +1,65 @@ +package dbcache + +import ( + "context" + "errors" + "time" + + "github.com/eko/gocache/v2/cache" + "github.com/eko/gocache/v2/marshaler" + "github.com/eko/gocache/v2/metrics" + "github.com/eko/gocache/v2/store" + "github.com/go-redis/redis/v8" +) + +var ( + redisKeyNotExist = errors.New("redis: nil") +) + +type RedisCache struct { + marshal *marshaler.Marshaler + expiration time.Duration +} + +func NewRedisCache(redisAdd, password string, expiration time.Duration) Cache { + client := redis.NewClient(&redis.Options{Addr: redisAdd, Password: password}) + redisInstance := store.NewRedis(client, &store.Options{Expiration: expiration}) + redisCacheManager := cache.New(redisInstance) + promMetrics := metrics.NewPrometheus("zkbas") + cacheManager := cache.NewMetric(promMetrics, redisCacheManager) + return &RedisCache{ + marshal: marshaler.New(cacheManager), + expiration: expiration, + } +} + +func (c *RedisCache) GetWithSet(ctx context.Context, key string, valueStruct interface{}, query QueryFunc) (interface{}, error) { + value, err := c.marshal.Get(ctx, key, valueStruct) + if err == nil { + return value, nil + } + if err.Error() == redisKeyNotExist.Error() { + value, err = query() + if err != nil { + return nil, err + } + return value, c.Set(ctx, key, value) + } + return nil, err +} + +func (c *RedisCache) Get(ctx context.Context, key string, value interface{}) (interface{}, error) { + object, err := c.marshal.Get(ctx, key, value) + if err != nil { + return nil, err + } + return object, nil +} + +func (c *RedisCache) Set(ctx context.Context, key string, value interface{}) error { + return c.marshal.Set(ctx, key, value, &store.Options{Expiration: c.expiration}) +} + +func (c *RedisCache) Delete(ctx context.Context, key string) error { + return c.marshal.Delete(ctx, key) +} diff --git a/dao/l1rolluptx/l1_rollup_tx.go b/dao/l1rolluptx/l1_rollup_tx.go new file mode 100644 index 000000000..f2af88e2e --- /dev/null +++ b/dao/l1rolluptx/l1_rollup_tx.go @@ -0,0 +1,191 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package l1rolluptx + +import ( + "errors" + "fmt" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/proof" + "github.com/bnb-chain/zkbas/types" +) + +const ( + TableName = "l1_rollup_tx" + + StatusPending = 1 + StatusHandled = 2 + + TxTypeCommit = 1 + TxTypeVerifyAndExecute = 2 +) + +type ( + L1RollupTxModel interface { + CreateL1RollupTxTable() error + DropL1RollupTxTable() error + CreateL1RollupTx(tx *L1RollupTx) error + GetLatestHandledTx(txType int64) (tx *L1RollupTx, err error) + GetLatestPendingTx(txType int64) (tx *L1RollupTx, err error) + GetL1RollupTxsByStatus(txStatus int) (txs []*L1RollupTx, err error) + DeleteL1RollupTx(tx *L1RollupTx) error + UpdateL1RollupTxs( + pendingUpdateTxs []*L1RollupTx, + pendingUpdateProofStatus map[int64]int, + ) (err error) + } + + defaultL1RollupTxModel struct { + table string + DB *gorm.DB + } + + L1RollupTx struct { + gorm.Model + // txVerification hash + L1TxHash string + // txVerification status, 1 - pending, 2 - handled + TxStatus int + // txVerification type: commit / verify + TxType uint8 + // layer-2 block height + L2BlockHeight int64 + } +) + +func (*L1RollupTx) TableName() string { + return TableName +} + +func NewL1RollupTxModel(db *gorm.DB) L1RollupTxModel { + return &defaultL1RollupTxModel{ + table: TableName, + DB: db, + } +} + +func (m *defaultL1RollupTxModel) CreateL1RollupTxTable() error { + return m.DB.AutoMigrate(L1RollupTx{}) +} + +func (m *defaultL1RollupTxModel) DropL1RollupTxTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultL1RollupTxModel) CreateL1RollupTx(tx *L1RollupTx) error { + dbTx := m.DB.Table(m.table).Create(tx) + if dbTx.Error != nil { + return dbTx.Error + } else if dbTx.RowsAffected == 0 { + return errors.New("invalid rollup tx") + } + return nil +} + +func (m *defaultL1RollupTxModel) GetL1RollupTxsByStatus(txStatus int) (txs []*L1RollupTx, err error) { + dbTx := m.DB.Table(m.table).Where("tx_status = ?", txStatus).Order("l2_block_height, tx_type").Find(&txs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txs, nil +} + +func (m *defaultL1RollupTxModel) DeleteL1RollupTx(rollupTx *L1RollupTx) error { + return m.DB.Transaction(func(tx *gorm.DB) error { + dbTx := tx.Table(m.table).Where("id = ?", rollupTx.ID).Delete(&rollupTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("delete invalid rollupTx") + } + return nil + }) +} + +func (m *defaultL1RollupTxModel) UpdateL1RollupTxs( + pendingUpdateTxs []*L1RollupTx, + pendingUpdateProofStatus map[int64]int, +) (err error) { + err = m.DB.Transaction(func(tx *gorm.DB) error { + for _, pendingUpdateTx := range pendingUpdateTxs { + dbTx := tx.Table(TableName).Where("id = ?", pendingUpdateTx.ID). + Select("*"). 
+ Updates(&pendingUpdateTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + if err != nil { + return err + } + return errors.New("invalid rollup tx") + } + } + + for blockHeight, newStatus := range pendingUpdateProofStatus { + var row *proof.Proof + dbTx := tx.Table(proof.TableName).Where("block_number = ?", blockHeight).Find(&row) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return fmt.Errorf("no such proof. height: %d", blockHeight) + } + dbTx = tx.Model(&row). + Select("status"). + Updates(&proof.Proof{Status: int64(newStatus)}) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return fmt.Errorf("update no proof: %d", row.BlockNumber) + } + } + return nil + }) + return err +} + +func (m *defaultL1RollupTxModel) GetLatestHandledTx(txType int64) (tx *L1RollupTx, err error) { + tx = &L1RollupTx{} + + dbTx := m.DB.Table(m.table).Where("tx_type = ? AND tx_status = ?", txType, StatusHandled).Order("l2_block_height desc").Find(&tx) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return tx, nil +} + +func (m *defaultL1RollupTxModel) GetLatestPendingTx(txType int64) (tx *L1RollupTx, err error) { + tx = &L1RollupTx{} + + dbTx := m.DB.Table(m.table).Where("tx_type = ? AND tx_status = ?", txType, StatusPending).Find(&tx) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return tx, nil +} diff --git a/dao/l1syncedblock/l1_synced_block.go b/dao/l1syncedblock/l1_synced_block.go new file mode 100644 index 000000000..587b778f8 --- /dev/null +++ b/dao/l1syncedblock/l1_synced_block.go @@ -0,0 +1,222 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package l1syncedblock + +import ( + "errors" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/priorityrequest" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/types" +) + +const ( + TableName = "l1_synced_block" + + TypeGeneric int = 0 + TypeGovernance int = 1 +) + +type ( + L1SyncedBlockModel interface { + CreateL1SyncedBlockTable() error + DropL1SyncedBlockTable() error + CreateGenericBlock( + block *L1SyncedBlock, + priorityRequests []*priorityrequest.PriorityRequest, + pendingUpdateBlocks []*block.Block, + pendingUpdateMempoolTxs []*mempool.MempoolTx, + ) (err error) + + CreateGovernanceBlock( + block *L1SyncedBlock, + l2Assets []*asset.Asset, + pendingUpdateL2Assets []*asset.Asset, + pendingNewSysConfigs []*sysconfig.SysConfig, + pendingUpdateSysConfigs []*sysconfig.SysConfig, + ) (err error) + GetLatestL1BlockByType(blockType int) (blockInfo *L1SyncedBlock, err error) + } + + defaultL1EventModel struct { + table string + DB *gorm.DB + } + + L1SyncedBlock struct { + gorm.Model + // l1 block height + L1BlockHeight int64 + // block info, array of hashes + BlockInfo string + Type int + } +) + +func (*L1SyncedBlock) TableName() string { + return TableName +} + +func NewL1SyncedBlockModel(db *gorm.DB) L1SyncedBlockModel { + return &defaultL1EventModel{ + table: TableName, + DB: db, + } +} + +func (m *defaultL1EventModel) CreateL1SyncedBlockTable() error { + return m.DB.AutoMigrate(L1SyncedBlock{}) +} + +func (m *defaultL1EventModel) DropL1SyncedBlockTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultL1EventModel) CreateGenericBlock( + blockInfo *L1SyncedBlock, + priorityRequests []*priorityrequest.PriorityRequest, + pendingUpdateBlocks []*block.Block, + pendingUpdateMempoolTxs []*mempool.MempoolTx, +) (err error) { + const ( + Txs = "Txs" + ) + + err = m.DB.Transaction( + func(tx *gorm.DB) error { // transact + dbTx := tx.Table(m.table).Create(blockInfo) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("unable to create l1 block info") + } + + dbTx = tx.Table(priorityrequest.TableName).CreateInBatches(priorityRequests, len(priorityRequests)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(priorityRequests)) { + return errors.New("unable to create priority requests") + } + + // update blocks + for _, pendingUpdateBlock := range pendingUpdateBlocks { + dbTx := tx.Table(block.BlockTableName).Where("id = ?", pendingUpdateBlock.ID). + Omit(Txs). + Select("*"). 
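+ // update the block row itself; the Txs association is excluded via Omit above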
+ Updates(&pendingUpdateBlock) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + if err != nil { + return err + } + return errors.New("invalid block") + } + } + + // delete mempool txs + for _, pendingDeleteMempoolTx := range pendingUpdateMempoolTxs { + dbTx := tx.Table(mempool.MempoolTableName).Where("id = ?", pendingDeleteMempoolTx.ID).Delete(&pendingDeleteMempoolTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("delete invalid mempool tx") + } + } + return nil + }, + ) + return err +} + +func (m *defaultL1EventModel) CreateGovernanceBlock( + block *L1SyncedBlock, + pendingNewL2Assets []*asset.Asset, + pendingUpdateL2Assets []*asset.Asset, + pendingNewSysConfigs []*sysconfig.SysConfig, + pendingUpdateSysConfigs []*sysconfig.SysConfig, +) (err error) { + err = m.DB.Transaction( + func(tx *gorm.DB) error { + // create data for l1 block info + dbTx := tx.Table(m.table).Create(block) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("unable to create l1 block info") + } + // create l2 asset info + if len(pendingNewL2Assets) != 0 { + dbTx = tx.Table(asset.AssetTableName).CreateInBatches(pendingNewL2Assets, len(pendingNewL2Assets)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(pendingNewL2Assets)) { + return errors.New("invalid l2 asset info") + } + } + // update l2 asset info + for _, pendingUpdateL2AssetInfo := range pendingUpdateL2Assets { + dbTx = tx.Table(asset.AssetTableName).Where("id = ?", pendingUpdateL2AssetInfo.ID).Select("*").Updates(&pendingUpdateL2AssetInfo) + if dbTx.Error != nil { + return dbTx.Error + } + } + // create new sys config + if len(pendingNewSysConfigs) != 0 { + dbTx = tx.Table(sysconfig.TableName).CreateInBatches(pendingNewSysConfigs, len(pendingNewSysConfigs)) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(pendingNewSysConfigs)) { + return errors.New("invalid sys config info") + } + } + // update sys config + for _, pendingUpdateSysConfig := range pendingUpdateSysConfigs { + dbTx = tx.Table(sysconfig.TableName).Where("id = ?", pendingUpdateSysConfig.ID).Select("*").Updates(&pendingUpdateSysConfig) + if dbTx.Error != nil { + return dbTx.Error + } + } + return nil + }, + ) + return err +} + +func (m *defaultL1EventModel) GetLatestL1BlockByType(blockType int) (blockInfo *L1SyncedBlock, err error) { + dbTx := m.DB.Table(m.table).Where("type = ?", blockType).Order("l1_block_height desc").Find(&blockInfo) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return blockInfo, nil +} diff --git a/dao/liquidity/liquidity.go b/dao/liquidity/liquidity.go new file mode 100644 index 000000000..b628292ab --- /dev/null +++ b/dao/liquidity/liquidity.go @@ -0,0 +1,95 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package liquidity + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + LiquidityTable = `liquidity` +) + +type ( + LiquidityModel interface { + CreateLiquidityTable() error + DropLiquidityTable() error + GetLiquidityByPairIndex(pairIndex int64) (entity *Liquidity, err error) + GetAllLiquidityAssets() (liquidityList []*Liquidity, err error) + } + + defaultLiquidityModel struct { + table string + DB *gorm.DB + } + + Liquidity struct { + gorm.Model + PairIndex int64 + AssetAId int64 + AssetA string + AssetBId int64 + AssetB string + LpAmount string + KLast string + FeeRate int64 + TreasuryAccountIndex int64 + TreasuryRate int64 + } +) + +func NewLiquidityModel(db *gorm.DB) LiquidityModel { + return &defaultLiquidityModel{ + table: LiquidityTable, + DB: db, + } +} + +func (*Liquidity) TableName() string { + return LiquidityTable +} + +func (m *defaultLiquidityModel) CreateLiquidityTable() error { + return m.DB.AutoMigrate(Liquidity{}) +} + +func (m *defaultLiquidityModel) DropLiquidityTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultLiquidityModel) GetLiquidityByPairIndex(pairIndex int64) (entity *Liquidity, err error) { + dbTx := m.DB.Table(m.table).Where("pair_index = ?", pairIndex).Find(&entity) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return entity, nil +} + +func (m *defaultLiquidityModel) GetAllLiquidityAssets() (liquidityList []*Liquidity, err error) { + dbTx := m.DB.Table(m.table).Order("id").Find(&liquidityList) + if dbTx.Error != nil { + return liquidityList, dbTx.Error + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return liquidityList, nil +} diff --git a/dao/liquidity/liquidity_history.go b/dao/liquidity/liquidity_history.go new file mode 100644 index 000000000..ef50c2288 --- /dev/null +++ b/dao/liquidity/liquidity_history.go @@ -0,0 +1,106 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package liquidity + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + LiquidityHistoryTable = `liquidity_history` +) + +type ( + LiquidityHistoryModel interface { + CreateLiquidityHistoryTable() error + DropLiquidityHistoryTable() error + GetLatestLiquidityByBlockHeight(blockHeight int64, limit int, offset int) (entities []*LiquidityHistory, err error) + GetLatestLiquidityCountByBlockHeight(blockHeight int64) (count int64, err error) + } + + defaultLiquidityHistoryModel struct { + table string + DB *gorm.DB + } + + LiquidityHistory struct { + gorm.Model + PairIndex int64 + AssetAId int64 + AssetA string + AssetBId int64 + AssetB string + LpAmount string + KLast string + FeeRate int64 + TreasuryAccountIndex int64 + TreasuryRate int64 + L2BlockHeight int64 + } +) + +func NewLiquidityHistoryModel(db *gorm.DB) LiquidityHistoryModel { + return &defaultLiquidityHistoryModel{ + table: LiquidityHistoryTable, + DB: db, + } +} + +func (*LiquidityHistory) TableName() string { + return LiquidityHistoryTable +} + +func (m *defaultLiquidityHistoryModel) CreateLiquidityHistoryTable() error { + return m.DB.AutoMigrate(LiquidityHistory{}) +} + +func (m *defaultLiquidityHistoryModel) DropLiquidityHistoryTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultLiquidityHistoryModel) GetLatestLiquidityByBlockHeight(blockHeight int64, limit int, offset int) (entities []*LiquidityHistory, err error) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("pair_index = a.pair_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height", blockHeight) + + dbTx := m.DB.Table(m.table+" as a").Select("*"). + Where("NOT EXISTS (?) AND l2_block_height <= ?", subQuery, blockHeight). + Limit(limit).Offset(offset). + Order("pair_index") + + if dbTx.Find(&entities).Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return entities, nil +} + +func (m *defaultLiquidityHistoryModel) GetLatestLiquidityCountByBlockHeight(blockHeight int64) (count int64, err error) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("pair_index = a.pair_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height", blockHeight) + + dbTx := m.DB.Table(m.table+" as a"). + Where("NOT EXISTS (?) AND l2_block_height <= ?", subQuery, blockHeight) + + if dbTx.Count(&count).Error != nil { + return 0, dbTx.Error + } + return count, nil +} diff --git a/dao/mempool/mempool.go b/dao/mempool/mempool.go new file mode 100644 index 000000000..3b853cad8 --- /dev/null +++ b/dao/mempool/mempool.go @@ -0,0 +1,220 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package mempool + +import ( + "errors" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + MempoolTableName = `mempool_tx` +) + +const ( + PendingTxStatus = iota + ExecutedTxStatus + SuccessTxStatus + FailTxStatus +) + +type ( + MempoolModel interface { + CreateMempoolTxTable() error + DropMempoolTxTable() error + GetMempoolTxsList(limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) + GetMempoolTxsTotalCount() (count int64, err error) + GetMempoolTxByTxHash(hash string) (mempoolTxs *MempoolTx, err error) + GetMempoolTxsByStatus(status int) (mempoolTxs []*MempoolTx, err error) + GetMempoolTxsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, mempoolTxs []*MempoolTx, err error) + CreateBatchedMempoolTxs(mempoolTxs []*MempoolTx) error + GetPendingMempoolTxsByAccountIndex(accountIndex int64) (mempoolTxs []*MempoolTx, err error) + GetMaxNonceByAccountIndex(accountIndex int64) (nonce int64, err error) + UpdateMempoolTxs(pendingUpdateMempoolTxs []*MempoolTx, pendingDeleteMempoolTxs []*MempoolTx) error + } + + defaultMempoolModel struct { + table string + DB *gorm.DB + } + + MempoolTx struct { + gorm.Model + TxHash string `gorm:"uniqueIndex"` + TxType int64 + GasFeeAssetId int64 + GasFee string + NftIndex int64 + PairIndex int64 + AssetId int64 + TxAmount string + NativeAddress string + TxInfo string + ExtraInfo string + Memo string + AccountIndex int64 + Nonce int64 + ExpiredAt int64 + L2BlockHeight int64 + Status int `gorm:"index"` // 0: pending tx; 1: committed tx; 2: verified tx; + } +) + +func NewMempoolModel(db *gorm.DB) MempoolModel { + return &defaultMempoolModel{ + table: MempoolTableName, + DB: db, + } +} + +func (*MempoolTx) TableName() string { + return MempoolTableName +} + +func (m *defaultMempoolModel) CreateMempoolTxTable() error { + return m.DB.AutoMigrate(MempoolTx{}) +} + +func (m *defaultMempoolModel) DropMempoolTxTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultMempoolModel) GetMempoolTxsList(limit int64, offset int64) (mempoolTxs []*MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("status = ?", PendingTxStatus).Limit(int(limit)).Offset(int(offset)).Order("created_at desc, id desc").Find(&mempoolTxs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + return mempoolTxs, nil +} + +func (m *defaultMempoolModel) GetMempoolTxsByBlockHeight(l2BlockHeight int64) (rowsAffected int64, mempoolTxs []*MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("l2_block_height = ?", l2BlockHeight).Find(&mempoolTxs) + if dbTx.Error != nil { + return 0, nil, types.DbErrSqlOperation + } + return dbTx.RowsAffected, mempoolTxs, nil +} + +func (m *defaultMempoolModel) GetMempoolTxsByStatus(status int) (mempoolTxs []*MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("status = ?", status).Order("created_at, id").Find(&mempoolTxs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } + return mempoolTxs, nil +} + +func (m *defaultMempoolModel) GetMempoolTxsTotalCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("status = ? and deleted_at is NULL", PendingTxStatus).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultMempoolModel) GetMempoolTxByTxHash(hash string) (mempoolTx *MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("status = ? 
and tx_hash = ?", PendingTxStatus, hash).Find(&mempoolTx) + if dbTx.Error != nil { + if dbTx.Error == types.DbErrNotFound { + return mempoolTx, dbTx.Error + } else { + return nil, types.DbErrSqlOperation + } + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return mempoolTx, nil +} + +func (m *defaultMempoolModel) CreateBatchedMempoolTxs(mempoolTxs []*MempoolTx) error { + return m.DB.Transaction(func(tx *gorm.DB) error { // transact + dbTx := tx.Table(m.table).Create(mempoolTxs) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return types.DbErrFailToCreateMempoolTx + } + return nil + }) +} + +func (m *defaultMempoolModel) GetMempoolTxsListByL2BlockHeight(blockHeight int64) (mempoolTxs []*MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("status = ? and l2_block_height <= ?", SuccessTxStatus, blockHeight).Find(&mempoolTxs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + + return mempoolTxs, nil +} + +func (m *defaultMempoolModel) GetPendingMempoolTxsByAccountIndex(accountIndex int64) (mempoolTxs []*MempoolTx, err error) { + dbTx := m.DB.Table(m.table).Where("status = ? AND account_index = ?", PendingTxStatus, accountIndex). + Order("created_at, id").Find(&mempoolTxs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return mempoolTxs, nil +} + +func (m *defaultMempoolModel) GetMaxNonceByAccountIndex(accountIndex int64) (nonce int64, err error) { + dbTx := m.DB.Table(m.table).Select("nonce").Where("deleted_at is null and account_index = ?", accountIndex).Order("nonce desc").Limit(1).Find(&nonce) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return nonce, nil +} + +func (m *defaultMempoolModel) UpdateMempoolTxs(pendingUpdateMempoolTxs []*MempoolTx, pendingDeleteMempoolTxs []*MempoolTx) (err error) { + return m.DB.Transaction(func(tx *gorm.DB) error { // transact + + // update mempool + for _, mempoolTx := range pendingUpdateMempoolTxs { + dbTx := tx.Table(MempoolTableName).Where("id = ?", mempoolTx.ID). + Select("*"). + Updates(&mempoolTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("no new mempoolTx") + } + } + for _, pendingDeleteMempoolTx := range pendingDeleteMempoolTxs { + dbTx := tx.Table(MempoolTableName).Where("id = ?", pendingDeleteMempoolTx.ID).Delete(&pendingDeleteMempoolTx) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return errors.New("delete invalid mempool tx") + } + } + + return nil + }) +} diff --git a/common/model/nft/nft.go b/dao/nft/nft.go similarity index 60% rename from common/model/nft/nft.go rename to dao/nft/nft.go index 5da9f94f8..caa72f024 100644 --- a/common/model/nft/nft.go +++ b/dao/nft/nft.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,13 +18,13 @@ package nft import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" "gorm.io/gorm" - "github.com/bnb-chain/zkbas/errorcode" + "github.com/bnb-chain/zkbas/types" +) + +const ( + L2NftTableName = `l2_nft` ) type ( @@ -33,9 +33,10 @@ type ( DropL2NftTable() error GetNftAsset(nftIndex int64) (nftAsset *L2Nft, err error) GetLatestNftIndex() (nftIndex int64, err error) + GetNftListByAccountIndex(accountIndex, limit, offset int64) (nfts []*L2Nft, err error) + GetAccountNftTotalCount(accountIndex int64) (int64, error) } defaultL2NftModel struct { - sqlc.CachedConn table string DB *gorm.DB } @@ -53,11 +54,10 @@ type ( } ) -func NewL2NftModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) L2NftModel { +func NewL2NftModel(db *gorm.DB) L2NftModel { return &defaultL2NftModel{ - CachedConn: sqlc.NewConn(conn, c), - table: L2NftTableName, - DB: db, + table: L2NftTableName, + DB: db, } } @@ -65,22 +65,10 @@ func (*L2Nft) TableName() string { return L2NftTableName } -/* - Func: CreateL2NftTable - Params: - Return: err error - Description: create account l2 nft table -*/ func (m *defaultL2NftModel) CreateL2NftTable() error { return m.DB.AutoMigrate(L2Nft{}) } -/* - Func: DropL2NftTable - Params: - Return: err error - Description: drop account l2 nft table -*/ func (m *defaultL2NftModel) DropL2NftTable() error { return m.DB.Migrator().DropTable(m.table) } @@ -88,11 +76,9 @@ func (m *defaultL2NftModel) DropL2NftTable() error { func (m *defaultL2NftModel) GetNftAsset(nftIndex int64) (nftAsset *L2Nft, err error) { dbTx := m.DB.Table(m.table).Where("nft_index = ?", nftIndex).Find(&nftAsset) if dbTx.Error != nil { - logx.Errorf("[GetNftAsset] unable to get nft asset: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation + return nil, types.DbErrSqlOperation } else if dbTx.RowsAffected == 0 { - logx.Errorf("[GetNftAsset] no such info") - return nil, errorcode.DbErrNotFound + return nil, types.DbErrNotFound } return nftAsset, nil } @@ -101,10 +87,31 @@ func (m *defaultL2NftModel) GetLatestNftIndex() (nftIndex int64, err error) { var nftInfo *L2Nft dbTx := m.DB.Table(m.table).Order("nft_index desc").Find(&nftInfo) if dbTx.Error != nil { - logx.Errorf("[GetLatestNftIndex] unable to get latest nft info: %s", dbTx.Error.Error()) - return -1, dbTx.Error + return -1, types.DbErrSqlOperation } else if dbTx.RowsAffected == 0 { return -1, nil } return nftInfo.NftIndex, nil } + +func (m *defaultL2NftModel) GetNftListByAccountIndex(accountIndex, limit, offset int64) (nftList []*L2Nft, err error) { + dbTx := m.DB.Table(m.table).Where("owner_account_index = ? and deleted_at is NULL", accountIndex). + Limit(int(limit)).Offset(int(offset)).Order("nft_index desc").Find(&nftList) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return nftList, nil +} + +func (m *defaultL2NftModel) GetAccountNftTotalCount(accountIndex int64) (int64, error) { + var count int64 + dbTx := m.DB.Table(m.table).Where("owner_account_index = ? 
and deleted_at is NULL", accountIndex).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, types.DbErrNotFound + } + return count, nil +} diff --git a/dao/nft/nft_history.go b/dao/nft/nft_history.go new file mode 100644 index 000000000..cf769e637 --- /dev/null +++ b/dao/nft/nft_history.go @@ -0,0 +1,111 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package nft + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + L2NftHistoryTableName = `l2_nft_history` +) + +type ( + L2NftHistoryModel interface { + CreateL2NftHistoryTable() error + DropL2NftHistoryTable() error + GetLatestNftAssetCountByBlockHeight(height int64) ( + count int64, err error, + ) + GetLatestNftAssetsByBlockHeight(height int64, limit int, offset int) ( + rowsAffected int64, nftAssets []*L2NftHistory, err error, + ) + } + defaultL2NftHistoryModel struct { + table string + DB *gorm.DB + } + + L2NftHistory struct { + gorm.Model + NftIndex int64 + CreatorAccountIndex int64 + OwnerAccountIndex int64 + NftContentHash string + NftL1Address string + NftL1TokenId string + CreatorTreasuryRate int64 + CollectionId int64 + Status int + L2BlockHeight int64 + } +) + +func NewL2NftHistoryModel(db *gorm.DB) L2NftHistoryModel { + return &defaultL2NftHistoryModel{ + table: L2NftHistoryTableName, + DB: db, + } +} + +func (*L2NftHistory) TableName() string { + return L2NftHistoryTableName +} + +func (m *defaultL2NftHistoryModel) CreateL2NftHistoryTable() error { + return m.DB.AutoMigrate(L2NftHistory{}) +} + +func (m *defaultL2NftHistoryModel) DropL2NftHistoryTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultL2NftHistoryModel) GetLatestNftAssetCountByBlockHeight(height int64) ( + count int64, err error, +) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("nft_index = a.nft_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height", height) + + dbTx := m.DB.Table(m.table+" as a"). + Where("NOT EXISTS (?) AND l2_block_height <= ?", subQuery, height) + + if dbTx.Count(&count).Error != nil { + return 0, types.DbErrSqlOperation + } + + return count, nil +} + +func (m *defaultL2NftHistoryModel) GetLatestNftAssetsByBlockHeight(height int64, limit int, offset int) ( + rowsAffected int64, accountNftAssets []*L2NftHistory, err error, +) { + subQuery := m.DB.Table(m.table).Select("*"). + Where("nft_index = a.nft_index AND l2_block_height <= ? AND l2_block_height > a.l2_block_height", height) + + dbTx := m.DB.Table(m.table+" as a").Select("*"). + Where("NOT EXISTS (?) AND l2_block_height <= ?", subQuery, height). + Limit(limit).Offset(offset). 
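+ // page through the per-nft latest rows, keeping a stable nft_index order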
+ Order("nft_index") + + if dbTx.Find(&accountNftAssets).Error != nil { + return 0, nil, types.DbErrSqlOperation + } + return dbTx.RowsAffected, accountNftAssets, nil +} diff --git a/dao/priorityrequest/priority_request.go b/dao/priorityrequest/priority_request.go new file mode 100644 index 000000000..c1449beed --- /dev/null +++ b/dao/priorityrequest/priority_request.go @@ -0,0 +1,148 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package priorityrequest + +import ( + "errors" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/types" +) + +const ( + TableName = "priority_request" + + PendingStatus = 1 + HandledStatus = 2 +) + +type ( + PriorityRequestModel interface { + CreatePriorityRequestTable() error + DropPriorityRequestTable() error + GetPriorityRequestsByStatus(status int) (txs []*PriorityRequest, err error) + CreateMempoolTxsAndUpdateRequests(pendingNewMempoolTxs []*mempool.MempoolTx, pendingUpdateRequests []*PriorityRequest) (err error) + GetLatestHandledRequestId() (requestId int64, err error) + } + + defaultPriorityRequestModel struct { + table string + DB *gorm.DB + } + + PriorityRequest struct { + gorm.Model + // related txVerification hash + L1TxHash string + // related block height + L1BlockHeight int64 + // sender + SenderAddress string + // request id + RequestId int64 + // tx type + TxType int64 + // pub data + Pubdata string + // expirationBlock + ExpirationBlock int64 + // status + Status int + } +) + +func (*PriorityRequest) TableName() string { + return TableName +} + +func NewPriorityRequestModel(db *gorm.DB) PriorityRequestModel { + return &defaultPriorityRequestModel{ + table: TableName, + DB: db, + } +} + +func (m *defaultPriorityRequestModel) CreatePriorityRequestTable() error { + return m.DB.AutoMigrate(PriorityRequest{}) +} + +func (m *defaultPriorityRequestModel) DropPriorityRequestTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultPriorityRequestModel) GetL2TxEventMonitors() (txs []*PriorityRequest, err error) { + dbTx := m.DB.Table(m.table).Find(&txs).Order("l1_block_height") + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txs, dbTx.Error +} + +func (m *defaultPriorityRequestModel) GetPriorityRequestsByStatus(status int) (txs []*PriorityRequest, err error) { + // todo order id + dbTx := m.DB.Table(m.table).Where("status = ?", status).Order("request_id").Find(&txs) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txs, nil +} + +func (m *defaultPriorityRequestModel) CreateMempoolTxsAndUpdateRequests(newMempoolTxs []*mempool.MempoolTx, toUpdateL2Events []*PriorityRequest) (err error) { + err = m.DB.Transaction( + func(tx *gorm.DB) error { + dbTx := tx.Table(mempool.MempoolTableName).CreateInBatches(newMempoolTxs, len(newMempoolTxs)) 
+ if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(newMempoolTxs)) { + return errors.New("create mempool txs error") + } + + eventIds := make([]uint, 0, len(toUpdateL2Events)) + for _, l2Event := range toUpdateL2Events { + eventIds = append(eventIds, l2Event.ID) + } + dbTx = tx.Table(m.table).Where("id in ?", eventIds).Update("status", HandledStatus) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected != int64(len(eventIds)) { + return errors.New("update l2 events error") + } + return nil + }) + return err +} + +func (m *defaultPriorityRequestModel) GetLatestHandledRequestId() (requestId int64, err error) { + var event *PriorityRequest + dbTx := m.DB.Table(m.table).Where("status = ?", HandledStatus).Order("request_id desc").Find(&event) + if dbTx.Error != nil { + return -1, dbTx.Error + } + if dbTx.RowsAffected == 0 { + return -1, nil + } + return event.RequestId, nil +} diff --git a/dao/proof/proof.go b/dao/proof/proof.go new file mode 100644 index 000000000..171e7d506 --- /dev/null +++ b/dao/proof/proof.go @@ -0,0 +1,127 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package proof + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + TableName = "proof" +) + +const ( + NotSent = iota + NotConfirmed + Confirmed +) + +type ( + ProofModel interface { + CreateProofTable() error + DropProofTable() error + CreateProof(row *Proof) error + GetProofsBetween(start int64, end int64) (proofs []*Proof, err error) + GetLatestConfirmedProof() (p *Proof, err error) + GetProofByBlockNumber(num int64) (p *Proof, err error) + } + + defaultProofModel struct { + table string + DB *gorm.DB + } + + Proof struct { + gorm.Model + ProofInfo string + BlockNumber int64 `gorm:"index:idx_number,unique"` + Status int64 + } +) + +func (*Proof) TableName() string { + return TableName +} + +func NewProofModel(db *gorm.DB) ProofModel { + return &defaultProofModel{ + table: TableName, + DB: db, + } +} + +func (m *defaultProofModel) CreateProofTable() error { + return m.DB.AutoMigrate(Proof{}) +} + +func (m *defaultProofModel) DropProofTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultProofModel) CreateProof(row *Proof) error { + dbTx := m.DB.Table(m.table).Create(row) + if dbTx.Error != nil { + return dbTx.Error + } + if dbTx.RowsAffected == 0 { + return types.DbErrFailToCreateProof + } + return nil +} + +func (m *defaultProofModel) GetProofsBetween(start int64, end int64) (proofs []*Proof, err error) { + dbTx := m.DB.Debug().Table(m.table).Where("block_number >= ? AND block_number <= ? AND status = ?", + start, + end, + NotSent). + Order("block_number"). 
+ Find(&proofs) + + if dbTx.Error != nil { + return proofs, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + + return proofs, err +} + +func (m *defaultProofModel) GetLatestConfirmedProof() (p *Proof, err error) { + var row *Proof + dbTx := m.DB.Table(m.table).Where("status >= ?", NotConfirmed).Order("block_number desc").Limit(1).Find(&row) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } else { + return row, nil + } +} + +func (m *defaultProofModel) GetProofByBlockNumber(num int64) (p *Proof, err error) { + var row *Proof + dbTx := m.DB.Table(m.table).Where("block_number = ?", num).Find(&row) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } else { + return row, nil + } +} diff --git a/dao/sysconfig/sysconfig.go b/dao/sysconfig/sysconfig.go new file mode 100644 index 000000000..a535c3bc0 --- /dev/null +++ b/dao/sysconfig/sysconfig.go @@ -0,0 +1,90 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package sysconfig + +import ( + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + TableName = `sys_config` +) + +type ( + SysConfigModel interface { + CreateSysConfigTable() error + DropSysConfigTable() error + GetSysConfigByName(name string) (info *SysConfig, err error) + CreateSysConfigInBatches(configs []*SysConfig) (rowsAffected int64, err error) + } + + defaultSysConfigModel struct { + table string + DB *gorm.DB + } + + SysConfig struct { + gorm.Model + Name string + Value string + ValueType string + Comment string + } +) + +func NewSysConfigModel(db *gorm.DB) SysConfigModel { + return &defaultSysConfigModel{ + table: TableName, + DB: db, + } +} + +func (*SysConfig) TableName() string { + return TableName +} + +func (m *defaultSysConfigModel) CreateSysConfigTable() error { + return m.DB.AutoMigrate(SysConfig{}) +} + +func (m *defaultSysConfigModel) DropSysConfigTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultSysConfigModel) GetSysConfigByName(name string) (config *SysConfig, err error) { + dbTx := m.DB.Table(m.table).Where("name = ?", name).Find(&config) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return config, nil +} + +func (m *defaultSysConfigModel) CreateSysConfigInBatches(configs []*SysConfig) (rowsAffected int64, err error) { + dbTx := m.DB.Table(m.table).CreateInBatches(configs, len(configs)) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } + if dbTx.RowsAffected == 0 { + return 0, types.DbErrFailToCreateSysconfig + } + return dbTx.RowsAffected, nil +} diff --git a/common/model/tx/failTx.go b/dao/tx/fail_tx.go similarity index 61% rename from common/model/tx/failTx.go rename to dao/tx/fail_tx.go index 784de9d9b..963b5f5b6 100644 --- 
a/common/model/tx/failTx.go +++ b/dao/tx/fail_tx.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,13 +18,13 @@ package tx import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" "gorm.io/gorm" - "github.com/bnb-chain/zkbas/errorcode" + "github.com/bnb-chain/zkbas/types" +) + +const ( + FailTxTableName = `fail_tx` ) type ( @@ -35,7 +35,6 @@ type ( } defaultFailTxModel struct { - sqlc.CachedConn table string DB *gorm.DB } @@ -57,53 +56,32 @@ type ( } ) -func NewFailTxModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) FailTxModel { +func NewFailTxModel(db *gorm.DB) FailTxModel { return &defaultFailTxModel{ - CachedConn: sqlc.NewConn(conn, c), - table: `fail_tx`, - DB: db, + table: FailTxTableName, + DB: db, } } func (*FailTx) TableName() string { - return `fail_tx` + return FailTxTableName } -/* - Func: CreateFailTxTable - Params: - Return: err error - Description: create txVerification fail table -*/ func (m *defaultFailTxModel) CreateFailTxTable() error { return m.DB.AutoMigrate(FailTx{}) } -/* - Func: DropFailTxTable - Params: - Return: err error - Description: drop txVerification fail table -*/ func (m *defaultFailTxModel) DropFailTxTable() error { return m.DB.Migrator().DropTable(m.table) } -/* - Func: CreateFailTx - Params: failTx *FailTx - Return: err error - Description: create fail txVerification -*/ func (m *defaultFailTxModel) CreateFailTx(failTx *FailTx) error { dbTx := m.DB.Table(m.table).Create(failTx) if dbTx.Error != nil { - logx.Errorf("[txVerification.CreateFailTx] %s", dbTx.Error.Error()) - return errorcode.DbErrSqlOperation + return types.DbErrSqlOperation } if dbTx.RowsAffected == 0 { - logx.Error("[txVerification.CreateFailTx] Create Invalid Fail Tx") - return errorcode.DbErrFailToCreateFailTx + return types.DbErrFailToCreateFailTx } return nil } diff --git a/dao/tx/tx.go b/dao/tx/tx.go new file mode 100644 index 000000000..9c0fc2016 --- /dev/null +++ b/dao/tx/tx.go @@ -0,0 +1,228 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package tx + +import ( + "sort" + "time" + + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/types" +) + +const ( + TxTableName = `tx` +) + +const ( + _ = iota + StatusPending + StatusSuccess + StatusFail +) + +type ( + TxModel interface { + CreateTxTable() error + DropTxTable() error + GetTxsTotalCount() (count int64, err error) + GetTxsList(limit int64, offset int64) (txList []*Tx, err error) + GetTxsListByAccountIndex(accountIndex int64, limit int64, offset int64) (txList []*Tx, err error) + GetTxsCountByAccountIndex(accountIndex int64) (count int64, err error) + GetTxsListByAccountIndexTxType(accountIndex int64, txType int64, limit int64, offset int64) (txList []*Tx, err error) + GetTxsCountByAccountIndexTxType(accountIndex int64, txType int64) (count int64, err error) + GetTxByHash(txHash string) (tx *Tx, err error) + GetTxById(id int64) (tx *Tx, err error) + GetTxsTotalCountBetween(from, to time.Time) (count int64, err error) + GetDistinctAccountsCountBetween(from, to time.Time) (count int64, err error) + } + + defaultTxModel struct { + table string + DB *gorm.DB + } + + Tx struct { + gorm.Model + TxHash string `gorm:"uniqueIndex"` + TxType int64 + GasFee string + GasFeeAssetId int64 + TxStatus int64 + BlockHeight int64 `gorm:"index"` + BlockId int64 `gorm:"index"` + StateRoot string + NftIndex int64 + PairIndex int64 + CollectionId int64 + AssetId int64 + TxAmount string + NativeAddress string + TxInfo string + TxDetails []*TxDetail `gorm:"foreignKey:TxId"` + ExtraInfo string + Memo string + AccountIndex int64 + Nonce int64 + ExpiredAt int64 + TxIndex int64 + } +) + +func NewTxModel(db *gorm.DB) TxModel { + return &defaultTxModel{ + table: TxTableName, + DB: db, + } +} + +func (*Tx) TableName() string { + return TxTableName +} + +func (m *defaultTxModel) CreateTxTable() error { + return m.DB.AutoMigrate(Tx{}) +} + +func (m *defaultTxModel) DropTxTable() error { + return m.DB.Migrator().DropTable(m.table) +} + +func (m *defaultTxModel) GetTxsTotalCount() (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("deleted_at is NULL").Count(&count) + if dbTx.Error != nil { + if dbTx.Error == types.DbErrNotFound { + return 0, nil + } + return 0, types.DbErrSqlOperation + } + return count, nil +} + +func (m *defaultTxModel) GetTxsList(limit int64, offset int64) (txList []*Tx, err error) { + dbTx := m.DB.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("created_at desc").Find(&txList) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txList, nil +} + +func (m *defaultTxModel) GetTxsListByAccountIndex(accountIndex int64, limit int64, offset int64) (txList []*Tx, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Limit(int(limit)).Offset(int(offset)).Order("created_at desc").Find(&txList) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txList, nil +} + +func (m *defaultTxModel) GetTxsCountByAccountIndex(accountIndex int64) (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultTxModel) GetTxsListByAccountIndexTxType(accountIndex int64, txType int64, limit int64, offset int64) (txList []*Tx, err error) { + dbTx := 
m.DB.Table(m.table).Where("account_index = ? and tx_type = ?", accountIndex, txType).Limit(int(limit)).Offset(int(offset)).Order("created_at desc").Find(&txList) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + return txList, nil +} + +func (m *defaultTxModel) GetTxsCountByAccountIndexTxType(accountIndex int64, txType int64) (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ? and tx_type = ?", accountIndex, txType).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultTxModel) GetTxByHash(txHash string) (tx *Tx, err error) { + var txForeignKeyColumn = `TxDetails` + + dbTx := m.DB.Table(m.table).Where("tx_hash = ?", txHash).Find(&tx) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + err = m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) + if err != nil { + return nil, err + } + // re-order tx details + sort.SliceStable(tx.TxDetails, func(i, j int) bool { + return tx.TxDetails[i].Order < tx.TxDetails[j].Order + }) + + return tx, nil +} + +func (m *defaultTxModel) GetTxById(id int64) (tx *Tx, err error) { + var txForeignKeyColumn = `TxDetails` + + dbTx := m.DB.Table(m.table).Where("id = ?", id).Find(&tx) + if dbTx.Error != nil { + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound + } + err = m.DB.Model(&tx).Association(txForeignKeyColumn).Find(&tx.TxDetails) + if err != nil { + return nil, err + } + // re-order tx details + sort.SliceStable(tx.TxDetails, func(i, j int) bool { + return tx.TxDetails[i].Order < tx.TxDetails[j].Order + }) + + return tx, nil +} + +func (m *defaultTxModel) GetTxsTotalCountBetween(from, to time.Time) (count int64, err error) { + dbTx := m.DB.Table(m.table).Where("created_at BETWEEN ? AND ?", from, to).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} + +func (m *defaultTxModel) GetDistinctAccountsCountBetween(from, to time.Time) (count int64, err error) { + dbTx := m.DB.Raw("SELECT account_index FROM tx WHERE created_at BETWEEN ? AND ? AND account_index != -1 GROUP BY account_index", from, to).Count(&count) + if dbTx.Error != nil { + return 0, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return 0, nil + } + return count, nil +} diff --git a/common/model/tx/txDetail.go b/dao/tx/txdetail.go similarity index 50% rename from common/model/tx/txDetail.go rename to dao/tx/txdetail.go index 459e5a9cb..df427d4b2 100644 --- a/common/model/tx/txDetail.go +++ b/dao/tx/txdetail.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,25 +18,24 @@ package tx import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/core/stores/sqlc" - "github.com/zeromicro/go-zero/core/stores/sqlx" + "sort" + "gorm.io/gorm" - "github.com/bnb-chain/zkbas/errorcode" + "github.com/bnb-chain/zkbas/types" ) +const TxDetailTableName = `tx_detail` + type ( TxDetailModel interface { CreateTxDetailTable() error DropTxDetailTable() error - GetTxDetailsByAccountName(name string) (txDetails []*TxDetail, err error) - UpdateTxDetail(detail *TxDetail) error + GetTxDetailByAccountIndex(accountIndex int64) (txDetails []*TxDetail, err error) + GetTxIdsByAccountIndex(accountIndex int64) (txIds []int64, err error) } defaultTxDetailModel struct { - sqlc.CachedConn table string DB *gorm.DB } @@ -57,11 +56,10 @@ type ( } ) -func NewTxDetailModel(conn sqlx.SqlConn, c cache.CacheConf, db *gorm.DB) TxDetailModel { +func NewTxDetailModel(db *gorm.DB) TxDetailModel { return &defaultTxDetailModel{ - CachedConn: sqlc.NewConn(conn, c), - table: TxDetailTableName, - DB: db, + table: TxDetailTableName, + DB: db, } } @@ -69,52 +67,33 @@ func (*TxDetail) TableName() string { return TxDetailTableName } -/* - Func: CreateTxDetailTable - Params: - Return: err error - Description: create txVerification detail table -*/ func (m *defaultTxDetailModel) CreateTxDetailTable() error { return m.DB.AutoMigrate(TxDetail{}) } -/* - Func: DropTxDetailTable - Params: - Return: err error - Description: drop txVerification detail table -*/ func (m *defaultTxDetailModel) DropTxDetailTable() error { return m.DB.Migrator().DropTable(m.table) } -/* - Func: GetTxDetailsByAccountName - Params: name string - Return: txDetails []*TxDetail, err error - Description: GetTxDetailsByAccountName -*/ -func (m *defaultTxDetailModel) GetTxDetailsByAccountName(name string) (txDetails []*TxDetail, err error) { - dbTx := m.DB.Table(m.table).Where("account_name = ?", name).Find(&txDetails) +func (m *defaultTxDetailModel) GetTxDetailByAccountIndex(accountIndex int64) (txDetails []*TxDetail, err error) { + dbTx := m.DB.Table(m.table).Where("account_index = ?", accountIndex).Find(&txDetails) if dbTx.Error != nil { - logx.Errorf("fail to get tx by account: %s, error: %s", name, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation + return nil, types.DbErrSqlOperation } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound + return nil, types.DbErrNotFound } return txDetails, nil } -func (m *defaultTxDetailModel) UpdateTxDetail(detail *TxDetail) error { - dbTx := m.DB.Save(&detail) +func (m *defaultTxDetailModel) GetTxIdsByAccountIndex(accountIndex int64) (txIds []int64, err error) { + dbTx := m.DB.Table(m.table).Select("tx_id").Where("account_index = ?", accountIndex).Group("tx_id").Find(&txIds) if dbTx.Error != nil { - if dbTx.Error == errorcode.DbErrNotFound { - return nil - } else { - return dbTx.Error - } - } else { - return nil + return nil, types.DbErrSqlOperation + } else if dbTx.RowsAffected == 0 { + return nil, types.DbErrNotFound } + sort.Slice(txIds, func(i, j int) bool { + return txIds[i] > txIds[j] + }) + return txIds, nil } diff --git a/deploy-local.sh b/deploy-local.sh index 3caf92d75..9df9656dc 100644 --- a/deploy-local.sh +++ b/deploy-local.sh @@ -1,7 +1,16 @@ #!/bin/bash -# config +# Preparation: Install following tools when you first run this script!!! 
# GOBIN=/usr/local/bin/ go install github.com/zeromicro/go-zero/tools/goctl@latest +# yum install jq -y +# npm install pm2 -g +# You should install nodejs above v14 + +# Attention: Set the following variables to the right one before running!!! +DEPLOY_PATH=~/zkbas-deploy +KEY_PATH=~/.zkbas +ZKBAS_REPO_PATH=$(cd `dirname $0`; pwd) +CMC_TOKEN=cfce503f-fake-fake-fake-bbab5257dac8 export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin echo '0. stop old database/redis and docker run new database/redis' @@ -9,37 +18,35 @@ pm2 delete all docker kill $(docker ps -q) docker rm $(docker ps -a -q) docker run -d --name zkbasredis -p 6379:6379 redis -docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=Zkbas@123 -e POSTGRES_USER=postgres -e POSTGRES_DB=zkbas -d postgres +docker run --name postgres -p 5432:5432 -e PGDATA=/var/lib/postgresql/pgdata -e POSTGRES_PASSWORD=Zkbas@123 -e POSTGRES_USER=postgres -e POSTGRES_DB=zkbas -d postgres echo '1. basic config and git clone repos' -#yum install jq -y -#npm install pm2 -g export PATH=$PATH:/usr/local/go/bin/ cd ~ -rm -rf ~/zkbas-deploy-bak && mv ~/zkbas-deploy ~/zkbas-deploy-bak -mkdir zkbas-deploy && cd zkbas-deploy -git clone --branch develop https://github.com/bnb-chain/zkbas.git +rm -rf ${DEPLOY_PATH}-bak && mv ${DEPLOY_PATH} ${DEPLOY_PATH}-bak +mkdir -p ${DEPLOY_PATH} && cd ${DEPLOY_PATH} git clone --branch develop https://github.com/bnb-chain/zkbas-contract.git git clone --branch develop https://github.com/bnb-chain/zkbas-crypto.git +cp -r ${ZKBAS_REPO_PATH} ${DEPLOY_PATH} flag=$1 if [ $flag = "new" ]; then echo "new crypto env" echo '2. start generate zkbas.vk and zkbas.pk' - cd ~/zkbas-deploy + cd ${DEPLOY_PATH} cd zkbas-crypto && go test ./legend/circuit/bn254/solidity -timeout 99999s -run TestExportSol - cd ~/zkbas-deploy - sudo mkdir /home/.zkbas - cp -r ./zkbas-crypto/legend/circuit/bn254/solidity/* /home/.zkbas + cd ${DEPLOY_PATH} + mkdir -p $KEY_PATH + cp -r ./zkbas-crypto/legend/circuit/bn254/solidity/* $KEY_PATH fi echo '3. start verify_parse for ZkbasVerifier' -cd ~/zkbas-deploy/zkbas/service/cronjob/prover/ -python3 verifier_parse.py /home/.zkbas/ZkbasVerifier1.sol,/home/.zkbas/ZkbasVerifier10.sol 1,10 ~/zkbas-deploy/zkbas-contract/contracts/ZkbasVerifier.sol +cd ${DEPLOY_PATH}/zkbas/service/prover/ +python3 verifier_parse.py ${KEY_PATH}/ZkbasVerifier1.sol,${KEY_PATH}/ZkbasVerifier10.sol 1,10 ${DEPLOY_PATH}/zkbas-contract/contracts/ZkbasVerifier.sol @@ -51,49 +58,52 @@ echo 'latest block number = ' $blockNumber echo '4-2. deploy contracts, register and deposit on BSC Testnet' -cd ~/zkbas-deploy -cd ./zkbas-contract && sudo npm install +cd ${DEPLOY_PATH} +cd ./zkbas-contract && npm install npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deploy.js -echo 'Recorded latest contract addresses into ~/zkbas-deploy/zkbas-contract/info/addresses.json' +echo 'Recorded latest contract addresses into ${DEPLOY_PATH}/zkbas-contract/info/addresses.json' npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/register.js npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deposit.js echo '5. 
modify deployed contracts into zkbas config' -cd ~/zkbas-deploy/zkbas/common/model/init/ +cd ${DEPLOY_PATH}/zkbas/tools/dbinitializer/ cp -r ./contractaddr.yaml.example ./contractaddr.yaml -ZkbasContractAddr=`cat ~/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy'` -sed -i "s/ZkbasProxy: .*/ZkbasProxy: ${ZkbasContractAddr}/" ~/zkbas-deploy/zkbas/common/model/init/contractaddr.yaml +ZkbasContractAddr=`cat ${DEPLOY_PATH}/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy'` +sed -i -e "s/ZkbasProxy: .*/ZkbasProxy: ${ZkbasContractAddr}/" ${DEPLOY_PATH}/zkbas/tools/dbinitializer/contractaddr.yaml -GovernanceContractAddr=`cat ~/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.governance'` -sed -i "s/Governance: .*/Governance: ${GovernanceContractAddr}/" ~/zkbas-deploy/zkbas/common/model/init/contractaddr.yaml +GovernanceContractAddr=`cat ${DEPLOY_PATH}/zkbas-contract/info/addresses.json | jq -r '.governance'` +sed -i -e "s/Governance: .*/Governance: ${GovernanceContractAddr}/" ${DEPLOY_PATH}/zkbas/tools/dbinitializer/contractaddr.yaml -sed -i "s/BSC_Test_Network_RPC *= .*/BSC_Test_Network_RPC = \"https\:\/\/data-seed-prebsc-1-s1.binance.org:8545\"/" ~/zkbas-deploy/zkbas/common/model/init/init.go +sed -i -e "s/BSCTestNetworkRPC *= .*/BSCTestNetworkRPC = \"https\:\/\/data-seed-prebsc-1-s1.binance.org:8545\"/" ${DEPLOY_PATH}/zkbas/tools/dbinitializer/main.go -cd ~/zkbas-deploy/zkbas/ -make app && make globalRPCProto -cd ~/zkbas-deploy/zkbas && go mod tidy +cd ${DEPLOY_PATH}/zkbas/ +make api-server +cd ${DEPLOY_PATH}/zkbas && go mod tidy echo "6. init tables on database" -sed -i "s/password=.* dbname/password=Zkbas@123 dbname/" ~/zkbas-deploy/zkbas/common/model/basic/connection.go -cd ~/zkbas-deploy/zkbas/common/model/init/ +sed -i -e "s/password=.* dbname/password=Zkbas@123 dbname/" ${DEPLOY_PATH}/zkbas/tools/dbinitializer/main.go +cd ${DEPLOY_PATH}/zkbas/tools/dbinitializer/ go run . -cd ~/zkbas-deploy/zkbas/ -make app && make globalRPCProto +cd ${DEPLOY_PATH}/zkbas/ +make api-server + + +sleep 30s echo "7. run prover" echo -e " -Name: prover.cronjob +Name: prover Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -102,24 +112,26 @@ CacheRedis: Type: node KeyPath: - ProvingKeyPath: [/home/.zkbas/zkbas1.pk, /home/.zkbas/zkbas10.pk] - VerifyingKeyPath: [/home/.zkbas/zkbas1.vk, /home/.zkbas/zkbas10.vk] - KeyTxCounts: [1, 10] + ProvingKeyPath: [${KEY_PATH}/zkbas1.pk, ${KEY_PATH}/zkbas10.pk] + VerifyingKeyPath: [${KEY_PATH}/zkbas1.vk, ${KEY_PATH}/zkbas10.vk] + +BlockConfig: + OptionalBlockSizes: [1, 10] TreeDB: Driver: memorydb -" > ~/zkbas-deploy/zkbas/service/cronjob/prover/etc/prover.yaml +" > ${DEPLOY_PATH}/zkbas/service/prover/etc/config.yaml -cd ~/zkbas-deploy/zkbas/service/cronjob/prover/ -pm2 start --name prover "go run ./prover.go" +cd ${DEPLOY_PATH}/zkbas/service/prover/ +pm2 start --name prover "go run ./main.go" -echo "8. run witnessGenerator" +echo "8. 
run witness" echo -e " -Name: witnessGenerator.cronjob +Name: witness Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -130,20 +142,16 @@ CacheRedis: TreeDB: Driver: memorydb -" > ~/zkbas-deploy/zkbas/service/cronjob/witnessGenerator/etc/witnessGenerator.yaml - -cd ~/zkbas-deploy/zkbas/service/cronjob/witnessGenerator/ -pm2 start --name witnessGenerator "go run ./witnessgenerator.go" - - - +" > ${DEPLOY_PATH}/zkbas/service/witness/etc/config.yaml +cd ${DEPLOY_PATH}/zkbas/service/witness/ +pm2 start --name witness "go run ./main.go" echo "9. run monitor" echo -e " -Name: monitor.cronjob +Name: monitor Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -155,25 +163,23 @@ CacheRedis: ChainConfig: NetworkRPCSysConfigName: "BscTestNetworkRpc" #NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" - GovernanceContractAddrSysConfigName: "GovernanceContract" StartL1BlockHeight: $blockNumber - PendingBlocksCount: 0 + ConfirmBlocksCount: 0 MaxHandledBlocksCount: 5000 TreeDB: Driver: memorydb -" > ~/zkbas-deploy/zkbas/service/cronjob/monitor/etc/monitor.yaml +" > ${DEPLOY_PATH}/zkbas/service/monitor/etc/config.yaml -cd ~/zkbas-deploy/zkbas/service/cronjob/monitor/ -pm2 start --name monitor "go run ./monitor.go" +cd ${DEPLOY_PATH}/zkbas/service/monitor/ +pm2 start --name monitor "go run ./main.go" echo "10. run committer" echo -e " -Name: committer.cronjob +Name: committer Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -182,23 +188,21 @@ CacheRedis: - Host: 127.0.0.1:6379 Type: node -KeyPath: - KeyTxCounts: [1, 10] +BlockConfig: + OptionalBlockSizes: [1, 10] TreeDB: Driver: memorydb -" >> ~/zkbas-deploy/zkbas/service/cronjob/committer/etc/committer.yaml - -cd ~/zkbas-deploy/zkbas/service/cronjob/committer/ -pm2 start --name committer "go run ./committer.go" - +" > ${DEPLOY_PATH}/zkbas/service/committer/etc/config.yaml +cd ${DEPLOY_PATH}/zkbas/service/committer/ +pm2 start --name committer "go run ./main.go" echo "11. run sender" echo -e " -Name: sender.cronjob +Name: sender Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -210,58 +214,32 @@ CacheRedis: ChainConfig: NetworkRPCSysConfigName: "BscTestNetworkRpc" #NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" + ConfirmBlocksCount: 0 MaxWaitingTime: 120 MaxBlockCount: 4 Sk: "acbaa269bd7573ff12361be4b97201aef019776ea13384681d4e5ba6a88367d9" GasLimit: 5000000 - L1ChainId: \"97\" TreeDB: Driver: memorydb -" > ~/zkbas-deploy/zkbas/service/cronjob/sender/etc/sender.yaml +" > ${DEPLOY_PATH}/zkbas/service/sender/etc/config.yaml -cd ~/zkbas-deploy/zkbas/service/cronjob/sender/ -pm2 start --name sender "go run ./sender.go" +cd ${DEPLOY_PATH}/zkbas/service/sender/ +pm2 start --name sender "go run ./main.go" - - - -echo "12. run globalRPC" +echo "12. 
run api-server" echo -e " -Name: global.rpc -ListenOn: 127.0.0.1:8080 - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Type: node - -LogConf: - ServiceName: global.rpc - Mode: console - Path: ./log/globalrpc - StackCooldownMillis: 500 - -TreeDB: - Driver: memorydb -" > ~/zkbas-deploy/zkbas/service/rpc/globalRPC/etc/config.yaml - -cd ~/zkbas-deploy/zkbas/service/rpc/globalRPC/ -pm2 start --name globalRPC "go run ./globalrpc.go" - - - -echo "13. run app" - -echo -e " -Name: appService-api +Name: api-server Host: 0.0.0.0 Port: 8888 + +Prometheus: + Host: 0.0.0.0 + Port: 9091 + Path: /metrics + Postgres: DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable @@ -269,19 +247,24 @@ CacheRedis: - Host: 127.0.0.1:6379 Type: node -GlobalRpc: - Endpoints: - - 127.0.0.1:8080 - LogConf: - ServiceName: appservice + ServiceName: api-server Mode: console - Path: ./log/appService + Path: ./log/api-server StackCooldownMillis: 500 - -TreeDB: - Driver: memorydb - " > ~/zkbas-deploy/zkbas/service/api/app/etc/app.yaml - -cd ~/zkbas-deploy/zkbas/service/api/app -pm2 start --name app "go run ./app.go" + Level: error + +CoinMarketCap: + Url: https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?symbol= + Token: ${CMC_TOKEN} + +MemCache: + AccountExpiration: 200 + AssetExpiration: 600 + BlockExpiration: 400 + TxExpiration: 400 + PriceExpiration: 200 + " > ${DEPLOY_PATH}/zkbas/service/apiserver/etc/config.yaml + +cd ${DEPLOY_PATH}/zkbas/service/apiserver +pm2 start --name api-server "go run ./server.go" diff --git a/deploy-qa.sh b/deploy-qa.sh new file mode 100644 index 000000000..21a8a4fe2 --- /dev/null +++ b/deploy-qa.sh @@ -0,0 +1,285 @@ +#!/bin/bash + +# config +# GOBIN=/usr/local/bin/ go install github.com/zeromicro/go-zero/tools/goctl@latest + +export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin +echo '0. stop old database/redis and docker run new database/redis' +pm2 delete all +docker kill $(docker ps -q) +docker rm $(docker ps -a -q) +docker run -d --name zkbasredis -p 6379:6379 redis +docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=Zkbas@123 -e POSTGRES_USER=postgres -e POSTGRES_DB=zkbas -d postgres + + +echo '1. basic config and git clone repos' +#yum install jq -y +#npm install pm2 -g +export PATH=$PATH:/usr/local/go/bin/ +cd ~ +rm -rf ~/zkbas-deploy-bak && mv ~/zkbas-deploy ~/zkbas-deploy-bak +mkdir zkbas-deploy && cd zkbas-deploy +git clone --branch develop https://github.com/bnb-chain/zkbas-contract.git +git clone --branch develop https://github.com/bnb-chain/zkbas-crypto.git + +# mv /home/ec2-user/zkbas ~/zkbas-deploy +branch=$1 +git clone --branch $branch https://github.com/bnb-chain/zkbas.git + +echo "new crypto env" +echo '2. start generate zkbas.vk and zkbas.pk' +cd ~/zkbas-deploy +cd zkbas-crypto && go test ./legend/circuit/bn254/solidity -timeout 99999s -run TestExportSol +cd ~/zkbas-deploy +sudo mkdir /home/.zkbas +cp -r ./zkbas-crypto/legend/circuit/bn254/solidity/* /home/.zkbas + + +echo '3. start verify_parse for ZkbasVerifier' +cd ~/zkbas-deploy/zkbas/service/cronjob/prover/ +python3 verifier_parse.py /home/.zkbas/ZkbasVerifier1.sol,/home/.zkbas/ZkbasVerifier10.sol 1,10 ~/zkbas-deploy/zkbas-contract/contracts/ZkbasVerifier.sol + + + +echo '4-1. 
get latest block number' +hexNumber=`curl -X POST 'https://data-seed-prebsc-1-s1.binance.org:8545' --header 'Content-Type: application/json' --data-raw '{"jsonrpc":"2.0", "method":"eth_blockNumber", "params": [], "id":1 }' | jq -r '.result'` +blockNumber=`echo $((${hexNumber}))` +echo 'latest block number = ' $blockNumber + + + +echo '4-2. deploy contracts, register and deposit on BSC Testnet' +cd ~/zkbas-deploy +cd ./zkbas-contract && sudo npm install +npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deploy.js +echo 'Recorded latest contract addresses into ~/zkbas-deploy/zkbas-contract/info/addresses.json' + +npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/register.js +npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deposit.js + + +echo '5. modify deployed contracts into zkbas config' +cd ~/zkbas-deploy/zkbas/tools/dbinitializer/ +cp -r ./contractaddr.yaml.example ./contractaddr.yaml + +ZkbasContractAddr=`cat ~/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy'` +sed -i "s/ZkbasProxy: .*/ZkbasProxy: ${ZkbasContractAddr}/" ~/zkbas-deploy/zkbas/tools/dbinitializer/contractaddr.yaml + +GovernanceContractAddr=`cat ~/zkbas-deploy/zkbas-contract/info/addresses.json | jq -r '.governance'` +sed -i "s/Governance: .*/Governance: ${GovernanceContractAddr}/" ~/zkbas-deploy/zkbas/tools/dbinitializer/contractaddr.yaml + +sed -i "s/BSC_Test_Network_RPC *= .*/BSC_Test_Network_RPC = \"https\:\/\/data-seed-prebsc-1-s1.binance.org:8545\"/" ~/zkbas-deploy/zkbas/tools/dbinitializer/main.go + + + +cd ~/zkbas-deploy/zkbas/ +make app && make globalRPCProto +cd ~/zkbas-deploy/zkbas && go mod tidy + + + +echo "6. init tables on database" +sed -i "s/password=.* dbname/password=Zkbas@123 dbname/" ~/zkbas-deploy/zkbas/tools/dbinitializer/main.go +cd ~/zkbas-deploy/zkbas/tools/dbinitializer/ +go run . + + +cd ~/zkbas-deploy/zkbas/ +make app && make globalRPCProto + +sleep 30s + +echo "7. run prover" + +echo -e " +Name: prover.cronjob +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +KeyPath: + ProvingKeyPath: [/home/.zkbas/zkbas1.pk, /home/.zkbas/zkbas10.pk] + VerifyingKeyPath: [/home/.zkbas/zkbas1.vk, /home/.zkbas/zkbas10.vk] + KeyTxCounts: [1, 10] + +TreeDB: + Driver: memorydb +" > ~/zkbas-deploy/zkbas/service/cronjob/prover/etc/prover.yaml + +cd ~/zkbas-deploy/zkbas/service/cronjob/prover/ +pm2 start --name prover "go run ./prover.go" + + + + +echo "8. run witnessGenerator" + +echo -e " +Name: witnessGenerator.cronjob + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +TreeDB: + Driver: memorydb +" > ~/zkbas-deploy/zkbas/service/cronjob/witnessGenerator/etc/witnessGenerator.yaml + +cd ~/zkbas-deploy/zkbas/service/cronjob/witnessGenerator/ +pm2 start --name witnessGenerator "go run ./witnessgenerator.go" + + + + + + +echo "9. 
run monitor" + +echo -e " +Name: monitor.cronjob + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +ChainConfig: + NetworkRPCSysConfigName: "BscTestNetworkRpc" + #NetworkRPCSysConfigName: "LocalTestNetworkRpc" + ZkbasContractAddrSysConfigName: "ZkbasContract" + GovernanceContractAddrSysConfigName: "GovernanceContract" + StartL1BlockHeight: $blockNumber + PendingBlocksCount: 0 + MaxHandledBlocksCount: 5000 + +TreeDB: + Driver: memorydb +" > ~/zkbas-deploy/zkbas/service/cronjob/monitor/etc/monitor.yaml + +cd ~/zkbas-deploy/zkbas/service/cronjob/monitor/ +pm2 start --name monitor "go run ./monitor.go" + + + +echo "10. run committer" + +echo -e " +Name: committer.cronjob + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +KeyPath: + KeyTxCounts: [1, 10] + +TreeDB: + Driver: memorydb +" >> ~/zkbas-deploy/zkbas/service/cronjob/committer/etc/committer.yaml + +cd ~/zkbas-deploy/zkbas/service/cronjob/committer/ +pm2 start --name committer "go run ./committer.go" + + + + +echo "11. run sender" + +echo -e " +Name: sender.cronjob + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +ChainConfig: + NetworkRPCSysConfigName: "BscTestNetworkRpc" + #NetworkRPCSysConfigName: "LocalTestNetworkRpc" + ZkbasContractAddrSysConfigName: "ZkbasContract" + MaxWaitingTime: 120 + MaxBlockCount: 4 + Sk: "acbaa269bd7573ff12361be4b97201aef019776ea13384681d4e5ba6a88367d9" + GasLimit: 5000000 + +TreeDB: + Driver: memorydb +" > ~/zkbas-deploy/zkbas/service/cronjob/sender/etc/sender.yaml + +cd ~/zkbas-deploy/zkbas/service/cronjob/sender/ +pm2 start --name sender "go run ./sender.go" + + + + + +echo "12. run globalRPC" + +echo -e " +Name: global.rpc +ListenOn: 127.0.0.1:8080 + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +LogConf: + ServiceName: global.rpc + Mode: console + Path: ./log/globalrpc + StackCooldownMillis: 500 + +TreeDB: + Driver: memorydb +" > ~/zkbas-deploy/zkbas/service/rpc/globalRPC/etc/config.yaml + +cd ~/zkbas-deploy/zkbas/service/rpc/globalRPC/ +pm2 start --name globalRPC "go run ./globalrpc.go" + + + +echo "13. run app" + +echo -e " +Name: appService-api +Host: 0.0.0.0 +Port: 8888 +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + Type: node + +GlobalRpc: + Endpoints: + - 127.0.0.1:8080 + +LogConf: + ServiceName: appservice + Mode: console + Path: ./log/appService + StackCooldownMillis: 500 + +TreeDB: + Driver: memorydb + " > ~/zkbas-deploy/zkbas/service/api/app/etc/app.yaml + +cd ~/zkbas-deploy/zkbas/service/api/app +pm2 start --name app "go run ./app.go" diff --git a/deployment/README.md b/deployment/README.md new file mode 100644 index 000000000..7875745ef --- /dev/null +++ b/deployment/README.md @@ -0,0 +1,16 @@ +## Local Setup + +#### Docker-Compose + +Start... +```bash +blockNr=$(bash ./deployment/tool/tool.sh blockHeight) +bash ./deployment/tool/tool.sh all new +bash ./deployment/docker-compose/docker-compose.sh up $blockNr +``` + +Stop... 
+```bash +bash ./deployment/docker-compose/docker-compose.sh down +``` + diff --git a/deployment/docker-compose/docker-compose.sh b/deployment/docker-compose/docker-compose.sh new file mode 100644 index 000000000..e4dc420ba --- /dev/null +++ b/deployment/docker-compose/docker-compose.sh @@ -0,0 +1,170 @@ +#!/usr/bin/env bash +WORKDIR=$(cd `dirname $0`; pwd) +export BASEDIR=$WORKDIR/.. +export CONFIG_PATH=$WORKDIR/../configs +export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin:/usr/local/bin + +mkdir -p $CONFIG_PATH +set -a +. $WORKDIR/docker.env +set +a + + +function prepareConfigs() { +if [ -z $1 ] ; then + echo "invalid block height" + exit 1 +fi + +BLOCK_NUMBER=$1 + +echo -e " +Name: prover +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +KeyPath: + ProvingKeyPath: [/server/.zkbas/zkbas1.pk, /server/.zkbas/zkbas10.pk] + VerifyingKeyPath: [/server/.zkbas/zkbas1.vk, /server/.zkbas/zkbas10.vk] + +BlockConfig: + OptionalBlockSizes: [1, 10] +" > ${CONFIG_PATH}/prover.yaml + +echo -e " +Name: witness + +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +TreeDB: + Driver: memorydb +" > ${CONFIG_PATH}/witness.yaml + +echo -e " +Name: monitor + +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +ChainConfig: + NetworkRPCSysConfigName: \"BscTestNetworkRpc\" + #NetworkRPCSysConfigName: \"LocalTestNetworkRpc\" + StartL1BlockHeight: $BLOCK_NUMBER + ConfirmBlocksCount: 0 + MaxHandledBlocksCount: 5000 +" > ${CONFIG_PATH}/monitor.yaml + +echo -e " +Name: committer + +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +BlockConfig: + OptionalBlockSizes: [1, 10] + +TreeDB: + Driver: memorydb +" > ${CONFIG_PATH}/committer.yaml + +echo -e " +Name: sender + +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +ChainConfig: + NetworkRPCSysConfigName: \"BscTestNetworkRpc\" + #NetworkRPCSysConfigName: \"LocalTestNetworkRpc\" + ConfirmBlocksCount: 0 + MaxWaitingTime: 120 + MaxBlockCount: 4 + Sk: \"$SK\" + GasLimit: 5000000 + +" > ${CONFIG_PATH}/sender.yaml + +echo -e " +Name: api-server +Host: 0.0.0.0 +Port: 8888 + +Prometheus: + Host: 0.0.0.0 + Port: 9091 + Path: /metrics + +Postgres: + DataSource: host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +LogConf: + ServiceName: api-server + Mode: console + Path: ./log/api-server + StackCooldownMillis: 500 + Level: error + +CoinMarketCap: + Url: $CMC_URL + Token: $CMC_TOKEN + +MemCache: + AccountExpiration: 200 + AssetExpiration: 600 + BlockExpiration: 400 + TxExpiration: 400 + PriceExpiration: 200 + +" > ${CONFIG_PATH}/apiserver.yaml + +} + +function up() { + cd $WORKDIR + docker rm -f $(docker ps -aq) + docker-compose up -d +} + +function down() { + cd $WORKDIR + docker-compose down +} + +CMD=$1 +case ${CMD} in +up) + prepareConfigs $2 + up + ;; +down) + down + ;; +*) + echo "Usage: docker-compose.sh up 
\$block_number | down" + ;; +esac \ No newline at end of file diff --git a/deployment/docker-compose/docker-compose.yaml b/deployment/docker-compose/docker-compose.yaml new file mode 100644 index 000000000..d6eaf13fc --- /dev/null +++ b/deployment/docker-compose/docker-compose.yaml @@ -0,0 +1,131 @@ +version: '3.4' +services: + initializer: + container_name: initializer + image: ghcr.io/bnb-chain/zkbas + restart: on-failure + command: + - db + - initialize + - --dsn + - host=database user=$DATABASE_USER password=$DATABASE_PASS dbname=$DATABASE_NAME port=5432 sslmode=disable + - --contractAddr + - /server/configs/contractaddr.yaml + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + database: + condition: service_healthy + redis: + condition: service_healthy + + prover: + container_name: prover + image: ghcr.io/bnb-chain/zkbas + restart: always + command: + - prover + - --config + - /server/configs/prover.yaml + volumes: + - $BASEDIR/configs:/server/configs + - $BASEDIR/.zkbas:/server/.zkbas + depends_on: + - initializer + + witness: + container_name: witness + image: ghcr.io/bnb-chain/zkbas + restart: always + command: + - witness + - --config + - /server/configs/witness.yaml + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + - initializer + + monitor: + container_name: monitor + image: ghcr.io/bnb-chain/zkbas + restart: always + command: + - monitor + - --config + - /server/configs/monitor.yaml + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + - initializer + + committer: + container_name: committer + image: ghcr.io/bnb-chain/zkbas + restart: always + command: + - committer + - --config + - /server/configs/committer.yaml + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + - initializer + + sender: + container_name: sender + image: $IMAGE_NAME:$IMAGE_VERSION + restart: always + command: + - sender + - --config + - /server/configs/sender.yaml + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + - initializer + + apiserver: + container_name: apiserver + image: ghcr.io/bnb-chain/zkbas + restart: always + command: + - apiserver + - --config + - /server/configs/apiserver.yaml + ports: + - 8888:8888 + - 9091:9091 + volumes: + - $BASEDIR/configs:/server/configs + depends_on: + - initializer + + database: + container_name: database + image: $POSTGRES_IMAGE:$POSTGRES_VERSION + restart: always + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + ports: + - 5432:5432 + environment: + POSTGRES_DB: $DATABASE_NAME + POSTGRES_USER: $DATABASE_USER + POSTGRES_PASSWORD: $DATABASE_PASS + + redis: + container_name: redis + image: $REDIS_IMAGE:$REDIS_VERSION + restart: always + healthcheck: + test: ["CMD", "redis-cli","ping"] + interval: 3s + timeout: 5s + retries: 5 + ports: + - 6379:6379 + \ No newline at end of file diff --git a/deployment/docker-compose/docker.env b/deployment/docker-compose/docker.env new file mode 100644 index 000000000..017ae2013 --- /dev/null +++ b/deployment/docker-compose/docker.env @@ -0,0 +1,12 @@ +IMAGE_NAME=ghcr.io/bnb-chain/zkbas +IMAGE_VERSION=latest +POSTGRES_IMAGE=postgres +POSTGRES_VERSION=latest +REDIS_IMAGE=redis +REDIS_VERSION=latest +DATABASE_USER=postgres +DATABASE_PASS=Zkbas@123 +DATABASE_NAME=zkbas +SK=acbaa269bd7573ff12361be4b97201aef019776ea13384681d4e5ba6a88367d9 +CMC_TOKEN=cfce503f-fake-fake-fake-bbab5257dac8 +CMC_URL=https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?symbol= \ No newline at end of file diff --git 
a/deployment/tool/generate_api.sh b/deployment/tool/generate_api.sh new file mode 100644 index 000000000..4e209db42 --- /dev/null +++ b/deployment/tool/generate_api.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin +API_SERVER=./service/apiserver +cd $API_SERVER && goctl api go -api server.api -dir . \ No newline at end of file diff --git a/deployment/tool/tool.sh b/deployment/tool/tool.sh new file mode 100644 index 000000000..779877907 --- /dev/null +++ b/deployment/tool/tool.sh @@ -0,0 +1,85 @@ +#!/usr/bin/env bash + +# Preparation: Install following tools when you first run this script!!! +# GOBIN=/usr/local/bin/ go install github.com/zeromicro/go-zero/tools/goctl@latest +# yum install jq -y +# You should install nodejs above v14 + +# Attention: Set the following variables to the right one before running!!! +WORKDIR=$(cd `dirname $0`/..; pwd) +KEY_PATH=${WORKDIR}/.zkbas +ZKBAS_CONTRACT_REPO=https://github.com/bnb-chain/zkbas-contract.git +ZKBAS_CRYPTO_REPO=https://github.com/bnb-chain/zkbas-crypto.git +BSC_TESTNET_ENDPOINT=https://data-seed-prebsc-1-s1.binance.org:8545 +ZKBAS_CRYPTO_BRANCH=$(cat $WORKDIR/../go.mod | grep github.com/bnb-chain/zkbas-crypto | awk -F" " '{print $2}') + +export PATH=$PATH:/usr/local/go/bin:/usr/local/go/bin:/root/go/bin + +function prepare() { + echo 'basic config and git clone repos ...' + rm -rf ${WORKDIR}/dependency + mkdir -p ${WORKDIR}/dependency && cd ${WORKDIR}/dependency + + git clone --branch develop ${ZKBAS_CONTRACT_REPO} + git clone --branch ${ZKBAS_CRYPTO_BRANCH} ${ZKBAS_CRYPTO_REPO} + + if [ ! -z $1 ] && [ "$1" = "new" ]; then + echo "new crypto env" + echo 'start generate zkbas.vk and zkbas.pk ...' + cd ${WORKDIR}/dependency/zkbas-crypto + go test ./legend/circuit/bn254/solidity -timeout 99999s -run TestExportSol + mkdir -p $KEY_PATH + cp -r ./legend/circuit/bn254/solidity/* $KEY_PATH/ + fi + + echo 'start verify_parse for ZkbasVerifier ...' + cd ${WORKDIR}/../service/prover/ + python3 verifier_parse.py ${KEY_PATH}/ZkbasVerifier1.sol,${KEY_PATH}/ZkbasVerifier10.sol 1,10 ${WORKDIR}/dependency/zkbas-contract/contracts/ZkbasVerifier.sol +} + +function getLatestBlockHeight() { + hexNumber=$(curl -X POST $BSC_TESTNET_ENDPOINT --header 'Content-Type: application/json' --data-raw '{"jsonrpc":"2.0", "method":"eth_blockNumber", "params": [], "id":1 }' | jq -r '.result') + blockNumber=`echo $((${hexNumber}))` + + echo $blockNumber +} + +function deployContracts() { + echo 'deploy contracts, register and deposit on BSC Testnet' + cd ${WORKDIR}/dependency/zkbas-contract && npm install + npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deploy.js + echo "Recorded latest contract addresses into ${WORKDIR}/dependency/zkbas-contract/info/addresses.json" + npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/register.js + npx hardhat --network BSCTestnet run ./scripts/deploy-keccak256/deposit.js + + mkdir -p ${WORKDIR}/configs/ + echo 'modify deployed contracts into zkbas config ...' 
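+    # Note (descriptive comment, not part of the original patch): the deploy.js run
+    # above records the freshly deployed proxy and governance addresses in
+    # info/addresses.json; the lines below copy the example config and patch those
+    # two addresses into configs/contractaddr.yaml via jq + sed.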
+ cp -r ${WORKDIR}/../tools/dbinitializer/contractaddr.yaml.example ${WORKDIR}/configs/contractaddr.yaml + + ZkbasContractAddr=`cat ${WORKDIR}/dependency/zkbas-contract/info/addresses.json | jq -r '.zkbasProxy'` + sed -i -e "s/ZkbasProxy: .*/ZkbasProxy: ${ZkbasContractAddr}/" ${WORKDIR}/configs/contractaddr.yaml + + GovernanceContractAddr=`cat ${WORKDIR}/dependency/zkbas-contract/info/addresses.json | jq -r '.governance'` + sed -i -e "s/Governance: .*/Governance: ${GovernanceContractAddr}/" ${WORKDIR}/configs/contractaddr.yaml +} + +CMD=$1 +case ${CMD} in +prepare) + prepare $2 + ;; +blockHeight) + blockNumber=$(getLatestBlockHeight) + echo "$blockNumber" + ;; +deployContracts) + deployContracts + ;; +all) + prepare $2 + deployContracts + ;; +*) + echo "Usage: tool.sh prepare | blockHeight | deployContracts | all " + ;; +esac diff --git a/docker/run_etcd-win10.bat b/docker/run_etcd-win10.bat deleted file mode 100644 index a63b2772c..000000000 --- a/docker/run_etcd-win10.bat +++ /dev/null @@ -1,21 +0,0 @@ -@echo off -unset https_proxy http_proxy all_proxy && \ - rm -rf /tmp/etcd-data.tmp && mkdir -p /tmp/etcd-data.tmp && \ - docker run --rm \ - -p 2379:2379 \ - --mount type=bind,source=/tmp/etcd-data.tmp,destination=/etcd-data \ - --name etcd-gcr-v3.5.1 \ - -d quay.io/coreos/etcd:v3.5.1 \ - /usr/local/bin/etcd \ - --name s1 \ - --data-dir /etcd-data \ - --listen-client-urls http://0.0.0.0:2379 \ - --advertise-client-urls http://0.0.0.0:2379 \ - --listen-peer-urls http://0.0.0.0:2380 \ - --initial-advertise-peer-urls http://0.0.0.0:2380 \ - --initial-cluster s1=http://0.0.0.0:2380 \ - --initial-cluster-token tkn \ - --initial-cluster-state new \ - --log-level info \ - --logger zap \ - --log-outputs stderr \ No newline at end of file diff --git a/docs/api_reference.md b/docs/api_reference.md new file mode 100644 index 000000000..35a78f852 --- /dev/null +++ b/docs/api_reference.md @@ -0,0 +1,942 @@ +# API Reference + +## Version: 1.0 + +### / + +#### GET +##### Summary + +Get status of zkbas + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Status](#status) | + +### /api/v1/account + +#### GET +##### Summary + +Get account by account's name, index or pk + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | name/index/pk | Yes | string | +| value | query | value of name/index/pk | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Account](#account) | + +### /api/v1/accountMempoolTxs + +#### GET +##### Summary + +Get mempool transactions of a specific account + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | account_name/account_index/account_pk | Yes | string | +| value | query | value of account_name/account_index/account_pk | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. 
| [MempoolTxs](#mempooltxs) | + +### /api/v1/accountNfts + +#### GET +##### Summary + +Get nfts of a specific account + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | account_name/account_index/account_pk | Yes | string | +| value | query | value of account_name/account_index/account_pk | Yes | string | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Nfts](#nfts) | + +### /api/v1/accountTxs + +#### GET +##### Summary + +Get transactions of a specific account + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | account_name/account_index/account_pk | Yes | string | +| value | query | value of account_name/account_index/account_pk | Yes | string | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Txs](#txs) | + +### /api/v1/accounts + +#### GET +##### Summary + +Get accounts + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Accounts](#accounts) | + +### /api/v1/assets + +#### GET +##### Summary + +Get assets + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Assets](#assets) | + +### /api/v1/block + +#### GET +##### Summary + +Get block by its height or commitment + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | height/commitment | Yes | string | +| value | query | value of height/commitment | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Block](#block) | + +### /api/v1/blockTxs + +#### GET +##### Summary + +Get transactions in a block + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | block_height/block_commitment | Yes | string | +| value | query | value of block_height/block_commitment | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. 
| [Txs](#txs) | + +### /api/v1/blocks + +#### GET +##### Summary + +Get blocks + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Blocks](#blocks) | + +### /api/v1/currencyPrice + +#### GET +##### Summary + +Get asset price by its symbol + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| by | query | symbol | Yes | string | +| value | query | value of symbol | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [CurrencyPrice](#currencyprice) | + +### /api/v1/currencyPrices + +#### GET +##### Summary + +Get assets' prices + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [CurrencyPrices](#currencyprices) | + +### /api/v1/currentHeight + +#### GET +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [CurrentHeight](#currentheight) | + +### /api/v1/gasAccount + +#### GET +##### Summary + +Get gas account, who will charge gas fees for transactions + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [GasAccount](#gasaccount) | + +### /api/v1/gasFee + +#### GET +##### Summary + +Get gas fee amount for using a specific asset as gas asset + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| asset_id | query | id of asset | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [GasFee](#gasfee) | + +### /api/v1/gasFeeAssets + +#### GET +##### Summary + +Get supported gas fee assets + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [GasFeeAssets](#gasfeeassets) | + +### /api/v1/layer2BasicInfo + +#### GET +##### Summary + +Get zkbas general info, including contract address, and count of transactions and active users + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Layer2BasicInfo](#layer2basicinfo) | + +### /api/v1/lpValue + +#### GET +##### Summary + +Get liquidity pool amount for a specific liquidity pair + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| pair_index | query | index of pair | Yes | integer | +| lp_amount | query | lp amount | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. 
| [LpValue](#lpvalue) | + +### /api/v1/maxOfferId + +#### GET +##### Summary + +Get max nft offer id for a specific account + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| account_index | query | index of account | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [MaxOfferId](#maxofferid) | + +### /api/v1/mempoolTxs + +#### GET +##### Summary + +Get mempool transactions + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [MempoolTxs](#mempooltxs) | + +### /api/v1/nextNonce + +#### GET +##### Summary + +Get next nonce + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| account_index | query | index of account | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [NextNonce](#nextnonce) | + +### /api/v1/pair + +#### GET +##### Summary + +Get liquidity pool info by its index + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| index | query | index of pair | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Pair](#pair) | + +### /api/v1/pairs + +#### GET +##### Summary + +Get liquidity pairs + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Pairs](#pairs) | + +### /api/v1/search + +#### GET +##### Summary + +Search with a specific keyword + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| keyword | query | keyword | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Search](#search) | + + +### /api/v1/swapAmount + +#### GET +##### Summary + +Get swap amount for a specific liquidity pair and in asset amount + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| pair_index | query | index of pair | Yes | integer | +| asset_id | query | id of asset | Yes | integer | +| asset_amount | query | amount of asset | Yes | string | +| is_from | query | is from asset | Yes | boolean (boolean) | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [SwapAmount](#swapamount) | + +### /api/v1/tx + +#### GET +##### Summary + +Get transaction by hash + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| hash | query | hash of tx | Yes | string | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. 
| [EnrichedTx](#enrichedtx) | + +### /api/v1/txs + +#### GET +##### Summary + +Get transactions + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| offset | query | offset, min 0 and max 100000 | Yes | integer | +| limit | query | limit, min 1 and max 100 | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [Txs](#txs) | + +### /api/v1/withdrawGasFee + +#### GET +##### Summary + +Get withdraw gas fee amount for using a specific asset as gas asset + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| asset_id | query | id of asset | Yes | integer | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [GasFee](#gasfee) | + +### /api/v1/sendTx + +#### POST +##### Summary + +Send raw transaction + +##### Parameters + +| Name | Located in | Description | Required | Schema | +| ---- | ---------- | ----------- | -------- | ---- | +| body | body | raw tx | Yes | [ReqSendTx](#reqsendtx) | + +##### Responses + +| Code | Description | Schema | +| ---- | ----------- | ------ | +| 200 | A successful response. | [TxHash](#txhash) | + +### Models + +#### Account + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| status | integer | | Yes | +| index | long | | Yes | +| name | string | | Yes | +| pk | string | | Yes | +| nonce | long | | Yes | +| assets | [ [AccountAsset](#accountasset) ] | | Yes | + +#### AccountAsset + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| id | integer | | Yes | +| name | string | | Yes | +| balance | string | | Yes | +| lp_amount | string | | Yes | + +#### Accounts + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| accounts | [ [SimpleAccount](#simpleaccount) ] | | Yes | + +#### Asset + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| id | integer | | Yes | +| name | string | | Yes | +| decimals | integer | | Yes | +| symbol | string | | Yes | +| address | string | | Yes | +| is_gas_asset | integer | | Yes | + +#### Assets + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| assets | [ [Asset](#asset) ] | | Yes | + +#### Block + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| commitment | string | | Yes | +| height | long | | Yes | +| state_root | string | | Yes | +| priority_operations | long | | Yes | +| pending_on_chain_operations_hash | string | | Yes | +| pending_on_chain_operations_pub_data | string | | Yes | +| committed_tx_hash | string | | Yes | +| committed_at | long | | Yes | +| verified_tx_hash | string | | Yes | +| verified_at | long | | Yes | +| txs | [ [Tx](#tx) ] | | Yes | +| status | long | | Yes | + +#### Blocks + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| blocks | [ [Block](#block) ] | | Yes | + +#### ContractAddress + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| name | string | | Yes | +| address | string | | Yes | + +#### CurrencyPrice + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| pair | string | | Yes | 
+| asset_id | integer | | Yes | +| price | string | | Yes | + +#### CurrencyPrices + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| currency_prices | [ [CurrencyPrice](#currencyprice) ] | | Yes | + +#### CurrentHeight + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| height | long | | Yes | + +#### EnrichedTx + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| | [Tx](#tx) | | No | +| committed_at | long | | Yes | +| verified_at | long | | Yes | +| executed_at | long | | Yes | + +#### GasAccount + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| status | long | | Yes | +| index | long | | Yes | +| name | string | | Yes | + +#### GasFee + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| gas_fee | string | | Yes | + +#### GasFeeAssets + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| assets | [ [Asset](#asset) ] | | Yes | + +#### Layer2BasicInfo + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| block_committed | long | | Yes | +| block_verified | long | | Yes | +| total_transaction_count | long | | Yes | +| yesterday_transaction_count | long | | Yes | +| today_transaction_count | long | | Yes | +| yesterday_active_user_count | long | | Yes | +| today_active_user_count | long | | Yes | +| contract_addresses | [ [ContractAddress](#contractaddress) ] | | Yes | + +#### LpValue + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| asset_a_id | integer | | Yes | +| asset_a_name | string | | Yes | +| asset_a_amount | string | | Yes | +| asset_b_id | integer | | Yes | +| asset_b_name | string | | Yes | +| asset_b_amount | string | | Yes | + +#### MaxOfferId + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| offer_id | long | | Yes | + +#### MempoolTxs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| mempool_txs | [ [Tx](#tx) ] | | Yes | + +#### NextNonce + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| nonce | long | | Yes | + +#### Nft + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| index | long | | Yes | +| creator_account_index | long | | Yes | +| owner_account_index | long | | Yes | +| content_hash | string | | Yes | +| l1_address | string | | Yes | +| l1_token_id | string | | Yes | +| creator_treasury_rate | long | | Yes | +| collection_id | long | | Yes | + +#### Nfts + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | long | | Yes | +| nfts | [ [Nft](#nft) ] | | Yes | + +#### Pair + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| index | integer | | Yes | +| asset_a_id | integer | | Yes | +| asset_a_name | string | | Yes | +| asset_a_amount | string | | Yes | +| asset_b_id | integer | | Yes | +| asset_b_name | string | | Yes | +| asset_b_amount | string | | Yes | +| fee_rate | long | | Yes | +| treasury_rate | long | | Yes | +| total_lp_amount | string | | Yes | + +#### Pairs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| pairs | [ [Pair](#pair) ] | | Yes | + +#### ReqGetAccount + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | 
-------- | +| by | string | | Yes | +| value | string | | Yes | + +#### ReqGetAccountMempoolTxs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | + +#### ReqGetAccountNfts + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | +| offset | [uint16](#uint16) | | Yes | +| limit | [uint16](#uint16) | | Yes | + +#### ReqGetAccountTxs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | +| offset | [uint16](#uint16) | | Yes | +| limit | [uint16](#uint16) | | Yes | + +#### ReqGetBlock + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | + +#### ReqGetBlockTxs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | + +#### ReqGetCurrencyPrice + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| by | string | | Yes | +| value | string | | Yes | + +#### ReqGetGasFee + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| asset_id | integer | | Yes | + +#### ReqGetLpValue + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| pair_index | integer | | Yes | +| lp_amount | string | | Yes | + +#### ReqGetMaxOfferId + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| account_index | integer | | Yes | + +#### ReqGetNextNonce + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| account_index | integer | | Yes | + +#### ReqGetPair + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| index | integer | | Yes | + +#### ReqGetRange + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| offset | integer | | Yes | +| limit | integer | | Yes | + +#### ReqGetSwapAmount + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| pair_index | integer | | Yes | +| asset_id | integer | | Yes | +| asset_amount | string | | Yes | +| is_from | boolean (boolean) | | Yes | + +#### ReqGetTx + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| hash | string | | Yes | + +#### ReqGetWithdrawGasFee + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| asset_id | integer | | Yes | + +#### ReqSearch + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| keyword | string | | Yes | + +#### ReqSendTx + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| tx_type | integer | | Yes | +| tx_info | string | | Yes | + +#### Search + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| data_type | integer | 2:account; 4:pk; 9:block; 10:tx | Yes | + +#### SimpleAccount + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| index | long | | Yes | +| name | string | | Yes | +| pk | string | | Yes | + +#### Status + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| status | integer | | Yes | +| network_id | integer | | Yes | + +#### SwapAmount + +| Name | Type | Description | Required | +| ---- | ---- | ----------- 
| -------- | +| asset_id | integer | | Yes | +| asset_name | string | | Yes | +| asset_amount | string | | Yes | + +#### Tx + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| hash | string | | Yes | +| type | long | | Yes | +| amount | string | | Yes | +| info | string | | Yes | +| status | long | | Yes | +| index | long | | Yes | +| gas_fee_asset_id | long | | Yes | +| gas_fee | string | | Yes | +| nft_index | long | | Yes | +| pair_index | long | | Yes | +| asset_id | long | | Yes | +| asset_name | string | | Yes | +| native_adress | string | | Yes | +| extra_info | string | | Yes | +| memo | string | | Yes | +| account_index | long | | Yes | +| account_name | string | | Yes | +| nonce | long | | Yes | +| expire_at | long | | Yes | +| block_height | long | | Yes | +| created_at | long | | Yes | +| state_root | string | | Yes | + +#### TxHash + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| tx_hash | string | | Yes | + +#### Txs + +| Name | Type | Description | Required | +| ---- | ---- | ----------- | -------- | +| total | integer | | Yes | +| txs | [ [Tx](#tx) ] | | Yes | diff --git a/docs/assets/Frame_work.png b/docs/assets/Frame_work.png new file mode 100644 index 000000000..f405d3455 Binary files /dev/null and b/docs/assets/Frame_work.png differ diff --git a/docs/assets/L2Block.png b/docs/assets/L2Block.png new file mode 100644 index 000000000..479743298 Binary files /dev/null and b/docs/assets/L2Block.png differ diff --git a/docs/assets/NFT_Marketplace.png b/docs/assets/NFT_Marketplace.png new file mode 100644 index 000000000..3d566325f Binary files /dev/null and b/docs/assets/NFT_Marketplace.png differ diff --git a/docs/assets/banner.png b/docs/assets/banner.png new file mode 100644 index 000000000..78a30ed60 Binary files /dev/null and b/docs/assets/banner.png differ diff --git a/docs/assets/eip_brief_view.svg b/docs/assets/eip_brief_view.svg new file mode 100644 index 000000000..f51a3a0e1 --- /dev/null +++ b/docs/assets/eip_brief_view.svg @@ -0,0 +1 @@ +prefix0x1901EIP712Domain HashMessage Type Hashproperty 1property 2property nhash(type) input as bytes32propretiesrawEncode result which should be 32bytes * (len(properties) + 1)hashStruct(message) keccakabi.encodepropertyor constantkeccak(no circuit)keccak of encode results should be 32bytesint(8, 16, 32, 64, 128, 256), bytes(16, 20, 32)......... \ No newline at end of file diff --git a/docs/assets/l1_wallet_eip712_usage.png b/docs/assets/l1_wallet_eip712_usage.png new file mode 100644 index 000000000..8c1255a72 Binary files /dev/null and b/docs/assets/l1_wallet_eip712_usage.png differ diff --git a/docs/assets/storage_layout.png b/docs/assets/storage_layout.png new file mode 100644 index 000000000..6a2b62500 Binary files /dev/null and b/docs/assets/storage_layout.png differ diff --git a/protocol.md b/docs/protocol.md similarity index 90% rename from protocol.md rename to docs/protocol.md index eb417d4ed..947a50e0e 100644 --- a/protocol.md +++ b/docs/protocol.md @@ -1,39 +1,41 @@ - - -# Zkbas Layer-2 Design - -## Table of contents +# ZkBAS Protocol Design ## Glossary -- **L1**: layer-1 blockchain(BNB Chain) -- **Rollup**: layer-2 network (Zkbas) -- **Owner**: a user who controls some assets in L2. -- **Operator**: entity operating the rollup. +- **L1**: layer 1 blockchain, it is BNB Smart Chain. +- **Rollup**: Zk Rollup based layer-2 network, it is ZkBAS. +- **Owner**: A user get a L2 account. 
+- **Committer**: Entity executing transactions and producing consecutive blocks on L2. - **Eventually**: happening within finite time. -- **Assets in rollup**: assets in L2 smart contract controlled by owners. -- **Rollup key**: owner's private key used to control deposited assets. -- **MiMC signature**: the result of signing the owner's message, using his private key, used in rollup internal transactions. +- **Assets in L2**: Assets in L2 smart contract controlled by owners. +- **L2 Key**: Owner's private key used to send transaction on L2. +- **MiMC Signature**: The result of signing the owner's message, +using his private key, used in L2 internal transactions. + +The current implementation we still use EDDSA as the signature scheme, we will soon support +switch to EDCSA. ## Design ### Overview -Zkbas implements a ZK rollup protocol (in short "rollup" below) for: +ZkBAS implements a ZK rollup protocol (in short "rollup" below) for: - BNB and BEP20 fungible token deposit and transfer - AMM-based fungible token swap on L2 -- BEP721 and BEP1155 non-fungible token deposit and transfer -- mint BEP721 or BEP1155 non-fungible tokens on L2 +- BEP721 non-fungible token deposit and transfer +- mint BEP721 non-fungible tokens on L2 - NFT-marketplace on L2 General rollup workflow is as follows: - Users can become owners in rollup by calling registerZNS in L1 to register a short name for L2; - Owners can transfer assets to each other, mint NFT on L2 or make a swap on L2; -- Owners can withdraw assets under their control to an L1 address. +- Owners can withdraw assets under their control to any L1 address. -Rollup operation requires the assistance of an operator, who rolls transactions together, computes a zero-knowledge proof of the correct state transition, and affects the state transition by interacting with the rollup contract. +Rollup operation requires the assistance of a committer, who rolls transactions together, also a prover who computes +a zero-knowledge proof of the correct state transition, and affects the state transition by interacting with the +rollup contract. ## Data format @@ -41,32 +43,33 @@ Rollup operation requires the assistance of an operator, who rolls transactions We assume that 1 `Chunk` = 32 bytes. -| Type | Size(Byte) | Type | Comment | -| -------------- | ---------- | -------- | ------------------------------------------------------------ | -| AccountIndex | 4 | uint32 | Incremented number of accounts in Rollup. New account will have the next free id. Max value is 2^32 - 1 = 4.294967295 × 10^9 | -| AssetId | 2 | uint16 | Incremented number of tokens in Rollup, max value is 65535 | -| PackedTxAmount | 5 | int64 | Packed transactions amounts are represented with 40 bit (5 byte) values, encoded as mantissa × 10^exponent where mantissa is represented with 35 bits, exponent is represented with 5 bits. This gives a range from 0 to 34359738368 × 10^31, providing 10 full decimal digit precision. | -| PackedFee | 2 | uint16 | Packed fees must be represented with 2 bytes: 5 bit for exponent, 11 bit for mantissa. | -| StateAmount | 16 | *big.Int | State amount is represented as uint128 with a range from 0 to ~3.4 × 10^38. It allows to represent up to 3.4 × 10^20 "units" if standard Ethereum's 18 decimal symbols are used. This should be a sufficient range. 
| +| Type | Size(Byte) | Type | Comment | +|----------------|------------|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| AccountIndex | 4 | uint32 | Incremented number of accounts in Rollup. New account will have the next free id. Max value is 2^32 - 1 = 4.294967295 × 10^9 | +| AssetId | 2 | uint16 | Incremented number of tokens in Rollup, max value is 65535 | +| PackedTxAmount | 5 | int64 | Packed transactions amounts are represented with 40 bit (5 byte) values, encoded as mantissa × 10^exponent where mantissa is represented with 35 bits, exponent is represented with 5 bits. This gives a range from 0 to 34359738368 × 10^31, providing 10 full decimal digit precision. | +| PackedFee | 2 | uint16 | Packed fees must be represented with 2 bytes: 5 bit for exponent, 11 bit for mantissa. | +| StateAmount | 16 | *big.Int | State amount is represented as uint128 with a range from 0 to ~3.4 × 10^38. It allows to represent up to 3.4 × 10^20 "units" if standard Ethereum's 18 decimal symbols are used. This should be a sufficient range. | | Nonce | 4 | uint32 | Nonce is the total number of executed transactions of the account. In order to apply the update of this state, it is necessary to indicate the current account nonce in the corresponding transaction, after which it will be automatically incremented. If you specify the wrong nonce, the changes will not occur. | -| EthAddress | 20 | string | To make an BNB Chain address from the BNB Chain's public key, all we need to do is to apply Keccak-256 hash function to the key and then take the last 20 bytes of the result. | -| Signature | 64 | []byte | Based on eddsa | -| HashValue | 32 | string | hash value based on MiMC | +| EthAddress | 20 | string | To make an BNB Smart Chain address from the BNB Smart Chain's public key, all we need to do is to apply Keccak-256 hash function to the key and then take the last 20 bytes of the result. | +| Signature | 64 | []byte | Based on EDDSA. | +| HashValue | 32 | string | hash value based on MiMC | ### Amount packing -Mantissa and exponent parameters used in Zkbas: +Mantissa and exponent parameters used in ZkBAS: `amount = mantissa * radix^{exponent}` | Type | Exponent bit width | Mantissa bit width | Radix | -| -------------- | ------------------ | ------------------ | ----- | +|----------------|--------------------|--------------------|-------| | PackedTxAmount | 5 | 35 | 10 | | PackedFee | 5 | 11 | 10 | ### State Merkle Tree(height) -We have 3 unique trees: `AccountTree(32)`, `LiquidityTree(16)`, `NftTree(40)` and one sub tree `AssetTree(16)` which belongs to `AccountTree(32)`. The empty leaf for all of the trees is just set every attribute as `0` for every node. +We have 3 unique trees: `AccountTree(32)`, `LiquidityTree(16)`, `NftTree(40)` and one sub-tree `AssetTree(16)` which +belongs to `AccountTree(32)`. The empty leaf for all the trees is just set every attribute as `0` for every node. #### AccountTree @@ -112,7 +115,7 @@ func ComputeAccountLeafHash( ##### AssetTree -`AssetTree` is a sub tree of `AccountTree` and it stores all of the assets `balance`, `lpAmount` and `offerCanceledOrFinalized`. 
The node of asset tree is: +`AssetTree` is sub-tree of `AccountTree` and it stores all the assets `balance`, `lpAmount` and `offerCanceledOrFinalized`. The node of asset tree is: ```go type AssetNode struct { @@ -154,7 +157,7 @@ func ComputeAccountAssetLeafHash( #### LiquidityTree -`LiquidityTree` is used for storing all of the liquidities info and the node of the liquidity tree is: +`LiquidityTree` is used for storing all the liquidity info and the node of the liquidity tree is: ```go type LiquidityNode struct { @@ -221,7 +224,7 @@ func ComputeLiquidityAssetLeafHash( #### NftTree -`NftTree` is used for storing all of the nfts and the node info is: +`NftTree` is used for storing all the NFTs and the node info is: ```go type NftNode struct { @@ -290,9 +293,9 @@ func ComputeStateRootHash( } ``` -## Zkbas Transactions +## ZkBAS Transactions -Zkbas transactions are divided into Rollup transactions (initiated inside Rollup by a Rollup account) and Priority operations (initiated on the mainchain by an BNB Chain account). +ZkBAS transactions are divided into Rollup transactions (initiated inside Rollup by a Rollup account) and Priority operations (initiated on the BSC by an BNB Smart Chain account). Rollup transactions: @@ -322,9 +325,11 @@ Priority operations: ### Rollup transaction lifecycle 1. User creates a `Transaction` or a `Priority operation`. -2. After processing this request, operator creates a `Rollup operation` and adds it to the block. -3. Once the block is complete, operator submits it to the Zkbas smart contract as a block commitment. Part of the logic of some `Rollup transaction` is checked by the smart contract. -4. The proof for the block is submitted to the Zkbas smart contract as the block verification. If the verification succeeds, the new state is considered finalized. +2. After processing this request, committer creates a `Rollup operation` and adds it to the block. +3. Once the block is complete, sender submits it to the ZkBAS smart contract as a block commitment. + Part of the logic of some `Rollup transaction` is checked by the smart contract. +4. The proof for the block is submitted to the ZkBAS smart contract as the block verification. + If the verification succeeds, the new state is considered finalized. ### EmptyTx @@ -332,18 +337,18 @@ Priority operations: No effects. -#### Onchain operation +#### On-Chain operation ##### Size | Chunks | Significant bytes | -| ------ | ----------------- | +|--------|-------------------| | 1 | 1 | ##### Structure | Field | Size(byte) | Value/type | Description | -| ------ | ---------- | ---------- | ---------------- | +|--------|------------|------------|------------------| | TxType | 1 | `0x00` | Transaction type | #### User transaction @@ -356,7 +361,7 @@ No user transaction This is a layer-1 transaction and a user needs to call this method first to register a layer-2 account. -#### Onchain operation +#### On-Chain operation ##### Size @@ -367,7 +372,7 @@ This is a layer-1 transaction and a user needs to call this method first to regi ##### Structure | Name | Size(byte) | Comment | -| --------------- | ---------- | ------------------------------ | +|-----------------|------------|--------------------------------| | TxType | 1 | transaction type | | AccountIndex | 4 | unique account index | | AccountName | 32 | account name | @@ -436,7 +441,7 @@ func VerifyRegisterZNSTx( This is a layer-1 transaction and is used for creating a trading pair for L2. 
-#### Onchain operation +#### On-Chain operation ##### Size @@ -517,7 +522,7 @@ func VerifyCreatePairTx( This is a layer-1 transaction and is used for updating a trading pair for L2. -#### Onchain operation +#### On-Chain operation ##### Size @@ -597,7 +602,7 @@ func VerifyUpdatePairRateTx( This is a layer-1 transaction and is used for depositing assets into the layer-2 account. -#### Onchain operation +#### On-Chain operation ##### Size @@ -681,9 +686,9 @@ func VerifyDepositTx( #### Description -This is a layer-1 transaction and is used for depositing nfts into the layer-2 account. +This is a layer-1 transaction and is used for depositing NFTs into the layer-2 account. -#### Onchain operation +#### On-Chain operation ##### Size @@ -774,9 +779,9 @@ func VerifyDepositNftTx( #### Description -This is a layer-2 transaction and is used for transfering assets in the layer-2 network. +This is a layer-2 transaction and is used for transferring assets in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -871,7 +876,7 @@ func VerifyTransferTx( tx *TransferTxConstraints, accountsBefore [NbAccountsPerTx]AccountConstraints, ) (pubData [PubDataSizePerTx]Variable) { - // collect pubdata + // collect pub-data pubData = CollectPubDataFromTransfer(api, *tx) // verify params // account index @@ -902,7 +907,7 @@ func VerifyTransferTx( This is a layer-2 transaction and is used for making a swap for assets in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1066,7 +1071,7 @@ func VerifySwapTx( This is a layer-2 transaction and is used for adding liquidity for a trading pair in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1242,7 +1247,7 @@ func VerifyAddLiquidityTx( This is a layer-2 transaction and is used for removing liquidity for a trading pair in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1415,7 +1420,7 @@ func VerifyRemoveLiquidityTx( This is a layer-2 transaction and is used for withdrawing assets from the layer-2 to the layer-1. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1524,7 +1529,7 @@ func VerifyWithdrawTx( This is a layer-2 transaction and is used for creating a new collection -#### Onchain operation +#### On-Chain operation ##### Size @@ -1626,9 +1631,9 @@ func VerifyCreateCollectionTx( #### Description -This is a layer-2 transaction and is used for minting nfts in the layer-2 network. +This is a layer-2 transaction and is used for minting NFTs in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1639,9 +1644,9 @@ This is a layer-2 transaction and is used for minting nfts in the layer-2 networ ##### Structure | Name | Size(byte) | Comment | -| ------------------- | ---------- | ---------------------- | +|---------------------| ---------- | ---------------------- | | TxType | 1 | transaction type | -| CraetorAccountIndex | 4 | creator account index | +| CreatorAccountIndex | 4 | creator account index | | ToAccountIndex | 4 | receiver account index | | NftIndex | 5 | unique nft index | | GasFeeAccountIndex | 4 | gas fee account index | @@ -1745,9 +1750,9 @@ func VerifyMintNftTx( #### Description -This is a layer-2 transaction and is used for transfering nfts to others in the layer-2 network. +This is a layer-2 transaction and is used for transferring NFTs to others in the layer-2 network. 
-#### Onchain operation +#### On-Chain operation ##### Size @@ -1861,7 +1866,7 @@ func VerifyTransferNftTx( This is a layer-2 transaction that will be used for buying or selling Nft in the layer-2 network. -#### Onchain operation +#### On-Chain operation ##### Size @@ -1884,7 +1889,7 @@ This is a layer-2 transaction that will be used for buying or selling Nft in the | ExpiredAt | 8 | timestamp after which the order is invalid | | Sig | 64 | signature generated by buyer/seller_account_index's private key | -`AtomicMatch`(**below is the only info that will be uploaded onchain**): +`AtomicMatch`(**below is the only info that will be uploaded on-chain**): | Name | Size(byte) | Comment | | --------------------- | ---------- | -------------------------- | @@ -2094,7 +2099,7 @@ func VerifyAtomicMatchTx( This is a layer-2 transaction and is used for canceling nft offer. -#### Onchain operation +#### On-Chain operation ##### Size @@ -2194,7 +2199,7 @@ func VerifyCancelOfferTx( This is a layer-2 transaction and is used for withdrawing nft from the layer-2 to the layer-1. -#### Onchain operation +#### On-Chain operation ##### Size @@ -2333,7 +2338,7 @@ func VerifyWithdrawNftTx( This is a layer-1 transaction and is used for full exit assets from the layer-2 to the layer-1. -#### Onchain operation +#### On-Chain operation ##### Size @@ -2409,9 +2414,9 @@ func VerifyFullExitTx( #### Description -This is a layer-1 transaction and is used for full exit nfts from the layer-2 to the layer-1. +This is a layer-1 transaction and is used for full exit NFTs from the layer-2 to the layer-1. -#### Onchain operation +#### On-Chain operation ##### Size @@ -2520,8 +2525,8 @@ function registerZNS(string calldata _name, address _owner, bytes32 _zkbasPubKey - `_name`: your favor account name - `_owner`: account name layer-1 owner address -- `_zkbasPubKeyX`: zkbas layer-2 public key X -- `_zkbasPubKeyY`: zkbas layer-2 public key Y +- `_zkbasPubKeyX`: ZkBAS layer-2 public key X +- `_zkbasPubKeyY`: ZkBAS layer-2 public key Y #### CreatePair @@ -2640,7 +2645,7 @@ Withdraws token from Rollup to L1 in case of desert mode. User must provide proo ##### Commit block -Submit committed block data. Only active validator can make it. Onchain operations will be checked on contract and fulfilled on block verification. +Submit committed block data. Only active validator can make it. On-chain operations will be checked on contract and fulfilled on block verification. ```js struct StoredBlockInfo { @@ -2667,11 +2672,11 @@ function commitBlocks( external ``` -`StoredBlockInfo`: block data that we store on BNB Chain. We store hash of this structure in storage and pass it in tx arguments every time we need to access any of its field. +`StoredBlockInfo`: block data that we store on BNB Smart Chain. We store hash of this structure in storage and pass it in tx arguments every time we need to access any of its field. 
- `blockNumber`: rollup block number - `priorityOperations`: priority operations count -- `pendingOnchainOperationsHash`: hash of all onchain operations that have to be processed when block is finalized (verified) +- `pendingOnchainOperationsHash`: hash of all on-chain operations that have to be processed when block is finalized (verified) - `timestamp`: block timestamp - `stateRoot`: root hash of the rollup tree state - `commitment`: rollup block commitment @@ -2681,17 +2686,17 @@ external - `newStateRoot`: new layer-2 root hash - `publicData`: public data of the executed rollup operations - `timestamp`: block timestamp -- `publicDataOffsets`: list of onchain operations offset +- `publicDataOffsets`: list of on-chain operations offset - `blockNumber`: rollup block number -`commitBlocks` and `commitOneBlock` are used for committing layer-2 transactions data onchain. +`commitBlocks` and `commitOneBlock` are used for committing layer-2 transactions data on-chain. - `_lastCommittedBlockData`: last committed block header - `_newBlocksData`: pending commit blocks ##### Verify and execute blocks -Submit proofs of blocks and make it verified onchain. Only active validator can make it. This block onchain operations will be fulfilled. +Submit proofs of blocks and make it verified on-chain. Only active validator can make it. This block on-chain operations will be fulfilled. ```js struct VerifyAndExecuteBlockInfo { @@ -2705,7 +2710,7 @@ function verifyAndExecuteBlocks(VerifyAndExecuteBlockInfo[] memory _blocks, uint `VerifyAndExecuteBlockInfo`: block data that is used for verifying blocks - `blockHeader`: related block header -- `pendingOnchainOpsPubdata`: public data of pending onchain operations +- `pendingOnchainOpsPubdata`: public data of pending on-chain operations `verifyBlocks`: is used for verifying block data and proofs @@ -2714,7 +2719,7 @@ function verifyAndExecuteBlocks(VerifyAndExecuteBlockInfo[] memory _blocks, uint #### Desert mode trigger -Checks if Desert mode must be entered. Desert mode must be entered in case of current BNB Chain block number is higher than the oldest of existed priority requests expiration block number. +Checks if Desert mode must be entered. Desert mode must be entered in case of current BNB Smart Chain block number is higher than the oldest of existed priority requests expiration block number. ```js function activateDesertMode() public returns (bool) @@ -2722,8 +2727,7 @@ function activateDesertMode() public returns (bool) #### Revert blocks -Revert blocks that were not verified before deadline determined by `EXPECT_VERIFICATION_IN` constant. The caller must be valid operator. - +Revert blocks that were not verified before deadline determined by `EXPECT_VERIFICATION_IN` constant. ```js function revertBlocks(StoredBlockInfo[] memory _blocksToRevert) external ``` @@ -2789,7 +2793,7 @@ function addAsset(address _asset) external #### Set asset paused -Set asset status as paused or actived. The caller must be current governor. Its impossible to create deposits of the paused assets. +Set asset status as paused or active. The caller must be current governor. It is impossible to create deposits of the paused assets. ```js function setAssetPaused(address _assetAddress, bool _assetPaused) external @@ -2850,7 +2854,7 @@ Returns: asset id. #### Add asset -Collects fees for adding a asset and passes the call to the `addAsset` function in the governance contract. +Collecting fees for adding an asset and passing the call to the `addAsset` function in the governance contract. 
```js function addAsset(address _assetAddress) external
diff --git a/docs/readme.md b/docs/readme.md new file mode 100644 index 000000000..8483a4fc9 --- /dev/null +++ b/docs/readme.md @@ -0,0 +1,45 @@
+# Overview
+
+## Introduction
+ZkBAS is built on ZK Rollup architecture. ZkBAS bundles (or “rolls up”) hundreds of transactions off-chain and generates a
+cryptographic proof. These proofs can come in the form of SNARKs (succinct non-interactive arguments of knowledge), which
+prove the validity of every single transaction in the rollup block. This means all funds are held on BSC,
+while computation and storage are performed on BAS at lower cost and higher speed.
+
+## Problems ZkBAS solves
+Today BSC is experiencing network scalability problems, and the core developers have proposed BAS in their [Outlook
+2022](https://forum.bnbchain.org/t/bsc-development-outlook-2022/44) paper to solve them, because such side
+chains can be designed for much higher throughput and lower gas fees.
+
+[BEP100](https://github.com/bnb-chain/BEPs/pull/132/files) proposes a modular framework for creating BSC-compatible
+side chains and connecting them through a native relayer hub, whose security is guaranteed by the side chain.
+According to [the analysis](https://blog.chainalysis.com/reports/cross-chain-bridge-hacks-2022/) by Chainalysis, bridges
+are now a top target for hackers, and attacks on bridges account for 69% of total funds stolen in 2022. ZkBAS
+solves this problem: thanks to zkSNARK proofs, ZkBAS shares the same security as BSC.
+
+## ZkBAS features
+
+ZkBAS implements the following features so far:
+- **L1 security**. ZkBAS shares the same security as BSC. Thanks to zkSNARK proofs, security is guaranteed
+  cryptographically. Users do not have to trust any third parties or keep monitoring the Rollup blocks in order to
+  prevent fraud.
+- **L1<>L2 Communication**. BNB and BEP20/BEP721/BEP1155 tokens created on BSC or zkBAS can flow freely between BSC and zkBAS.
+- **Built-in instant AMM swap**. It allows digital assets to be traded automatically and without permission by using
+  liquidity pools.
+- **Built-in NFT marketplace**. Developers can build marketplaces for crypto collectibles and non-fungible tokens (NFTs)
+  out of the box on ZkBAS.
+- **Fast transaction speed and faster finality**.
+- **Low gas fee**. The gas token on zkBAS can be either BEP20 or BNB.
+- **"Full exit" on BSC**. Users can request this operation to withdraw funds if they believe their transactions
+  are being censored by zkBAS.
+ +## Find More + +- [ZkBAS Technology](./technology.md) +- [ZkBAS Protocol Design](./protocol.md) +- [Quick Start Tutorial](./tutorial.md) +- [Tokenomics](./tokenomics.md) +- [API Reference](./api_reference.md) +- [Storage Layout](./storage_layout.md) +- [Wallets](./wallets.md) + \ No newline at end of file diff --git a/docs/storage_layout.md b/docs/storage_layout.md new file mode 100644 index 000000000..6d817b6d8 --- /dev/null +++ b/docs/storage_layout.md @@ -0,0 +1,57 @@ +# Storage Layout + +![Layout](./assets/storage_layout.png) +## Table + - `Account`: record account related information + - `Account History`: record the historical change information of the account + - `Asset`: record Asset related information + - `Block Witness`: record the information about the generated Witness + - `L1 Rollup Tx`: record transaction information from L1 + - `L1 Synced Block`: record block information from L1 + - `Compressed Block`: record other information of L2 block + - `Block`: record L2 block information + - `Liquidity`: record Liquidity related information + - `Liquidity History`: record the historical change information of Liquidity + - `MemPool Tx`: record unprocessed Tx messages + - `MemPool Tx Detail`: record unprocessed Tx details + - `NFT`: record NFT related information + - `NFT Collection`: record NFT Collection related information + - `NFT Exchange`: record NFT transaction information + - `NFT History`: record the historical status change information of NFT + - `NFT Withdraw History`: record NFT Withdraw information + - `Offer`: record the offer information of NFT + - `Priority Request`: record Priority Request information from L1 + - `Proof`: record the Proof information generated by the circuit + - `Sys Config`: store system variables + - `Tx`: record transaction information on L2 + - `Tx Detail`: record detailed transaction information on L2 + - `Fail Tx`: record the failed transaction information on L2 + +![L2Block](./assets/L2Block.png) + +## Tree +There are 4 types of trees in the system. + - Account Tree + - Account Asset Tree + - Liquidity Tree + - Nft Tree + +## Account Tree +The `AccountNameHash`, `PublicKey`, `Nonce`, `CollectionNonce`, `AccountAssetTree.Root()` of each Account will be calculated as a hash and written into the tree corresponding to the `AccountID`. +Used to record and save account status under each block height. + +## Account Asset Tree +Each Account will maintain an Asset tree, the `Balance`, `LpAmount`, and `OfferCanceledOrFinalized` of each Asset will be calculated as a hash and written into the tree corresponding to the `AssetID`. + +## Liquidity Tree +The `AssetAId`, `AssetA`, `AssetBId`, `AssetB`, `LpAmount`, `KLast`, `FeeRate`, `TreasuryAccountIndex`, and `TreasuryRate` of each liquidity resource will be calculated as a hash and written into the tree corresponding to the `PairIndex`. +Used to record and save liquidity status under each block height. + +## Nft Tree +The `CreatorAccountIndex`, `OwnerAccountIndex`, `NftContentHash`, `NftL1Address`, `NftL1TokenId`, `CreatorTreasuryRate`, and `CollectionId` of each nft resource will be calculated as a hash and written into the tree corresponding to the `NftIndex`. +Used to record and save nft status under each block height. + +## Physical Storage +The Tree in `zkBAS` uses the Sparse Merkle Tree (SMT) structure. 
In order to optimize the storage space as much as possible, we have implemented an SMT library that compresses the four-layer tree structure into one layer, reducing the depth of the tree and improving storage space usage.
+
+Find More: https://github.com/bnb-chain/zkbas-smt/blob/main/docs/design.md
\ No newline at end of file
diff --git a/docs/technology.md b/docs/technology.md new file mode 100644 index 000000000..1b99e8593 --- /dev/null +++ b/docs/technology.md @@ -0,0 +1,48 @@
+# ZkBAS Technology
+
+## ZK Rollup Architecture
+![Framework](./assets/Frame_work.png)
+- **committer**. The committer executes transactions and produces consecutive blocks.
+- **monitor**. The monitor tracks events on BSC and translates them into **transactions** on zkBAS.
+- **witness**. The witness re-executes the transactions within the block and generates witness materials.
+- **prover**. The prover generates a cryptographic proof based on the witness materials.
+- **sender**. The sender rolls up the compressed L2 blocks to L1 and submits proofs to verify them.
+- **api server**. The API server is the access endpoint for most users; it provides rich data, including
+  digital assets, blocks, transactions, swap info and gas fees.
+- **recovery**. A tool to recover the sparse merkle tree in kvrocks based on the world state in PostgreSQL.
+
+## Maximum throughput
+Pending benchmark...
+
+## Data Availability
+ZkBAS publishes state data for every transaction processed off-chain to BSC. With this data, it is possible for
+individuals or businesses to reproduce the rollup’s state and validate the chain themselves. BSC makes this data
+available to all participants of the network as calldata.
+
+ZkBAS does not need to publish much transaction data on-chain because validity proofs already verify the authenticity
+of state transitions. Nevertheless, storing data on-chain is still important because it allows permissionless,
+independent verification of the L2 chain's state, which in turn allows anyone to submit batches of transactions,
+preventing a malicious committer from censoring or freezing the chain.
+
+ZkBAS will provide a default client to replay all layer-2 state based on this calldata.
+
+## Transaction Finality
+BSC acts as a settlement layer for ZkBAS: L2 transactions are finalized only if the L1 contract accepts the validity
+proof and executes the txs. This eliminates the risk of malicious operators corrupting the chain
+(e.g., stealing rollup funds) since every transaction must be approved on Mainnet. Also, BSC guarantees that user
+operations cannot be reversed once finalized on L1.
+
+ZkBAS provides relatively fast finality, within about 10 minutes.
+
+## Instant confirmation on ZkBAS
+Even though time to finality is about 10 minutes, it does not affect the usability of the network. The state transition
+happens immediately once the block has been proposed on ZkBAS. The rollup operations are transparent to most users,
+and users can make further transfers without waiting.
+
+## Censorship resistance
+The committer executes transactions and produces batches. While this ensures efficiency, it increases the risk of censorship:
+a malicious ZK-rollup committer can censor users by refusing to include their transactions in batches.
+
+As a security measure, ZkBAS allows users to submit transactions directly to the rollup contract on Mainnet if
+they think they are being censored by the operator. This allows users to force an exit from the ZK-rollup to BSC without
+having to rely on the committer's permission.
\ No newline at end of file
diff --git a/docs/tokenomics.md b/docs/tokenomics.md new file mode 100644 index 000000000..259a3fa12 --- /dev/null +++ b/docs/tokenomics.md @@ -0,0 +1,20 @@
+# Tokenomics
+
+## Supported Tokens
+ZkBAS can be used to transfer BNB, BEP20 and BEP721 tokens. ZkBAS supports a maximum of 65535 BEP20 tokens and 1099511627775 BEP721 tokens. For BEP20 tokens, each supported token needs to be listed on the ZkBAS L1 contract first. For BEP721 tokens, there are two sources: depositing a BEP721 token from the BSC network to ZkBAS, or minting it directly on ZkBAS.
+
+ZkBAS is not responsible for the security of supported token contracts. Please use any token at your own risk.
+
+## List Token
+ZkBAS uses the `AssetGovernance` contract to manage supported tokens. To list a token on ZkBAS, call the function `addAsset(address _assetAddress)` of the AssetGovernance contract.
+
+Note that there is a fee for listing a token, and fees are denominated in a specific token. The listing fee and fee token can be retrieved by calling the functions `listingFee` and `listingFeeToken`. Make sure the sender that calls `addAsset` has enough fee token balance.
+
+## Fee
+In ZkBAS the cost of every transaction comes from two parts:
+
+- **L2 part**: every transaction needs to read or write state storage, and the ZkBAS prover needs to generate a SNARK proof for every transaction. This part depends on the use of hardware resources and is therefore invariable;
+
+- **L1 part**: ZkBAS must pay BNB gas to commit, verify and execute L2 blocks by sending the corresponding L1 transactions. The L1 fees are averaged per L2 transaction, which makes them orders of magnitude cheaper than the cost of normal BNB/BEP20 transfers. In addition, for the special transaction types that need to be executed on the contract, such as `Withdraw` and `WithdrawNft`, there is an extra gas cost to be covered.
+
+Users can easily get the fee cost of every transaction type using the RPC methods provided by ZkBAS, and then pay transaction fees in any of the fee tokens supported by ZkBAS. For example, if ZkBAS supports BNB/USDT, users can pay transaction fees in either BNB or USDT, whichever is more convenient.
\ No newline at end of file
diff --git a/docs/tree/recovery.md b/docs/tree/recovery.md new file mode 100644 index 000000000..3658b6547 --- /dev/null +++ b/docs/tree/recovery.md @@ -0,0 +1,26 @@
+## Recovery
+
+With the introduction of the persistent SparseMerkleTree structure, tree data can be persisted to local (leveldb) and remote storage (kvrocks, redis).
+
+When stored tree data is accidentally lost, this tool can help recover it.
+#### Usage
+
+1. Prepare a config.yaml to set the RDB, Redis, and target tree sources you want to restore.
+```yaml
+Postgres:
+  DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable
+
+CacheRedis:
+  - Host: 127.0.0.1:6379
+    # Pass: myredis
+    Type: node
+
+TreeDB:
+  Driver: leveldb
+  LevelDBOption:
+    File: /tmp/test
+```
+2. Execute the tool:
+```sh
+recovery -f ${config} -height 300 -service committer
+```
diff --git a/docs/tutorial.md b/docs/tutorial.md new file mode 100644 index 000000000..c6d0ba33d --- /dev/null +++ b/docs/tutorial.md @@ -0,0 +1 @@
+# Quick Start Tutorial
\ No newline at end of file
diff --git a/docs/wallets.md b/docs/wallets.md new file mode 100644 index 000000000..4d994548a --- /dev/null +++ b/docs/wallets.md @@ -0,0 +1,331 @@
+# ZkBAS Wallets
+
+## L1 Wallet Support Introduction
+
+ZkBAS is built on ZK Rollup architecture.
+ZkBAS bundles (or “rolls up”) hundreds of transactions off-chain and generates a cryptographic proof.
+
+For the convenience of users, ZKBAS has implemented a new version supporting L1 wallets by taking advantage of the
+EIP-712 standard signature, which can sign any structured message and is well supported by mainstream wallets (e.g. MetaMask, Trust Wallet, etc.).
+
+Any user can access ZkBAS as long as they have a wallet app that supports the EIP-712 signing scheme, and they can easily manage their L2 assets (NFTs, tokens, etc.)
+by importing their ECDSA (secp256k1) private key into one of the wallets mentioned above.
+
+**NOTE: this feature is still under development**
+
+![L1_USAGE](./assets/l1_wallet_eip712_usage.png)
+
+## EIP-712 Standard Adaptation
+
+[EIP-712](https://eips.ethereum.org/EIPS/eip-712) is a standard for hashing and signing of typed structured data as opposed to just bytestrings. It includes a
+
+* theoretical framework for correctness of encoding functions,
+* specification of structured data similar to and compatible with Solidity structs,
+* safe hashing algorithm for instances of those structures,
+* safe inclusion of those instances in the set of signable messages,
+* an extensible mechanism for domain separation,
+* new RPC call eth_signTypedData, and
+* an optimized implementation of the hashing algorithm in EVM.
+
+### Brief View of EIP-712 Implementation
+
+The EIP-712 standard is implemented as below.
+
+![EIP_BRIEF_VIEW](./assets/eip_brief_view.svg)
+
+Below is the encode-and-sign flow from bottom to top.
+
+1. hash of the message type, which we call **hash(type)**
+2. rawEncode of hash(type) and the message entity properties, which we call the **rawEncode result**; the rawEncode result is rawEncode(hash(type), properties...)
+3. hash of the rawEncode result, which we call **hashStruct(message)**
+4. the same structure as hashStruct(message) applied to the EIP712Domain, which we call **hashStruct(EIP712Domain)**
+5. concatenated bytes of the prefix, hashStruct(EIP712Domain) and hashStruct(message), which we call **bytesConcat**
+6. hash of bytesConcat, which we call **hash(bytesConcat)**
+7. sign hash(bytesConcat) using the private key, generating the final ECDSA signature over the curve secp256k1.
+
+### Elliptic Curve Adaptation
+
+To adapt to the EIP-712 standard signing scheme, ZKBAS switches its signature algorithm from the original EdDSA to ECDSA, precisely over secp256k1, which
+is used across EVM-based blockchains.
+
+Moreover, in order to support the ECDSA change, ZKBAS switches the zkSNARK proving system from Groth16 to PLONK, so the contract verifier switches from a Groth16 verifier contract to a PLONK verifier contract.
+
+*This feature is still in the development phase and will be supported soon.*
+
+### Hash Function Adaptation
+
+To adapt to the EIP-712 standard signing scheme, ZKBAS switches the hash algorithm from the original MiMC to Keccak256 (SHA-3).
+
+The Keccak256 in the circuit is still computed in hint scope because of the complexity of the Keccak functions; a pure in-circuit implementation would add more than **200K** constraints.
+
+*Trick here: as we can see from [Brief View of EIP-712 Implementation](#brief-view-of-eip-712-implementation), the hash results of hashStruct(EIP712Domain) and hash(type) are skipped in the circuit by using constant values, which significantly reduces the number of Keccak invocations.*
+
+*e.g. we define prefix + hashStruct(EIP712Domain) in code as below:*
+```golang=
+var HexPrefixAndEip712DomainKeccakHash = "1901b4c86e5ff1abb2a7aae82a5ced0f0733dfd26fbef5c4713bfbf42d46a73e21c4"
+```
+
+### Encodes Adaptation
+
+To adapt to the EIP-712 standard signing scheme, ZkBAS switches the encoding of transactions from simply joining all bytes to the
+EIP-712 signed typed message encoding, as described in [Brief View of EIP-712 Implementation](#brief-view-of-eip-712-implementation).
+
+Basically, the encoding switched to a
+non-standard abi.encode function that takes the type hash as its first input and the properties of the transaction as the remaining inputs; it is invoked like a constructor abi.encode for each transaction.
+See more about abi.encode [here](https://docs.soliditylang.org/en/v0.8.15/abi-spec.html).
+
+The message of every transaction is described in [Typed Message Definition of All Transactions](#typed-message-definition-of-all-transactions).
+The abi.encode in the circuit is still computed via hints.
+
+## Typed Message Signature
+
+Below is the typescript definition of TypedMessage. A TypedMessage uses `types` as the type definitions, `primaryType` as the selected type to encode, and `message` as the values to be signed; the `domain` here is the EIP712Domain.
+
+```typescript=
+/**
+* This is the message format used for `signTypedData`, for all versions
+* except `V1`.
+*
+* @template T - The custom types used by this message.
+* @property types - The custom types used by this message.
+* @property primaryType - The type of the message.
+* @property domain - Signing domain metadata. The signing domain is the intended context for the
+* signature (e.g. the dapp, protocol, etc. that it's intended for). This data is used to
+* construct the domain separator of the message.
+* @property domain.name - The name of the signing domain.
+* @property domain.version - The current major version of the signing domain.
+* @property domain.chainId - The chain ID of the signing domain.
+* @property domain.verifyingContract - The address of the contract that can verify the signature.
+* @property domain.salt - A disambiguating salt for the protocol.
+* @property message - The message to be signed.
+*/
+export interface TypedMessage<T> {
+  types: T;
+  primaryType: keyof T;
+  domain: {
+    name?: string;
+    version?: string;
+    chainId?: number;
+    verifyingContract?: string;
+    salt?: ArrayBuffer;
+  };
+  message: Record<string, unknown>;
+}
+```
+
+### Typed Message Definition of All Transactions
+
+Below are the typescript definitions of every transaction type and of EIP712Domain. These typed messages can be used in any wallet that supports the EIP-712 signing scheme to generate a signature that is ready for L2 verification and execution, and finally for rollup proof generation. A minimal signing sketch follows; the concrete type definitions for each transaction are listed after it.
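+
+The sketch below shows how a dapp could ask an EIP-712-capable wallet (a MetaMask-style `window.ethereum` provider is assumed) to sign one of these messages via `eth_signTypedData_v4`. The concrete field values are placeholders for illustration only; real values come from the ZkBAS account state and the client's fee/nonce logic.
+
+```typescript=
+// `domain` and `types` are assumed to be the objects defined in the sections below.
+const typedData = {
+  domain,
+  types,
+  primaryType: 'Transfer',
+  message: {
+    FromAccountIndex: 2,                    // placeholder values -- use real account/asset data
+    ToAccountIndex: 3,
+    ToAccountNameHash: '0x' + '00'.repeat(32),
+    AssetId: 0,
+    packedAmount: 100000,
+    GasAccountIndex: 1,
+    GasFeeAssetId: 0,
+    packedFee: 100,
+    CallDataHash: '0x' + '00'.repeat(32),
+    ExpiredAt: 1700000000000,
+    Nonce: 1,
+    ChainId: 10,
+  },
+};
+
+// Ask the connected wallet to sign the typed message.
+const ethereum = (window as any).ethereum;
+const [account] = await ethereum.request({ method: 'eth_requestAccounts' });
+const signature: string = await ethereum.request({
+  method: 'eth_signTypedData_v4',
+  params: [account, JSON.stringify(typedData)],
+});
+// `signature` is the secp256k1 ecdsa signature that ZkBAS verifies for this L2 transaction.
+```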
+ +#### EIP712Domain +```typescript= + const types = { + EIP712Domain: [ + { name: 'name', type: 'string' }, + { name: 'version', type: 'string' }, + { name: 'chainId', type: 'uint256' }, + { name: 'verifyingContract', type: 'address' }, + { name: 'salt', type: 'bytes32' }, + ], +``` + +#### EIP712Domain Static Value +```typescript= + const salt = Buffer.from( + '0xf2d857f4a3edcb9b78b4d503bfe733db1e3f6cdc2b7971ee739626c97e86a558', + 'hex', + ); + const domain = { + name: 'ZKBAS', + version: '1.0', + chainId: 10, + verifyingContract: '', + salt: salt, + }; +``` + +#### Message +##### Transfer +```typescript= + Transfer: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'ToAccountIndex', type: 'uint256' }, + { name: 'ToAccountNameHash', type: 'bytes32' }, + { name: 'AssetId', type: 'uint256' }, + { name: 'packedAmount', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'packedFee', type: 'uint256' }, + { name: 'CallDataHash', type: 'bytes32' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### Withdraw +```typescript= + Withdraw: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'AssetId', type: 'uint256' }, + { name: 'AssetAmount', type: 'bytes16' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ToAddress', type: 'bytes20' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### AddLiquidity +```typescript= + AddLiquidity: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'PairIndex', type: 'uint256' }, + { name: 'AssetAAmount', type: 'uint256' }, + { name: 'AssetBAmount', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### RemoveLiquidity +```typescript= + RemoveLiquidity: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'PairIndex', type: 'uint256' }, + { name: 'AssetAMinAmount', type: 'uint256' }, + { name: 'AssetBMinAmount', type: 'uint256' }, + { name: 'LpAmount', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### Swap +```typescript= + Swap: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'PairIndex', type: 'uint256' }, + { name: 'AssetAAmount', type: 'uint256' }, + { name: 'AssetBMinAmount', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### CreateCollection +```typescript= + CreateCollection: [ + { name: 'AccountIndex', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 
'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### MintNft +```typescript= + MintNft: [ + { name: 'CreatorAccountIndex', type: 'uint256' }, + { name: 'ToAccountIndex', type: 'uint256' }, + { name: 'ToAccountNameHash', type: 'bytes32' }, + { name: 'NftContentHash', type: 'bytes32' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'CreatorTreasuryRate', type: 'uint256' }, + { name: 'NftCollectionId', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### TransferNft +```typescript= + TransferNft: [ + { name: 'FromAccountIndex', type: 'uint256' }, + { name: 'ToAccountIndex', type: 'uint256' }, + { name: 'ToAccountNameHash', type: 'bytes32' }, + { name: 'NftIndex', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'CallDataHash', type: 'bytes32' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### WithdrawNft +```typescript= + WithdrawNft: [ + { name: 'AccountIndex', type: 'uint256' }, + { name: 'NftIndex', type: 'uint256' }, + { name: 'ToAddress', type: 'bytes20' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### CancelOffer +```typescript= + CancelOffer: [ + { name: 'AccountIndex', type: 'uint256' }, + { name: 'OfferId', type: 'uint256' }, + { name: 'GasAccountIndex', type: 'uint256' }, + { name: 'GasFeeAssetId', type: 'uint256' }, + { name: 'GasFeeAssetAmount', type: 'uint256' }, + { name: 'ExpiredAt', type: 'uint256' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], +``` +##### AtomicMatch +```typescript= + AtomicMatch: [ + { name: 'sellerAccountIndex', type: 'uint256' }, + { name: 'sellerNftIndex', type: 'uint256' }, + { name: 'sellerOfferId', type: 'uint256' }, + { name: 'sellerType', type: 'uint256' }, + { name: 'sellerAssetId', type: 'uint256' }, + { name: 'sellerAssetAmount', type: 'uint256' }, + { name: 'sellerListedAt', type: 'uint256' }, + { name: 'sellerExpiredAt', type: 'uint256' }, + { name: 'sellerTreasureRate', type: 'uint256' }, + { name: 'sellerSigR', type: 'bytes32' }, + { name: 'sellerSigS', type: 'bytes32' }, + { name: 'buyerAccountIndex', type: 'uint256' }, + { name: 'buyerNftIndex', type: 'uint256' }, + { name: 'buyerOfferId', type: 'uint256' }, + { name: 'buyerType', type: 'uint256' }, + { name: 'buyerAssetId', type: 'uint256' }, + { name: 'buyerAssetAmount', type: 'uint256' }, + { name: 'buyerListedAt', type: 'uint256' }, + { name: 'buyerExpiredAt', type: 'uint256' }, + { name: 'buyerTreasureRate', type: 'uint256' }, + { name: 'buyerSigR', type: 'bytes32' }, + { name: 'buyerSigS', type: 'bytes32' }, + { name: 'Nonce', type: 'uint256' }, + { name: 'ChainId', type: 'uint256' }, + ], + }; +``` \ No newline at end of file diff --git a/errorcode/code.go b/errorcode/code.go deleted file mode 100644 index 41986dda5..000000000 --- a/errorcode/code.go +++ /dev/null @@ -1,60 +0,0 @@ -package errorcode - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/stores/sqlx" -) - -// 
For internal errors, `Code` is not needed in current implementation. -// For external errors (app & glaobalRPC), we can define codes, however the current framework also -// does not use the codes. We can leave the codes for future enhancement. - -var ( - DbErrNotFound = sqlx.ErrNotFound - DbErrSqlOperation = errors.New("unknown sql operation error") - DbErrDuplicatedAccountName = errors.New("duplicated account name") - DbErrDuplicatedAccountIndex = errors.New("duplicated account index") - DbErrDuplicatedCollectionIndex = errors.New("duplicated collection index") - DbErrFailToCreateBlock = errors.New("fail to create block") - DbErrFailToCreateAssetInfo = errors.New("fail to create asset info") - DbErrFailToCreateVolume = errors.New("fail to create volume") - DbErrFailToCreateTVL = errors.New("fail to create tvl") - DbErrFailToCreateLiquidity = errors.New("fail to create liquidity") - DbErrFailToCreateMempoolTx = errors.New("fail to create mempool tx") - DbErrFailToCreateProof = errors.New("fail to create proof") - DbErrFailToCreateFailTx = errors.New("fail to create fail tx") - DbErrFailToCreateSysconfig = errors.New("fail to create system config") - - JsonErrUnmarshal = errors.New("json.Unmarshal err") - JsonErrMarshal = errors.New("json.Marshal err") - - HttpErrFailToRequest = errors.New("http.NewRequest err") - HttpErrClientDo = errors.New("http.Client.Do err") - - IoErrFailToRead = errors.New("ioutil.ReadAll err") - - //TODO: more error code, parameter check, transaction check - - //global rpc - - RpcErrInvalidTx = New(20001, "invalid tx: cannot parse tx") - RpcErrInvalidTxType = New(20002, "invalid tx type ") - RpcErrInvalidTxField = New(20003, "invalid tx field: ") - RpcErrVerification = New(20004, "tx verification failed: ") - - RpcErrInvalidParam = New(20005, "invalid param: ") - RpcErrLiquidityInvalidAssetAmount = New(20006, "invalid liquidity asset amount") - RpcErrLiquidityInvalidAssetID = New(20007, "invalid liquidity asset id") - - RpcErrNotFound = New(24404, "not found") - RpcErrInternal = New(24500, "internal server error") - - //app service - - AppErrInvalidParam = New(25000, "invalid param") - AppErrQuoteNotExist = New(25004, "quote asset does not exist") - AppErrInvalidGasAsset = New(25006, "invalid gas asset") - AppErrNotFound = New(29404, "not found") - AppErrInternal = New(29500, "internal server error") -) diff --git a/errorcode/error.go b/errorcode/error.go deleted file mode 100644 index eb7e2b138..000000000 --- a/errorcode/error.go +++ /dev/null @@ -1,39 +0,0 @@ -package errorcode - -import ( - "fmt" -) - -type Error interface { - Error() string - Code() int32 - RefineError(err ...interface{}) *error -} - -func New(code int32, msg string) Error { - return new(code, msg) -} - -type error struct { - code int32 - message string -} - -func (e *error) Error() string { - return fmt.Sprintf("%d: %s", e.code, e.message) -} - -func (e *error) Code() int32 { - return e.code -} - -func (e *error) RefineError(err ...interface{}) *error { - return new(e.Code(), e.message+fmt.Sprint(err...)) -} - -func new(code int32, msg string) *error { - return &error{ - code: code, - message: msg, - } -} diff --git a/go.mod b/go.mod index aed126473..5ddb2d775 100644 --- a/go.mod +++ b/go.mod @@ -1,35 +1,111 @@ module github.com/bnb-chain/zkbas -go 1.16 +go 1.17 require ( - github.com/bnb-chain/bas-smt v0.0.0-20220804090937-a7712cdee391 github.com/zeromicro/go-zero v1.3.4 gorm.io/gorm v1.23.4 ) require ( - github.com/bnb-chain/zkbas-crypto v0.0.2 + github.com/XiaoMi/pegasus-go-client 
v0.0.0-20210427083443-f3b6b08bc4c2 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bradfitz/gomemcache v0.0.0-20220106215444-fb4bf637b56d // indirect + github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/deckarep/golang-set v1.8.0 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/fatih/color v1.13.0 // indirect + github.com/fxamacker/cbor/v2 v2.2.0 // indirect + github.com/go-logr/logr v1.2.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-sql-driver/mysql v1.6.0 // indirect + github.com/go-stack/stack v1.8.0 // indirect + github.com/golang-jwt/jwt/v4 v4.4.1 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/gorilla/websocket v1.4.2 // indirect + github.com/jackc/chunkreader/v2 v2.0.1 // indirect + github.com/jackc/pgconn v1.12.1 // indirect + github.com/jackc/pgio v1.0.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect + github.com/jackc/pgtype v1.11.0 // indirect + github.com/jackc/pgx/v4 v4.16.1 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.4 // indirect + github.com/justinas/alice v1.2.0 // indirect + github.com/mattn/go-colorable v0.1.9 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect + github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/openzipkin/zipkin-go v0.4.0 // indirect + github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect + github.com/pegasus-kv/thrift v0.13.0 // indirect + github.com/pelletier/go-toml/v2 v2.0.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.12.2 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.33.0 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/rjeczalik/notify v0.9.1 // indirect + github.com/rs/zerolog v1.26.1 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/shirou/gopsutil v3.21.11+incompatible // indirect + github.com/sirupsen/logrus v1.8.1 // indirect + github.com/spaolacci/murmur3 v1.1.0 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect + github.com/tklauser/go-sysconf v0.3.10 // indirect + github.com/tklauser/numcpus v0.4.0 // indirect + github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect + github.com/x448/float16 v0.8.4 // indirect + github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect + go.opentelemetry.io/otel v1.7.0 // indirect + go.opentelemetry.io/otel/exporters/jaeger v1.7.0 // indirect + go.opentelemetry.io/otel/exporters/zipkin v1.7.0 // indirect + go.opentelemetry.io/otel/sdk v1.7.0 // indirect + go.opentelemetry.io/otel/trace v1.7.0 // indirect + go.uber.org/automaxprocs v1.5.1 // indirect + golang.org/x/crypto v0.0.0-20220321153916-2c7772ba3064 // 
indirect + golang.org/x/text v0.3.7 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20220422154200-b37d22cd5731 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect + gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect + gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + +require ( + github.com/bnb-chain/zkbas-crypto v0.0.5 github.com/bnb-chain/zkbas-eth-rpc v0.0.1 + github.com/bnb-chain/zkbas-smt v0.0.1 github.com/consensys/gnark v0.7.0 github.com/consensys/gnark-crypto v0.7.0 github.com/eko/gocache/v2 v2.3.1 - github.com/ethereum/go-ethereum v1.10.17 + github.com/ethereum/go-ethereum v1.10.23 github.com/go-redis/redis/v8 v8.11.5 - github.com/golang/mock v1.6.0 github.com/google/uuid v1.3.0 github.com/patrickmn/go-cache v2.1.0+incompatible github.com/pkg/errors v0.9.1 github.com/robfig/cron/v3 v3.0.1 - github.com/stretchr/testify v1.7.1 + github.com/stretchr/testify v1.7.2 + github.com/urfave/cli/v2 v2.11.2 github.com/yusufpapurcu/wmi v1.2.2 // indirect golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect - golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect + golang.org/x/sys v0.0.0-20220823224334-20c2bfdbfe24 // indirect golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect google.golang.org/grpc v1.46.2 google.golang.org/protobuf v1.28.0 - gopkg.in/yaml.v3 v3.0.0-20220512140231-539c8e751b99 // indirect gorm.io/driver/postgres v1.3.6 - k8s.io/client-go v0.24.1 // indirect + k8s.io/apimachinery v0.24.1 // indirect ) diff --git a/go.sum b/go.sum index 2b13b91ce..573353eac 100644 --- a/go.sum +++ b/go.sum @@ -51,8 +51,9 @@ github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSY github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I= +github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ClickHouse/clickhouse-go v1.5.4/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/ClickHouse/clickhouse-go/v2 v2.0.14/go.mod h1:iq2DUGgpA4BBki2CVwrF8x43zqBjdgHtbexkFkh5a6M= @@ -64,10 +65,8 @@ github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0 github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod 
h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/sarama v1.30.0/go.mod h1:zujlQQx1kzHsh4jfV1USnptCQrHAEZ2Hk8fTKCulPVs= github.com/Shopify/toxiproxy/v2 v2.1.6-0.20210914104332-15ea381dcdae/go.mod h1:/cvHQkZ1fst0EmZnA5dFtiQdWCNCFYzb+uE2vqVgvx0= @@ -109,7 +108,6 @@ github.com/aws/aws-sdk-go-v2/service/route53 v1.1.1/go.mod h1:rLiOUrPLW/Er5kRcQ7 github.com/aws/aws-sdk-go-v2/service/sso v1.1.1/go.mod h1:SuZJxklHxLAXgLTc1iFXbEWkXs7QRTQpCLGaKIprQW0= github.com/aws/aws-sdk-go-v2/service/sts v1.1.1/go.mod h1:Wi0EBZwiz/K44YliU0EKxqTCJGUfYTWXrrBwkq736bM= github.com/aws/smithy-go v1.1.0/go.mod h1:EzMw8dbp/YJL4A5/sbhGddag+NPT7q084agLbB9LgIw= -github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= @@ -119,19 +117,21 @@ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kB github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4= github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40/go.mod h1:8rLXio+WjiTceGBHIoTvn60HIbs7Hm7bcHjyrSqYB9c= -github.com/bnb-chain/bas-smt v0.0.0-20220804090937-a7712cdee391 h1:U0pz17/+58EvEzjpqmRoJJ+wy0hYWNDQb+afohYrths= -github.com/bnb-chain/bas-smt v0.0.0-20220804090937-a7712cdee391/go.mod h1:VJsBjo+k7FICpTw27+LwuAvfq9I13UJ4hUjmberr22Q= -github.com/bnb-chain/zkbas-crypto v0.0.2 h1:mTtXLTiLNi/BVkLdz0gOyypM9mtWAQDcmN1FETqIFbQ= -github.com/bnb-chain/zkbas-crypto v0.0.2/go.mod h1:T2erL+OCXw66yqBdsI5waTEx4FG2d8lYDh1XcEB1kRU= +github.com/bnb-chain/zkbas-crypto v0.0.5 h1:lmrF6n/8ZRFwvIaehayAecaJy9sUS7gSLle94wRLTJo= +github.com/bnb-chain/zkbas-crypto v0.0.5/go.mod h1:W9Daa6XJT7U48WA9+5YQTsEfwzxFs8rC3FeUQ6hGFNo= github.com/bnb-chain/zkbas-eth-rpc v0.0.1 h1:5wqS5TumcxXWovdMRWE6ZWC8mU4lmfRrRCV7sI7mVvw= github.com/bnb-chain/zkbas-eth-rpc v0.0.1/go.mod h1:t8jY6/Jt/Rw8LeU9Rhw406uaOIOou4ycVX06LKyVxKo= +github.com/bnb-chain/zkbas-smt v0.0.1 h1:y2HfTgV7Ya23UCgb0tU7DXtrvU5iFrV9GC/rAwc4bgs= +github.com/bnb-chain/zkbas-smt v0.0.1/go.mod h1:y4+RJZUZHT/tDhQikXOZ+5iWUa5i8qFCRfMvxArgJfo= github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= github.com/bradfitz/gomemcache v0.0.0-20220106215444-fb4bf637b56d h1:pVrfxiGfwelyab6n21ZBkbkmbevaf+WvMIiR7sr97hw= github.com/bradfitz/gomemcache v0.0.0-20220106215444-fb4bf637b56d/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= -github.com/btcsuite/btcd/btcec/v2 v2.1.2 h1:YoYoC9J0jwfukodSBMzZYUVQ8PTiYg4BnOWiJVzTmLs= github.com/btcsuite/btcd/btcec/v2 v2.1.2/go.mod h1:ctjw4H1kknNJmRN4iP1R7bTQ+v3GJkZBd6mui8ZsAZE= -github.com/btcsuite/btcd/chaincfg/chainhash v1.0.0 h1:MSskdM4/xJYcFzy0altH/C/xHopifpWzHUi1JeVI34Q= +github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k= +github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU= github.com/btcsuite/btcd/chaincfg/chainhash v1.0.0/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= 
+github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= github.com/c-bata/go-prompt v0.2.2/go.mod h1:VzqtzE2ksDBcdln8G7mk2RX9QyGjH+OVqOCSiVIqS34= github.com/cenkalti/backoff/v4 v4.1.0/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= @@ -169,15 +169,14 @@ github.com/consensys/gnark-crypto v0.7.0 h1:rwdy8+ssmLYRqKp+ryRRgQJl/rCq2uv+n83c github.com/consensys/gnark-crypto v0.7.0/go.mod h1:KPSuJzyxkJA8xZ/+CV47tyqkr9MmpZA3PXivK4VPrVg= github.com/coocood/freecache v1.2.1 h1:/v1CqMq45NFH9mp/Pt142reundeBM0dVUD3osQBeu/U= github.com/coocood/freecache v1.2.1/go.mod h1:RBUWa/Cy+OHdfTGFEhEuE1pMCMX51Ncizj7rthiQ3vk= -github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f h1:JOrtw2xFKzlg+cbHpyrpLDmnN1HqhBfnX7WDiW7eG2c= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4= @@ -206,9 +205,11 @@ github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwu github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docker/docker v1.4.2-0.20180625184442-8e610b2b55bf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v1.6.2/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dop251/goja v0.0.0-20211011172007-d99e4b8cbf48/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= +github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= @@ -223,8 +224,6 @@ github.com/eko/gocache/v2 
v2.3.1/go.mod h1:l2z8OmpZHL0CpuzDJtxm267eF3mZW1NqUsMj+ github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/emicklei/go-restful v2.9.5+incompatible h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk= -github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -234,14 +233,16 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.m github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/ethereum/go-ethereum v1.10.17 h1:XEcumY+qSr1cZQaWsQs5Kck3FHB0V2RiMHPdTBJ+oT8= github.com/ethereum/go-ethereum v1.10.17/go.mod h1:Lt5WzjM07XlXc95YzrhosmR4J9Ahd6X2wyEV2SvGhk0= +github.com/ethereum/go-ethereum v1.10.23 h1:Xk8XAT4/UuqcjMLIMF+7imjkg32kfVFKoeyQDaO2yWM= +github.com/ethereum/go-ethereum v1.10.23/go.mod h1:EYFyF19u3ezGLD4RqOkLq+ZCXzYbLoNDdZlMt7kyKFg= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fjl/gencodec v0.0.0-20220412091415-8bb9e558978c/go.mod h1:AzA8Lj6YtixmJWL+wkKoBGsLWy9gFrAzi4g+5bCKwpY= github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 h1:FtmdgXiUlNeRsoNMFlKLDt+S+6hbjVMEW6RGQ7aUf7c= github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7hi0= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= @@ -257,6 +258,7 @@ github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWo github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fxamacker/cbor/v2 v2.2.0 h1:6eXqdDDe588rSYAi1HfZKbx6YYQO4mxQ9eC6xYpU/JQ= github.com/fxamacker/cbor/v2 v2.2.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= +github.com/garslo/gogen v0.0.0-20170306192744-1d203ffc1f61/go.mod h1:Q0X6pkwTILDlzrGEckF6HKjXe48EgsY/l7K7vhY4MW8= github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff h1:tY80oXqGNY4FhTFhk+o9oFHGINQ/+vhlm8HFzi6znCI= github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff/go.mod h1:x7DCsMOv1taUwEWCzT4cmDeAkigA5/QCwUodaVOe8Ww= 
github.com/getkin/kin-openapi v0.53.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4= @@ -294,17 +296,12 @@ github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= -github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM= -github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg= github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= -github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= @@ -322,7 +319,6 @@ github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRx github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.3.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.4.1 h1:pC5DB52sCeK48Wlb9oPcdhnjkz1TKt1D/P7WKJ0kUcQ= @@ -376,7 +372,6 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54= github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -396,7 +391,6 @@ github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSN github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.1-0.20200604201612-c04b05f3adfa/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -471,8 +465,9 @@ github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iU github.com/holiman/uint256 v1.2.0 h1:gpSYcPLWGv4sG43I2mVLiDZCNDh/EpGjSk8tmtxitHM= github.com/holiman/uint256 v1.2.0/go.mod h1:y4ga/t+u+Xwd7CpDgZESaRcWy0I7XMlTMA25ApIH5Jw= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huin/goupnp v1.0.3-0.20220313090229-ca81a64b4204 h1:+EYBkW+dbi3F/atB+LSQZSWh7+HNrV3A/N0y6DSoy9k= github.com/huin/goupnp v1.0.3-0.20220313090229-ca81a64b4204/go.mod h1:ZxNlw5WqJj6wSsRK5+YfflQGXYfccj5VgQsMNixHM7Y= +github.com/huin/goupnp v1.0.3 h1:N8No57ls+MnjlB+JPiCVSOyy/ot7MJTqlo7rn+NYSqQ= +github.com/huin/goupnp v1.0.3/go.mod h1:ZxNlw5WqJj6wSsRK5+YfflQGXYfccj5VgQsMNixHM7Y= github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -554,15 +549,12 @@ github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/ github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= @@ -599,6 +591,7 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 
+github.com/kylelemons/godebug v0.0.0-20170224010052-a616ab194758/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= @@ -610,13 +603,10 @@ github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= -github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= @@ -663,19 +653,15 @@ github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqky github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod 
h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= @@ -694,6 +680,7 @@ github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6 github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= @@ -801,6 +788,8 @@ github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThC github.com/rs/zerolog v1.26.1 h1:/ihwxqH+4z8UxyI70wM1z9yCvkWcfz/a3mj48k/Zngc= github.com/rs/zerolog v1.26.1/go.mod h1:/wSSJWX7lVrsOwlbyTRSOJvqRlc+WjWlfes+CiJ+tmc= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -847,7 +836,6 @@ github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t6 github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4 h1:Gb2Tyox57NRNuZ2d3rmvB3pcmbu7O1RS3m8WRx7ilrg= @@ -865,9 +853,11 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2 h1:4jaiDzPyXQvSd7D0EjG45355tLlV3VOECpq10pLC+8s= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/subosito/gotenv v1.2.0/go.mod 
h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/supranational/blst v0.3.8-0.20220526154634-513d2456b344/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= @@ -881,6 +871,9 @@ github.com/tklauser/numcpus v0.4.0/go.mod h1:1+UI3pD8NW14VMwdgJNJ1ESk2UnwhAnz5hM github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef h1:wHSqTBrZW24CsNJDfeh9Ex6Pm0Rcpc7qrgKBiL44vF4= github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/urfave/cli/v2 v2.10.2/go.mod h1:f8iq5LtQ/bLxafbdBSLPPNsgaW0l/2fYYEHhAyPlwvo= +github.com/urfave/cli/v2 v2.11.2 h1:FVfNg4m3vbjbBpLYxW//WjxUoHvJ9TlppXcqY9Q9ZfA= +github.com/urfave/cli/v2 v2.11.2/go.mod h1:f8iq5LtQ/bLxafbdBSLPPNsgaW0l/2fYYEHhAyPlwvo= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= @@ -893,6 +886,8 @@ github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= +github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -900,6 +895,7 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw= github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg= @@ -908,13 +904,10 @@ github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxt github.com/zeromicro/go-zero v1.3.4 h1:XeNdwcrOmnvHj891AmeCA9RrRj1PeN49//KKCK4WAXk= github.com/zeromicro/go-zero v1.3.4/go.mod h1:nEU/ITZSmxRxvr/JmSoJ48MNV62UpY6bqJz9Voba7Yw= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/api/v3 v3.5.4 
h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.etcd.io/etcd/client/v3 v3.5.4 h1:p83BUL3tAYS0OT/r0qglgc3M1JjhM0diV8DSWAhVXv4= go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= go.mongodb.org/mongo-driver v1.9.1/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= @@ -940,25 +933,21 @@ go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/automaxprocs v1.5.1 h1:e1YG66Lrk73dn4qhg8WFSvhF0JuFQF0ERIp4rpuV8Qk= go.uber.org/automaxprocs v1.5.1/go.mod h1:BF4eumQw0P9GtnuxxovUd06vwm1o18oMzFtK66vU6XU= go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -983,7 +972,6 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20210920023735-84f357641f63/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod 
h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220321153916-2c7772ba3064 h1:S25/rfnfsMVgORT4/J61MJ7rdyseOZOyvLIrZEZ7s6s= golang.org/x/crypto v0.0.0-20220321153916-2c7772ba3064/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -992,7 +980,6 @@ golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= @@ -1000,6 +987,7 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20220426173459-3bcf042a4bf5/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -1017,7 +1005,6 @@ golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPI golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mobile v0.0.0-20220518205345-8578da9835fd/go.mod h1:pe2sM7Uk+2Su1y7u/6Z8KJ24D7lepUjFZbhFOrmDfuQ= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= @@ -1027,6 +1014,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1080,12 +1069,14 @@ golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210917221730-978cfadd31cf/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220421235706-1d1ef9303861/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1101,8 +1092,6 @@ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b h1:clP8eMhB30EHdc0bd2Twtq6kgU7yl5ub2cQLSdrv1Dg= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1197,6 +1186,7 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1205,12 +1195,11 @@ golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220823224334-20c2bfdbfe24 h1:TyKJRhyo17yWxOMCTHKWrc5rddHORMlnZ/j57umaUd8= +golang.org/x/sys v0.0.0-20220823224334-20c2bfdbfe24/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1229,7 +1218,6 @@ golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w= golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1262,6 +1250,7 @@ golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191126055441-b0650ceb63d9/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1299,13 +1288,14 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= -golang.org/x/tools v0.1.8-0.20211022200916-316ba0b74098/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0= golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= @@ -1442,7 +1432,6 @@ gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMy gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY= gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= @@ -1454,7 +1443,6 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 h1:yiW+nvdHb9LVqSHQBXfZCieqV4fzYhNBql77zY0ykqs= gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637/go.mod h1:BHsqpu/nsuzkT5BpiH1EMZPLyqSMM8JbIavyFACoFNk= -gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0= gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -1468,13 +1456,12 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20220512140231-539c8e751b99 h1:dbuHpmKjkDzSOMKAWl10QNlgaZUd3V1q99xc81tt2Kc= -gopkg.in/yaml.v3 
v3.0.0-20220512140231-539c8e751b99/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/driver/postgres v1.3.6 h1:Q0iLoYvWwsJVpYQrSrY5p5P4YzW7fJjFMBG2sa4Bz5U= gorm.io/driver/postgres v1.3.6/go.mod h1:f02ympjIcgtHEGFMZvdgTxODZ9snAHDb4hXfigBVuNI= gorm.io/gorm v1.23.4 h1:1BKWM67O6CflSLcwGQR7ccfmC4ebOxQrTfOQGRE9wjg= gorm.io/gorm v1.23.4/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= -gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -1485,33 +1472,26 @@ honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= k8s.io/api v0.22.9/go.mod h1:rcjO/FPOuvc3x7nQWx29UcDrFJMx82RxDob71ntNH4A= -k8s.io/api v0.24.1 h1:BjCMRDcyEYz03joa3K1+rbshwh1Ay6oB53+iUx2H8UY= -k8s.io/api v0.24.1/go.mod h1:JhoOvNiLXKTPQ60zh2g0ewpA+bnEYf5q44Flhquh4vQ= k8s.io/apimachinery v0.0.0-20191123233150-4c4803ed55e3/go.mod h1:b9qmWdKlLuU9EBh+06BtLcSf/Mu89rWL33naRxs1uZg= k8s.io/apimachinery v0.22.9/go.mod h1:ZvVLP5iLhwVFg2Yx9Gh5W0um0DUauExbRhe+2Z8I1EU= k8s.io/apimachinery v0.23.5/go.mod h1:BEuFMMBaIbcOqVIJqNZJXGFTP4W6AycEpb5+m/97hrM= k8s.io/apimachinery v0.24.1 h1:ShD4aDxTQKN5zNf8K1RQ2u98ELLdIW7jEnlO9uAMX/I= k8s.io/apimachinery v0.24.1/go.mod h1:82Bi4sCzVBdpYjyI4jY6aHX+YCUchUIrZrXKedjd2UM= k8s.io/client-go v0.22.9/go.mod h1:IoH7exYnoH/zgvHOuVxh2c4yJepcCBt72FzCTisOc4k= -k8s.io/client-go v0.24.1 h1:w1hNdI9PFrzu3OlovVeTnf4oHDt+FJLd9Ndluvnb42E= -k8s.io/client-go v0.24.1/go.mod h1:f1kIDqcEYmwXS/vTbbhopMUbhKp2JhOeVTfxgaCIlF8= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.9.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/klog/v2 v2.40.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/klog/v2 v2.60.1 h1:VW25q3bZx9uE3vvdL6M8ezOX79vA2Aq1nEWLqNQclHc= k8s.io/klog/v2 v2.60.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= k8s.io/kube-openapi v0.0.0-20211109043538-20434351676c/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= k8s.io/kube-openapi v0.0.0-20211115234752-e816edb12b65/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk= -k8s.io/kube-openapi 
v0.0.0-20220328201542-3ee0da9b0b42 h1:Gii5eqf+GmIEwGNKQYQClCayuJCe2/4fZUvF7VG99sU= k8s.io/kube-openapi v0.0.0-20220328201542-3ee0da9b0b42/go.mod h1:Z/45zLw8lUo4wdiUkI+v/ImEGAvu3WatcZl3lPMR4Rk= k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20211116205334-6203023598ed/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= @@ -1523,13 +1503,9 @@ rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= sigs.k8s.io/json v0.0.0-20211020170558-c049b76a60c6/go.mod h1:p4QtZmO4uMYipTQNzagwnNoseA6OxSUutVw05NhYDRs= -sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 h1:kDi4JBNAsJWfz1aEXhO8Jg87JJaPNLh5tIzYHgStQ9Y= sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2/go.mod h1:B+TnT182UBxE84DiCz4CVE26eOSDAeYCpfDnC2kdKMY= -sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e h1:4Z09Hglb792X0kfOBBJUPFEyvVfQWrYT/l8h5EKA6JQ= sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/structured-merge-diff/v4 v4.2.1 h1:bKCqE9GvQ5tiVHn5rfn1r+yao3aLQEaLzkkmAkf+A6Y= sigs.k8s.io/structured-merge-diff/v4 v4.2.1/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= diff --git a/hooks.json b/hooks.json deleted file mode 100644 index d7ff158f9..000000000 --- a/hooks.json +++ /dev/null @@ -1,44 +0,0 @@ -[ - { - "id": "pull-and-build", - "execute-command": "", - "pass-arguments-to-command": [ - { - "source": "payload", - "name": "release.tag_name" - }, - { - "source": "string", - "name": "" - } - ], - "trigger-rule": { - "and": [ - { - "match": - { - "type": "payload-hash-sha1", - "secret": "", - "parameter": - { - "source": "header", - "name": "X-Hub-Signature" - } - } - }, - { - "match": - { - "type": "value", - "value": "published", - "parameter": - { - "source": "payload", - "name": "action" - } - } - } - ] - } - } -] \ No newline at end of file diff --git a/kubeyaml/compiled.yaml b/kubeyaml/compiled.yaml deleted file mode 100644 index 3e9513df9..000000000 --- a/kubeyaml/compiled.yaml +++ /dev/null @@ -1,496 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: app-svc - namespace: default -spec: - ports: - - port: 8888 - selector: - app: app ---- -apiVersion: v1 -kind: Service -metadata: - name: globalrpc-svc - namespace: default -spec: - ports: - - port: 8080 - selector: - app: globalrpc ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: app - name: app - namespace: default -spec: - replicas: 3 - revisionHistoryLimit: 5 - selector: - matchLabels: - app: app - template: - metadata: - labels: - app: app - spec: - serviceAccountName: find-endpoints - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/app:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - livenessProbe: - initialDelaySeconds: 15 - periodSeconds: 20 - tcpSocket: - port: 8888 - name: app - ports: - - name: metrics - containerPort: 9091 - - name: app - containerPort: 8888 - 
readinessProbe: - initialDelaySeconds: 5 - periodSeconds: 10 - tcpSocket: - port: 8888 - resources: - limits: - cpu: 1000m - memory: 512Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - initContainers: - - command: - - sh - - -c - - until nslookup globalrpc-svc; do echo waiting for globalrpc; sleep 2; done; - image: busybox - name: init-globalrpc - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone - ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: committer - name: committer - namespace: default -spec: - replicas: 1 - revisionHistoryLimit: 5 - strategy: - type: Recreate - selector: - matchLabels: - app: committer - template: - metadata: - labels: - app: committer - spec: - serviceAccountName: find-endpoints - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/committer:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - name: committer - resources: - limits: - cpu: 1000m - memory: 1024Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: globalrpc - name: globalrpc - namespace: default -spec: - replicas: 3 - revisionHistoryLimit: 5 - selector: - matchLabels: - app: globalrpc - template: - metadata: - labels: - app: globalrpc - spec: - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/globalrpc:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - livenessProbe: - initialDelaySeconds: 15 - periodSeconds: 20 - tcpSocket: - port: 8080 - name: globalrpc - ports: - - name: metrics - containerPort: 8080 - readinessProbe: - initialDelaySeconds: 5 - periodSeconds: 10 - tcpSocket: - port: 8080 - resources: - limits: - cpu: 1000m - memory: 1024Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: monitor - name: monitor - namespace: default -spec: - replicas: 1 - revisionHistoryLimit: 5 - strategy: - type: Recreate - selector: - matchLabels: - app: monitor - template: - metadata: - labels: - app: monitor - spec: - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/monitor:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - name: monitor - resources: - limits: - cpu: 1000m - memory: 512Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: witnessgenerator - name: witnessgenerator - namespace: default -spec: - replicas: 1 - revisionHistoryLimit: 5 - strategy: - type: Recreate - selector: - matchLabels: - app: witnessgenerator - template: - metadata: - labels: - app: witnessgenerator - spec: - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/witnessgenerator:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - name: witnessgenerator - resources: - limits: - cpu: 
1000m - memory: 512Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: prover - name: prover - namespace: default -spec: - replicas: 3 - revisionHistoryLimit: 5 - selector: - matchLabels: - app: prover - template: - metadata: - labels: - app: prover - spec: - serviceAccountName: find-endpoints - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/prover:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - name: prover - resources: - limits: - cpu: 2000m - memory: 8192Mi - requests: - cpu: 500m - memory: 2048Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: sender - name: sender - namespace: default -spec: - replicas: 1 - strategy: - type: Recreate - revisionHistoryLimit: 5 - selector: - matchLabels: - app: sender - template: - metadata: - labels: - app: sender - spec: - containers: - - image: us-central1-docker.pkg.dev/zkbas-330903/zkbas-webhook/sender:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - name: sender - resources: - limits: - cpu: 1000m - memory: 1024Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: app-hpa-c - name: app-hpa-c - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: cpu - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: app ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: app-hpa-m - name: app-hpa-m - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: memory - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: app ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: globalrpc-hpa-c - name: globalrpc-hpa-c - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: cpu - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: globalrpc ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: globalrpc-hpa-m - name: globalrpc-hpa-m - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: memory - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: globalrpc ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: prover-hpa-c - name: prover-hpa-c - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: cpu - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: prover ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: 
prover-hpa-m - name: prover-hpa-m - namespace: default -spec: - maxReplicas: 5 - metrics: - - resource: - name: memory - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: prover diff --git a/kubeyaml/compiled_development.yaml b/kubeyaml/compiled_development.yaml deleted file mode 100644 index b260963ef..000000000 --- a/kubeyaml/compiled_development.yaml +++ /dev/null @@ -1,118 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: app-svc - namespace: $NAMESPACE -spec: - ports: - - port: 8888 - selector: - app: app ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: app - name: app - namespace: $NAMESPACE -spec: - replicas: 3 - revisionHistoryLimit: 5 - selector: - matchLabels: - app: app - template: - metadata: - labels: - app: app - spec: - serviceAccountName: find-endpoints - containers: - - image: us-central1-docker.pkg.dev/zecrey-330903/zecrey-webhook/app:$TAG_NAME - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - sh - - -c - - sleep 5 - livenessProbe: - initialDelaySeconds: 15 - periodSeconds: 20 - tcpSocket: - port: 8888 - name: app - ports: - - name: metrics - containerPort: 9091 - - name: app - containerPort: 8888 - readinessProbe: - initialDelaySeconds: 5 - periodSeconds: 10 - tcpSocket: - port: 8888 - resources: - limits: - cpu: 1000m - memory: 512Mi - requests: - cpu: 200m - memory: 256Mi - volumeMounts: - - mountPath: /etc/localtime - name: timezone - # initContainers: - # - command: - # - sh - # - -c - # - until nslookup globalrpc-svc; do echo waiting for globalrpc; sleep 2; done; - # image: busybox - # name: init-globalrpc - volumes: - - hostPath: - path: /usr/share/zoneinfo/Asia/Shanghai - name: timezone - -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: app-hpa-c - name: app-hpa-c - namespace: $NAMESPACE -spec: - maxReplicas: 5 - metrics: - - resource: - name: cpu - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: app ---- -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - labels: - app: app-hpa-m - name: app-hpa-m - namespace: $NAMESPACE -spec: - maxReplicas: 5 - metrics: - - resource: - name: memory - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: app ---- diff --git a/kubeyaml/prom-frontend.yaml b/kubeyaml/prom-frontend.yaml deleted file mode 100644 index c6f2596b3..000000000 --- a/kubeyaml/prom-frontend.yaml +++ /dev/null @@ -1,69 +0,0 @@ ---- -apiVersion: v1 -kind: Service -metadata: - name: prom-frontend - namespace: monitoring -spec: - selector: - app: prom-frontend - ports: - - name: web - port: 9090 ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: prom-frontend - namespace: monitoring -spec: - replicas: 1 - selector: - matchLabels: - app: prom-frontend - template: - metadata: - labels: - app: prom-frontend - spec: - automountServiceAccountToken: true - nodeSelector: - kubernetes.io/os: linux - kubernetes.io/arch: amd64 - containers: - - name: prom-frontend - image: "gke.gcr.io/prometheus-engine/frontend:v0.4.1-gke.0" - args: - - "--web.listen-address=:9090" - - "--query.project-id=zecrey-330903" - ports: - - name: web - containerPort: 9090 - readinessProbe: - httpGet: - path: /-/ready - port: web - livenessProbe: - httpGet: - path: /-/healthy - port: web ---- -apiVersion: autoscaling/v2beta1 -kind: 
HorizontalPodAutoscaler -metadata: - labels: - app: prom-frontend-hpa-c - name: prom-frontend-hpa-c - namespace: monitoring -spec: - maxReplicas: 3 - metrics: - - resource: - name: cpu - targetAverageUtilization: 80 - type: Resource - minReplicas: 1 - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: prom-frontend \ No newline at end of file diff --git a/pkg/multcache/api.go b/pkg/multcache/api.go deleted file mode 100644 index 798024a6e..000000000 --- a/pkg/multcache/api.go +++ /dev/null @@ -1,46 +0,0 @@ -package multcache - -import ( - "context" - "time" - - "github.com/eko/gocache/v2/cache" - "github.com/eko/gocache/v2/marshaler" - "github.com/eko/gocache/v2/metrics" - "github.com/eko/gocache/v2/store" - "github.com/go-redis/redis/v8" - gocache "github.com/patrickmn/go-cache" -) - -// Query function when key does not exist -type MultCache interface { - GetWithSet(ctx context.Context, key string, value interface{}, duration time.Duration, - query QueryFunc) (interface{}, error) - Get(ctx context.Context, key string, value interface{}) (interface{}, error) - Set(ctx context.Context, key string, value interface{}, duration time.Duration) error - Delete(ctx context.Context, key string) error -} - -func NewGoCache(expiration, cleanupInterval uint32) MultCache { - gocacheClient := gocache.New(time.Duration(expiration)*time.Minute, - time.Duration(cleanupInterval)*time.Minute) - gocacheStore := store.NewGoCache(gocacheClient, nil) - goCacheManager := cache.New(gocacheStore) - promMetrics := metrics.NewPrometheus("my-amazing-app") - cacheManager := cache.NewMetric(promMetrics, goCacheManager) - return &multcache{ - marshal: marshaler.New(cacheManager), - } -} - -func NewRedisCache(redisAdd, password string, expiration uint32) MultCache { - redisClient := redis.NewClient(&redis.Options{Addr: redisAdd, Password: password}) - redisStore := store.NewRedis(redisClient, - &store.Options{Expiration: time.Duration(expiration) * time.Minute}) - redisCacheManager := cache.New(redisStore) - promMetrics := metrics.NewPrometheus("my-amazing-app") - cacheManager := cache.NewMetric(promMetrics, redisCacheManager) - return &multcache{ - marshal: marshaler.New(cacheManager), - } -} diff --git a/pkg/multcache/multcache.go b/pkg/multcache/multcache.go deleted file mode 100644 index 903023962..000000000 --- a/pkg/multcache/multcache.go +++ /dev/null @@ -1,53 +0,0 @@ -package multcache - -import ( - "context" - "time" - - "github.com/eko/gocache/v2/marshaler" - "github.com/eko/gocache/v2/store" -) - -type multcache struct { - marshal *marshaler.Marshaler -} - -type QueryFunc func() (interface{}, error) - -func (m *multcache) GetWithSet(ctx context.Context, key string, valueStruct interface{}, duration time.Duration, - query QueryFunc) (interface{}, error) { - value, err := m.marshal.Get(ctx, key, valueStruct) - if err == nil { - return value, nil - } - if err.Error() == errGoCacheKeyNotExist.Error() || err.Error() == errRedisCacheKeyNotExist.Error() { - value, err = query() - if err != nil { - return nil, err - } - return value, m.Set(ctx, key, value, duration) - } - return nil, err -} - -func (m *multcache) Get(ctx context.Context, key string, value interface{}) (interface{}, error) { - returnObj, err := m.marshal.Get(ctx, key, value) - if err != nil { - return nil, err - } - return returnObj, nil -} - -func (m *multcache) Set(ctx context.Context, key string, value interface{}, duration time.Duration) error { - if err := m.marshal.Set(ctx, key, value, &store.Options{Expiration: duration}); err != 
nil { - return err - } - return nil -} - -func (m *multcache) Delete(ctx context.Context, key string) error { - if err := m.marshal.Delete(ctx, key); err != nil { - return err - } - return nil -} diff --git a/pkg/multcache/ttl.go b/pkg/multcache/ttl.go deleted file mode 100644 index e9aea19cb..000000000 --- a/pkg/multcache/ttl.go +++ /dev/null @@ -1,31 +0,0 @@ -package multcache - -import ( - "time" -) - -const ( - AccountTtl = 1000 * time.Millisecond //cache ttl of account - - AssetTtl = 1000 * time.Millisecond //cache ttl of asset - AssetListTtl = 2000 * time.Millisecond //cache ttl of asset list - - NftTtl = 1000 * time.Millisecond //cache ttl of nft - NftCountTtl = 2000 * time.Millisecond //cache ttl of nft total count - NftListTtl = 2000 * time.Millisecond //cache ttl of nft list - - BlockTtl = 1000 * time.Millisecond //cache ttl of block - BlockListTtl = 2000 * time.Millisecond //cache ttl of block list - BlockHeightTtl = 500 * time.Millisecond //cache ttl of current block height - BlockCountTtl = 2000 * time.Millisecond //cache ttl of block count - - MempoolTxTtl = 500 * time.Millisecond //cache ttl of mempool tx - TxTtl = 2000 * time.Millisecond //cache ttl of tx - TxCountTtl = 2000 * time.Millisecond //cache ttl of tx count - - PriceTtl = 500 * time.Millisecond //cache ttl of currency price - - DauTtl = 5000 * time.Millisecond //cache ttl of dau - - SysconfigTtl = 10000 * time.Millisecond //cache ttl of sysconfig -) diff --git a/pkg/multcache/var.go b/pkg/multcache/var.go deleted file mode 100644 index 78be5de3f..000000000 --- a/pkg/multcache/var.go +++ /dev/null @@ -1,92 +0,0 @@ -package multcache - -import ( - "errors" - "fmt" -) - -// error got from other package -var ( - errRedisCacheKeyNotExist = errors.New("redis: nil") - errGoCacheKeyNotExist = errors.New("Value not found in GoCache store") -) - -const ( - KeyGetBlockByBlockHeight = "cache:block:blockHeight" - KeyGetBlockBlockCommitment = "cache::block:blockCommitment:" - KeyGetBlockWithTxHeight = "cache::block:blockHeightWithTx:" - KeyGetBlockList = "cache::block:blockList:" - KeyGetCommittedBlocksCount = "cache::block:CommittedBlocksCount:" - KeyGetVerifiedBlocksCount = "cache::block:VerifiedBlocksCount:" - KeyGetBlocksTotalCount = "cache::block:BlocksTotalCount:" - KeyGetCurrentBlockHeight = "cache::block:GetCurrentBlockHeight:" - - KeyGetL2AssetsList = "cache::l2asset:L2AssetsList:" - KeyGetL2AssetInfoBySymbol = "cache::l2asset:L2AssetInfoBySymbol:" - KeyGetSimpleL2AssetInfoByAssetId = "cache::l2asset:SimpleL2AssetInfoByAssetId:" - - KeyGetSysconfigByName = "cache::sysconf:GetSysconfigByName:" -) - -// cache key prefix: account -func SpliceCacheKeyAccountByAccountName(accountName string) string { - return "cache:account_accountName_" + accountName -} - -func SpliceCacheKeyAccountByAccountPk(accountPk string) string { - return "cache:account_accountPk_" + accountPk -} - -func SpliceCacheKeyBasicAccountByAccountIndex(accountIndex int64) string { - return fmt.Sprintf("cache:basicAccount_accountIndex_%d", accountIndex) -} - -func SpliceCacheKeyAccountByAccountIndex(accountIndex int64) string { - return fmt.Sprintf("cache:account_accountIndex_%d", accountIndex) -} - -// cache key prefix: tx -func SpliceCacheKeyTxsCount() string { - return "cache:txsCount" -} - -func SpliceCacheKeyTxByTxHash(txHash string) string { - return "cache:tx_txHash" + txHash -} - -func SpliceCacheKeyTxByTxId(txID int64) string { - return fmt.Sprintf("cache:tx_txId_%d", txID) -} - -func SpliceCacheKeyTxCountByTimeRange(data string) string { - return 
"cache:txCount_" + data -} - -// cache key prefix: liquidity -func SpliceCacheKeyLiquidityForReadByPairIndex(pairIndex int64) string { - return fmt.Sprintf("cache:liquidity_pairIndex_%d", pairIndex) -} - -func SpliceCacheKeyLiquidityInfoForWriteByPairIndex(pairIndex int64) string { - return fmt.Sprintf("cache:liquidity_pairIndex_%d", pairIndex) -} - -// cache key prefix: nft - -func SpliceCacheKeyNftInfoByNftIndex(nftIndex int64) string { - return fmt.Sprintf("cache:nftInfo_nftIndex_%d", nftIndex) -} - -func SpliceCacheKeyAccountTotalNftCount(accountIndex int64) string { - return fmt.Sprintf("cache:account_nftTotalCount_%d", accountIndex) -} - -func SpliceCacheKeyAccountNftList(accountIndex int64, offset, limit int64) string { - return fmt.Sprintf("cache:account_nftList_%d_%d_%d", accountIndex, offset, limit) -} - -// cache key prefix: price -func SpliceCacheKeyCurrencyPrice() string { - return "cache:currencyPrice:" - -} diff --git a/pkg/treedb/context.go b/pkg/treedb/context.go deleted file mode 100644 index 64de41c47..000000000 --- a/pkg/treedb/context.go +++ /dev/null @@ -1,27 +0,0 @@ -package treedb - -import ( - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/bas-smt/database" -) - -type Context struct { - Name string - Driver Driver - LevelDBOption *LevelDBOption - RedisDBOption *RedisDBOption - - TreeDB database.TreeDB -} - -func (ctx *Context) IsLoad() bool { - return ctx.Driver == MemoryDB -} - -func (ctx *Context) Options(blockHeight int64) []bsmt.Option { - var opts []bsmt.Option - if ctx.Driver == MemoryDB { - opts = append(opts, bsmt.InitializeVersion(bsmt.Version(blockHeight))) - } - return opts -} diff --git a/pkg/treedb/errors.go b/pkg/treedb/errors.go deleted file mode 100644 index 5c93e1fdc..000000000 --- a/pkg/treedb/errors.go +++ /dev/null @@ -1,7 +0,0 @@ -package treedb - -import "errors" - -var ( - ErrUnsupportedDriver = errors.New("unsupported db driver") -) diff --git a/service/api/app/Dockerfile b/service/api/app/Dockerfile deleted file mode 100644 index 5d569f095..000000000 --- a/service/api/app/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 -# ENV GOPROXY https://goproxy.cn,direct - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/app /app/app -COPY configyaml /app/etc - -CMD ["./app", "-f", "etc/app.yaml"] \ No newline at end of file diff --git a/service/api/app/app.api b/service/api/app/app.api deleted file mode 100644 index c654b02dc..000000000 --- a/service/api/app/app.api +++ /dev/null @@ -1,878 +0,0 @@ -syntax = "v1" - -info( - author: "Gavin Gao, Tao Zhu, Wenki Wu" - date: "2022-5-1" - desc: "App API" -) - -/* ========================= root =========================*/ - -/* root path */ -type ( - ReqGetStatus { - } - RespGetStatus { - Status uint32 `json:"status"` - NetworkId uint32 `json:"network_id"` - ServerVersion string `json:"server_version"` - } -) - -@server( - group: root -) -service app-api { - @handler GetStatus - get / (ReqGetStatus) returns (RespGetStatus) -} - -/* ========================= Account =========================*/ - -type AccountAsset { - AssetId uint32 `json:"asset_id"` - Balance string `json:"balance"` - LpAmount string `json:"lp_amount"` - OfferCanceledOrFinalized string 
`json:"offer_canceled_or_finalized"` -} - -// GetAccountStatusByAccountPk -type ( - ReqGetAccountStatusByAccountPk { - AccountPk string `form:"account_pk"` - } - RespGetAccountStatusByAccountPk { - AccountStatus int64 `json:"account_status"` - AccountIndex int64 `json:"account_index"` - AccountName string `json:"account_name"` - } -) - -// GetAccountInfoByPubKey -type ( - ReqGetAccountInfoByPubKey { - AccountPk string `form:"account_pk"` - } - RespGetAccountInfoByPubKey { - AccountStatus uint32 `json:"account_status"` - AccountName string `json:"account_name"` - AccountIndex int64 `json:"account_index"` - Nonce int64 `json:"nonce"` - Assets []*AccountAsset `json:"assets"` - } -) - -// getAccountInfoByAccountIndex -type ( - ReqGetAccountInfoByAccountIndex { - AccountIndex int64 `form:"account_index"` - } - RespGetAccountInfoByAccountIndex { - AccountStatus uint32 `json:"account_status"` - AccountName string `json:"account_name"` - AccountPk string `json:"account_pk"` - Nonce int64 `json:"nonce"` - Assets []*AccountAsset `json:"assets"` - } -) - -// GetAccountStatusByAccountName -type ( - ReqGetAccountStatusByAccountName { - AccountName string `form:"account_name"` - } - RespGetAccountStatusByAccountName { - AccountStatus uint32 `json:"account_status"` - AccountIndex uint32 `json:"account_index"` - AccountPk string `json:"account_pk"` - } -) - -// GetAccountInfoByAccountName -type ( - ReqGetAccountInfoByAccountName { - AccountName string `form:"account_name"` - } - RespGetAccountInfoByAccountName { - AccountIndex uint32 `json:"account_index"` - AccountPk string `json:"account_pk"` - Nonce int64 `json:"nonce"` - Assets []*AccountAsset `json:"assets"` - } -) - -// GetBalanceByAssetIdAndAccountName -type ( - ReqGetBlanceByAssetIdAndAccountName { - AssetId uint32 `form:"asset_id"` - AccountName string `form:"account_name"` - } - RespGetBlanceInfoByAssetIdAndAccountName { - Balance string `json:"balance_enc"` - } -) - -@server( - group: account -) - -service app-api { - @handler GetAccountStatusByAccountPk - get /api/v1/account/getAccountStatusByAccountPk (ReqGetAccountStatusByAccountPk) returns (RespGetAccountStatusByAccountPk) - - @handler GetAccountInfoByPubKey - get /api/v1/account/getAccountInfoByPubKey (ReqGetAccountInfoByPubKey) returns (RespGetAccountInfoByPubKey) - - @handler GetAccountInfoByAccountIndex - get /api/v1/account/getAccountInfoByAccountIndex (ReqGetAccountInfoByAccountIndex) returns (RespGetAccountInfoByAccountIndex) - - @handler GetAccountStatusByAccountName - get /api/v1/account/getAccountStatusByAccountName (ReqGetAccountStatusByAccountName) returns (RespGetAccountStatusByAccountName) - - @handler GetAccountInfoByAccountName - get /api/v1/account/getAccountInfoByAccountName (ReqGetAccountInfoByAccountName) returns (RespGetAccountInfoByAccountName) - - @handler GetBalanceByAssetIdAndAccountName - get /api/v1/account/getBalanceByAssetIdAndAccountName (ReqGetBlanceByAssetIdAndAccountName) returns (RespGetBlanceInfoByAssetIdAndAccountName) -} - -/* ========================= Block =========================*/ - -type ( - TxHash { - TxHash string `json:"tx_hash"` - CreatedAt int64 `json:"created_at"` - } - Block { - BlockCommitment string `json:"block_commitment"` - BlockHeight int64 `json:"block_height"` - StateRoot string `json:"state_root"` - PriorityOperations int64 `json:"priority_operations"` - PendingOnChainOperationsHash string `json:"pending_on_chain_operations_hash"` - PendingOnChainOperationsPubData string `json:"pending_on_chain_operations_hub_data"` - 
CommittedTxHash string `json:"committed_tx_hash"` - CommittedAt int64 `json:"committed_at"` - VerifiedTxHash string `json:"verified_tx_hash"` - VerifiedAt int64 `json:"verified_at"` - Txs []*Tx `json:"txs"` - BlockStatus int64 `json:"block_status"` - } -) - -// GetBlocks -type ( - ReqGetBlocks { - Offset uint16 `form:"offset"` - Limit uint16 `form:"limit"` - } - RespGetBlocks { - Total uint32 `json:"total"` - Blocks []*Block `json:"blocks"` - } -) - -// GetBlockByCommitment -type ( - ReqGetBlockByCommitment { - BlockCommitment string `form:"block_commitment"` - } - RespGetBlockByCommitment { - Block Block `json:"block"` - } -) - -// GetBlockByBlockHeight -type ( - ReqGetBlockByBlockHeight { - BlockHeight uint64 `form:"block_height"` - } - RespGetBlockByBlockHeight { - Block Block `json:"block"` - } -) - -// GetCurrentBlockHeight -type ( - RespCurrentBlockHeight { - Height int64 `json:"height"` - } -) -@server( - group: block -) -service app-api { - @handler GetBlocks - get /api/v1/block/getBlocks (ReqGetBlocks) returns (RespGetBlocks) - - @handler GetBlockByCommitment - get /api/v1/block/getBlockByCommitment (ReqGetBlockByCommitment) returns (RespGetBlockByCommitment) - - @handler GetBlockByBlockHeight - get /api/v1/block/getBlockByBlockHeight (ReqGetBlockByBlockHeight) returns (RespGetBlockByBlockHeight) - - @handler GetCurrentBlockHeight - get /api/v1/block/getCurrentBlockHeight returns (RespCurrentBlockHeight) -} - -/* ========================= Info =========================*/ - -// GetLayer2BasicInfo -type ( - ReqGetLayer2BasicInfo { - } - RespGetLayer2BasicInfo { - BlockCommitted int64 `json:"block_committed"` - BlockVerified int64 `json:"block_verified"` - TotalTransactions int64 `json:"total_transactions"` - TransactionsCountYesterday int64 `json:"transactions_count_yesterday"` - TransactionsCountToday int64 `json:"transactions_count_today"` - DauYesterday int64 `json:"dau_yesterday"` - DauToday int64 `json:"dau_today"` - ContractAddresses []string `json:"contract_addresses"` - } -) - -// GetAssetsList -type ( - ReqGetAssetsList { - } - AssetInfo { - AssetId uint32 `json:"asset_id"` - AssetName string `json:"asset_name"` - AssetDecimals uint32 `json:"asset_decimals"` - AssetSymbol string `json:"asset_symbol"` - AssetAddress string `json:"asset_address"` - IsGasAsset uint32 `json:"is_gas_asset"` - } - RespGetAssetsList { - Assets []*AssetInfo `json:"assets"` - } -) - -// GetCurrencyPriceBySymbol -type ( - ReqGetCurrencyPriceBySymbol { - Symbol string `form:"symbol"` - } - RespGetCurrencyPriceBySymbol { - AssetId uint32 `json:"assetId"` - Price string `json:"price"` - } -) - -// GetCurrencyPrices -type ( - ReqGetCurrencyPrices { - } - DataCurrencyPrices { - Pair string `json:"pair"` - AssetId uint32 `json:"assetId"` - Price string `json:"price"` - } - RespGetCurrencyPrices { - Data []*DataCurrencyPrices `json:"data"` - } -) - -// GetGasFee -type ( - ReqGetGasFee { - AssetId uint32 `form:"asset_id"` - } - RespGetGasFee { - GasFee string `json:"gas_fee"` - } -) - -// GetWithdrawGasFee -type ( - ReqGetWithdrawGasFee { - AssetId uint32 `form:"asset_id"` - } - RespGetWithdrawGasFee { - GasFee string `json:"gas_fee"` - } -) - -// GetGasFeeAssetList -type ( - ReqGetGasFeeAssetList { - } - RespGetGasFeeAssetList { - Assets []AssetInfo `json:"assets"` - } -) - -// GetAccounts -type ( - ReqGetAccounts { - Offset uint16 `form:"offset" validator:"min=0"` - Limit uint16 `form:"limit" validator:"min=0,max=50"` - } - Accounts { - AccountIndex uint32 `json:"account_index"` - AccountName string 
`json:"account_name"` - PublicKey string `json:"public_key"` - } - RespGetAccounts { - Total uint32 `json:"total"` - Accounts []*Accounts `json:"accounts"` - } -) - -// Search -type ( - ReqSearch { - Info string `form:"info"` - } - RespSearch { - DataType int32 `json:"data_type"` - } -) - -// GetGasAccount -type ( - ReqGetGasAccount { - } - RespGetGasAccount { - AccountStatus int64 `json:"account_status"` - AccountIndex int64 `json:"account_index"` - AccountName string `json:"account_name"` - } -) - -@server( - group: info -) -service app-api { - @handler GetLayer2BasicInfo - get /api/v1/info/getLayer2BasicInfo (ReqGetLayer2BasicInfo) returns (RespGetLayer2BasicInfo) - - @handler GetAssetsList - get /api/v1/info/getAssetsList (ReqGetAssetsList) returns (RespGetAssetsList) - - @handler GetCurrencyPriceBySymbol - get /api/v1/info/getCurrencyPriceBySymbol (ReqGetCurrencyPriceBySymbol) returns (RespGetCurrencyPriceBySymbol) - - @handler GetCurrencyPrices - get /api/v1/info/getCurrencyPrices (ReqGetCurrencyPrices) returns (RespGetCurrencyPrices) - - @handler GetGasFee - get /api/v1/info/getGasFee (ReqGetGasFee) returns (RespGetGasFee) - - @handler GetWithdrawGasFee - get /api/v1/info/getWithdrawGasFee (ReqGetWithdrawGasFee) returns (RespGetWithdrawGasFee) - - @handler GetGasFeeAssetList - get /api/v1/info/getGasFeeAssetList (ReqGetGasFeeAssetList) returns (RespGetGasFeeAssetList) - - @handler GetAccounts - get /api/v1/info/getAccounts (ReqGetAccounts) returns (RespGetAccounts) - - @handler Search - get /api/v1/info/search (ReqSearch) returns (RespSearch) - - @handler GetGasAccount - get /api/v1/info/getGasAccount (ReqGetGasAccount) returns (RespGetGasAccount) -} - -/* =========================== Pair ==========================*/ -// getSwapAmount -type ( - ReqGetSwapAmount { - PairIndex uint32 `form:"pair_index"` - AssetId uint32 `form:"asset_id"` - AssetAmount string `form:"asset_amount"` - IsFrom bool `form:"is_from"` - } - RespGetSwapAmount { - ResAssetAmount string `json:"res_asset_amount"` - ResAssetId uint32 `json:"res_asset_id"` - } -) - -// getAvailablePairs -type ( - ReqGetAvailablePairs { - } - Pair { - PairIndex uint32 `json:"pair_index"` - AssetAId uint32 `json:"asset_a_id"` - AssetAName string `json:"asset_a_name"` - AssetAAmount string `json:"asset_a_amount"` - AssetBId uint32 `json:"asset_b_id"` - AssetBName string `json:"asset_b_name"` - AssetBAmount string `json:"asset_b_amount"` - FeeRate int64 `json:"fee_Rate"` - TreasuryRate int64 `json:"treasury_rate"` - } - RespGetAvailablePairs { - Pairs []*Pair `json:"result"` - } -) - -// GetLPValue -type ( - ReqGetLPValue { - PairIndex uint32 `form:"pair_index"` - LpAmount string `form:"lp_amount"` - } - RespGetLPValue { - AssetAId uint32 `json:"asset_a_id"` - AssetAName string `json:"asset_a_name"` - AssetAAmount string `json:"asset_a_amount"` - AssetBid uint32 `json:"asset_b_id"` - AssetBName string `json:"asset_b_name"` - AssetBAmount string `json:"asset_b_amount"` - } -) - -// getPairInfo -type ( - ReqGetPairInfo { - PairIndex uint32 `form:"pair_index"` - } - RespGetPairInfo { - AssetAId uint32 `json:"asset_a_id"` - AssetAAmount string `json:"asset_a_amount"` - AssetBId uint32 `json:"asset_b_id"` - AssetBAmount string `json:"asset_b_amount"` - TotalLpAmount string `json:"total_lp_amount"` - } -) - -@server( - group: pair -) -service app-api { - @handler GetSwapAmount - get /api/v1/pair/getSwapAmount (ReqGetSwapAmount) returns (RespGetSwapAmount) - - @handler GetAvailablePairs - get /api/v1/pair/getAvailablePairs 
(ReqGetAvailablePairs) returns (RespGetAvailablePairs) - - @handler GetLPValue - get /api/v1/pair/getLPValue (ReqGetLPValue) returns (RespGetLPValue) - - @handler GetPairInfo - get /api/v1/pair/getPairInfo (ReqGetPairInfo) returns (RespGetPairInfo) -} - -/* ======================= transaction =======================*/ -type TxDetail { - TxId int64 `json:"tx_id"` - AssetId int64 `json:"asset_id"` - AssetType int64 `json:"asset_type"` - AccountIndex int64 `json:"account_index"` - AccountName string `json:"account_name"` - AccountBalance string `json:"accountBalance"` - AccountDelta string `json:"accountDelta"` - Order int64 `json:"order"` - AccountOrder int64 `json:"account_order"` - Nonce int64 `json:"nonce"` - CollectionNonce int64 `json:"collection_nonce"` -} - -type Tx { - TxHash string `json:"tx_hash"` - TxType int64 `json:"tx_type,range=[1:64]"` - TxAmount string `json:"tx_amount"` - TxInfo string `json:"tx_info"` - TxDetails []*TxDetail `json:"tx_details"` - TxStatus int64 `json:"tx_status"` - GasFeeAssetId int64 `json:"gas_fee_asset_id"` - GasFee string `json:"gas_fee"` - NftIndex int64 `json:"nft_index"` - PairIndex int64 `json:"pair_index"` - AssetId int64 `json:"asset_id"` - NativeAddress string `json:"native_adress"` - ExtraInfo string `json:"extra_info"` - Memo string `json:"memo"` - AccountIndex int64 `json:"account_index"` - Nonce int64 `json:"nonce"` - ExpiredAt int64 `json:"expire_at"` - Status int64 `json:"status,options=0|1|2"` - BlockId int64 `json:"block_id"` - BlockHeight int64 `json:"block_height"` - CreatedAt int64 `json:"created_at"` - StateRoot string `json:"state_root"` -} - -type TxAccount { - AccountIndex uint32 `json:"account_index"` - AccountName string `json:"account_name"` - AccountBalance string `json:"account_balance"` - AccountDelta string `json:"account_delta"` -} - -// GetTxsList -type ( - ReqGetTxsList { - Limit uint16 `form:"limit"` - Offset uint16 `form:"offset"` - } - RespGetTxsList { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// GetTxsListByBlockHeight -type ( - ReqGetTxsListByBlockHeight { - BlockHeight uint64 `form:"block_height"` - Limit uint16 `form:"limit"` - Offset uint16 `form:"offset"` - } - RespGetTxsListByBlockHeight { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// getTxsListByAccountIndex -type ( - ReqGetTxsListByAccountIndex { - AccountIndex uint32 `form:"account_index"` - Offset uint16 `form:"offset"` - Limit uint16 `form:"limit"` - } - RespGetTxsListByAccountIndex { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// getTxsByAccountIndexAndTxType -type ( - ReqGetTxsByAccountIndexAndTxType { - AccountIndex uint32 `form:"account_index"` - TxType uint32 `form:"tx_type"` - Offset uint32 `form:"offset"` - Limit uint32 `form:"limit"` - } - RespGetTxsByAccountIndexAndTxType { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// GetTxsByAccountName -type ( - ReqGetTxsByAccountName { - AccountName string `form:"account_name"` - Offset uint32 `form:"offset"` - Limit uint32 `form:"limit"` - } - RespGetTxsByAccountName { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// getTxsByPubKey -type ( - ReqGetTxsByPubKey { - AccountPk string `form:"account_pk"` - Offset uint32 `form:"offset"` - Limit uint32 `form:"limit"` - } - RespGetTxsByPubKey { - Total uint32 `json:"total"` - Txs []*Tx `json:"txs"` - } -) - -// GetTxByHash -type ( - ReqGetTxByHash { - TxHash string `form:"tx_hash"` - } - RespGetTxByHash { - Tx Tx `json:"result"` - CommittedAt int64 
`json:"committed_at"` - VerifiedAt int64 `json:"verified_at"` - ExecutedAt int64 `json:"executed_at"` - AssetAId int64 `json:"asset_a_id"` - AssetBId int64 `json:"asset_b_id"` - } -) - -// sendTx -type ( - ReqSendTx { - TxType uint32 `form:"tx_type"` - TxInfo string `form:"tx_info"` - } - RespSendTx { - TxId string `json:"tx_id"` - } -) -// sendRawTx -type ( - ReqSendRawTx { - TxInfo string `form:"tx_info"` - } -) - -// GetMempoolTxs -type ( - ReqGetMempoolTxs { - Offset uint32 `form:"offset"` - Limit uint32 `form:"limit"` - } - RespGetMempoolTxs { - Total uint32 `json:"total"` - MempoolTxs []*Tx `json:"mempool_txs"` - } -) - -// getmempoolTxsByAccountName -type ( - ReqGetmempoolTxsByAccountName { - AccountName string `form:"account_name"` - } - RespGetmempoolTxsByAccountName { - Total uint32 `json:"total"` - Txs []*Tx `json:"mempool_txs"` - } -) - -// GetNextNonce -type ( - ReqGetNextNonce { - AccountIndex uint32 `form:"account_index"` - } - RespGetNextNonce { - Nonce uint64 `json:"nonce"` - } -) - -// SendCreateCollectionTx -type ( - ReqSendCreateCollectionTx { - TxInfo string `form:"tx_info"` - } - RespSendCreateCollectionTx { - CollectionId int64 `json:"collection_id"` - } -) - -// SendMintNftTx -type ( - ReqSendMintNftTx { - TxInfo string `form:"tx_info"` - } - RespSendMintNftTx { - NftIndex int64 `json:"nft_index"` - } -) -// SendAddLiquidityTx -type ( - ReqSendAddLiquidityTx { - TxInfo string `form:"tx_info"` - } - RespSendAddLiquidityTx { - TxId string `json:"tx_id"` - } -) - -// SendAtomicMatchTx -type ( - ReqSendAtomicMatchTx { - TxInfo string `form:"tx_info"` - } - RespSendAtomicMatchTx { - TxId string `json:"tx_id"` - } -) - -// SendCancelOfferTx -type ( - ReqSendCancelOfferTx { - TxInfo string `form:"tx_info"` - } - RespSendCancelOfferTx { - TxId string `json:"tx_id"` - } -) - -// SendRemoveLiquidityTx -type ( - ReqSendRemoveLiquidityTx { - TxInfo string `form:"tx_info"` - } - RespSendRemoveLiquidityTx { - TxId string `json:"tx_id"` - } -) - -// SendSwapTx -type ( - ReqSendSwapTx { - TxInfo string `form:"tx_info"` - } - RespSendSwapTx { - TxId string `json:"tx_id"` - } -) - -// SendTransferNftTx -type ( - ReqSendTransferNftTx { - TxInfo string `form:"tx_info"` - } - RespSendTransferNftTx { - TxId string `json:"tx_id"` - } -) - -// SendTransferTx -type ( - ReqSendTransferTx { - TxInfo string `form:"tx_info"` - } - RespSendTransferTx { - TxId string `json:"tx_id"` - } -) - -// SendWithdrawNftTx -type ( - ReqSendWithdrawNftTx { - TxInfo string `form:"tx_info"` - } - RespSendWithdrawNftTx { - TxId string `json:"tx_id"` - } -) - -// SendWithdrawTx -type ( - ReqSendWithdrawTx { - TxInfo string `form:"tx_info"` - } - RespSendWithdrawTx { - TxId string `json:"tx_id"` - } -) - -@server( - group: transaction -) -service app-api { - @handler GetTxsList - get /api/v1/tx/getTxsList (ReqGetTxsList) returns (RespGetTxsList) - - @handler GetTxsListByBlockHeight - get /api/v1/tx/getTxsListByBlockHeight (ReqGetTxsListByBlockHeight) returns (RespGetTxsListByBlockHeight) - - @handler GetTxsListByAccountIndex - get /api/v1/tx/getTxsListByAccountIndex (ReqGetTxsListByAccountIndex) returns (RespGetTxsListByAccountIndex) - - @handler GetTxsByAccountIndexAndTxType - get /api/v1/tx/getTxsByAccountIndexAndTxType (ReqGetTxsByAccountIndexAndTxType) returns (RespGetTxsByAccountIndexAndTxType) - - @handler GetTxsByAccountName - get /api/v1/tx/getTxsByAccountName (ReqGetTxsByAccountName) returns (RespGetTxsByAccountName) - - @handler GetTxsByPubKey - get /api/v1/tx/getTxsByPubKey (ReqGetTxsByPubKey) returns 
(RespGetTxsByPubKey) - - @handler GetTxByHash - get /api/v1/tx/getTxByHash (ReqGetTxByHash) returns (RespGetTxByHash) - - @handler GetMempoolTxs - get /api/v1/tx/getMempoolTxs (ReqGetMempoolTxs) returns (RespGetMempoolTxs) - - @handler GetmempoolTxsByAccountName - get /api/v1/tx/getmempoolTxsByAccountName (ReqGetmempoolTxsByAccountName) returns (RespGetmempoolTxsByAccountName) - - @handler GetNextNonce - get /api/v1/tx/getNextNonce (ReqGetNextNonce) returns (RespGetNextNonce) - - @handler SendTx - post /api/v1/tx/sendTx (ReqSendTx) returns (RespSendTx) - - @handler SendCreateCollectionTx - post /api/v1/tx/sendCreateCollectionTx (ReqSendCreateCollectionTx) returns (RespSendCreateCollectionTx) - - @handler SendMintNftTx - post /api/v1/tx/sendMintNftTx (ReqSendMintNftTx) returns (RespSendMintNftTx) - - @handler SendAddLiquidityTx - post /api/v1/tx/sendAddLiquidityTx (ReqSendAddLiquidityTx) returns (RespSendAddLiquidityTx) - - @handler SendAtomicMatchTx - post /api/v1/tx/sendAtomicMatchTx (ReqSendAtomicMatchTx) returns (RespSendAtomicMatchTx) - - @handler SendCancelOfferTx - post /api/v1/tx/sendCancelOfferTx (ReqSendCancelOfferTx) returns (RespSendCancelOfferTx) - - @handler SendRemoveLiquidityTx - post /api/v1/tx/sendRemoveLiquidityTx (ReqSendRemoveLiquidityTx) returns (RespSendRemoveLiquidityTx) - - @handler SendSwapTx - post /api/v1/tx/sendSwapTx (ReqSendSwapTx) returns (RespSendSwapTx) - - @handler SendTransferNftTx - post /api/v1/tx/sendTransferNftTx (ReqSendTransferNftTx) returns (RespSendTransferNftTx) - - @handler SendTransferTx - post /api/v1/tx/sendTransferTx (ReqSendTransferTx) returns (RespSendTransferTx) - - @handler SendWithdrawNftTx - post /api/v1/tx/sendWithdrawNftTx (ReqSendWithdrawNftTx) returns (RespSendWithdrawNftTx) - - @handler SendWithdrawTx - post /api/v1/tx/sendWithdrawTx (ReqSendWithdrawTx) returns (RespSendWithdrawTx) -} - -/* ========================= Nft =========================*/ - -// GetMaxOfferId -type ( - ReqGetMaxOfferId { - AccountIndex uint32 `form:"account_index"` - } - RespGetMaxOfferId { - OfferId uint64 `json:"offer_id"` - } -) - -type ( - Nft { - NftIndex int64 `json:"nft_index"` - CreatorAccountIndex int64 `json:"creator_account_index"` - OwnerAccountIndex int64 `json:"owner_account_index"` - NftContentHash string `json:"nft_content_hash"` - NftL1Address string `json:"nft_l1_address"` - NftL1TokenId string `json:"nft_l1_token_id"` - CreatorTreasuryRate int64 `json:"creator_treasury_rate"` - CollectionId int64 `json:"collection_id"` - } -) - -// GetAccountNftList -type ( - ReqGetAccountNftList { - AccountIndex int64 `form:"account_index"` - Offset uint16 `form:"offset"` - Limit uint16 `form:"limit"` - } - RespGetAccountNftList { - Total int64 `json:"total"` - Nfts []*Nft `json:"nfts"` - } -) - -@server( - group: nft -) -service app-api { - @handler GetMaxOfferId - get /api/v1/nft/getMaxOfferId (ReqGetMaxOfferId) returns (RespGetMaxOfferId) - - @handler GetAccountNftList - get /api/v1/nft/getAccountNftList (ReqGetAccountNftList) returns (RespGetAccountNftList) -} \ No newline at end of file diff --git a/service/api/app/app.go b/service/api/app/app.go deleted file mode 100644 index 8899ae12d..000000000 --- a/service/api/app/app.go +++ /dev/null @@ -1,44 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "os" - - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/rest" - - "github.com/bnb-chain/zkbas/service/api/app/internal/config" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/handler" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -var configFile = flag.String("f", "etc/app.yaml", "the config file") - -var ( - CodeVersion = "" - GitCommitHash = "" -) - -func main() { - args := os.Args - if len(args) == 2 && (args[1] == "--version" || args[1] == "-v") { - fmt.Printf("Git Commit Hash: %s\n", GitCommitHash) - fmt.Printf("Git Code Version : %s\n", CodeVersion) - return - } - flag.Parse() - var c config.Config - conf.MustLoad(*configFile, &c) - logx.Severef("[config] err:%v", c) - logx.DisableStat() - ctx := svc.NewServiceContext(c) - ctx.CodeVersion = CodeVersion - ctx.GitCommitHash = GitCommitHash - server := rest.MustNewServer(c.RestConf, rest.WithCors()) - defer server.Stop() - handler.RegisterHandlers(server, ctx) - fmt.Printf("Starting server at %s:%d...\n", c.Host, c.Port) - server.Start() -} diff --git a/service/api/app/etc/config.yaml.example b/service/api/app/etc/config.yaml.example deleted file mode 100644 index e5dcdb3b4..000000000 --- a/service/api/app/etc/config.yaml.example +++ /dev/null @@ -1,20 +0,0 @@ -Name: appService-api -Host: 0.0.0.0 -Port: 8888 -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Pass: myredis - Type: node -GlobalRpc: - Etcd: - Hosts: - - 127.0.0.1:2379 - Key: global.rpc -LogConf: - ServiceName: appservice - Mode: console - Path: ./log/appService - StackCooldownMillis: 500 \ No newline at end of file diff --git a/service/api/app/internal/config/config.go b/service/api/app/internal/config/config.go deleted file mode 100644 index 1a491903b..000000000 --- a/service/api/app/internal/config/config.go +++ /dev/null @@ -1,18 +0,0 @@ -package config - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/rest" - "github.com/zeromicro/go-zero/zrpc" -) - -type Config struct { - rest.RestConf - Postgres struct { - DataSource string - } - CacheRedis cache.CacheConf - GlobalRpc zrpc.RpcClientConf - LogConf logx.LogConf -} diff --git a/service/api/app/internal/handler/account/getaccountinfobyaccountindexhandler.go b/service/api/app/internal/handler/account/getaccountinfobyaccountindexhandler.go deleted file mode 100644 index 78d3b9255..000000000 --- a/service/api/app/internal/handler/account/getaccountinfobyaccountindexhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountInfoByAccountIndexHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountInfoByAccountIndex - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetAccountInfoByAccountIndexLogic(r.Context(), svcCtx) - resp, err := l.GetAccountInfoByAccountIndex(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/account/getaccountinfobyaccountnamehandler.go b/service/api/app/internal/handler/account/getaccountinfobyaccountnamehandler.go deleted file mode 100644 index 955705844..000000000 --- 
a/service/api/app/internal/handler/account/getaccountinfobyaccountnamehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountInfoByAccountNameHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountInfoByAccountName - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetAccountInfoByAccountNameLogic(r.Context(), svcCtx) - resp, err := l.GetAccountInfoByAccountName(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/account/getaccountinfobypubkeyhandler.go b/service/api/app/internal/handler/account/getaccountinfobypubkeyhandler.go deleted file mode 100644 index cfb18a5dc..000000000 --- a/service/api/app/internal/handler/account/getaccountinfobypubkeyhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountInfoByPubKeyHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountInfoByPubKey - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetAccountInfoByPubKeyLogic(r.Context(), svcCtx) - resp, err := l.GetAccountInfoByPubKey(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/account/getaccountstatusbyaccountnamehandler.go b/service/api/app/internal/handler/account/getaccountstatusbyaccountnamehandler.go deleted file mode 100644 index a75122d33..000000000 --- a/service/api/app/internal/handler/account/getaccountstatusbyaccountnamehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountStatusByAccountNameHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountStatusByAccountName - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetAccountStatusByAccountNameLogic(r.Context(), svcCtx) - resp, err := l.GetAccountStatusByAccountName(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/account/getaccountstatusbyaccountpkhandler.go b/service/api/app/internal/handler/account/getaccountstatusbyaccountpkhandler.go deleted file mode 100644 index 288418811..000000000 --- a/service/api/app/internal/handler/account/getaccountstatusbyaccountpkhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - 
"github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountStatusByAccountPkHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountStatusByAccountPk - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetAccountStatusByAccountPkLogic(r.Context(), svcCtx) - resp, err := l.GetAccountStatusByAccountPk(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/account/getbalancebyassetidandaccountnamehandler.go b/service/api/app/internal/handler/account/getbalancebyassetidandaccountnamehandler.go deleted file mode 100644 index ac7d8b345..000000000 --- a/service/api/app/internal/handler/account/getbalancebyassetidandaccountnamehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package account - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetBalanceByAssetIdAndAccountNameHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetBlanceByAssetIdAndAccountName - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := account.NewGetBalanceByAssetIdAndAccountNameLogic(r.Context(), svcCtx) - resp, err := l.GetBalanceByAssetIdAndAccountName(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/block/getblockbyblockheighthandler.go b/service/api/app/internal/handler/block/getblockbyblockheighthandler.go deleted file mode 100644 index d844bf36b..000000000 --- a/service/api/app/internal/handler/block/getblockbyblockheighthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package block - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetBlockByBlockHeightHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetBlockByBlockHeight - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := block.NewGetBlockByBlockHeightLogic(r.Context(), svcCtx) - resp, err := l.GetBlockByBlockHeight(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/block/getblockbycommitmenthandler.go b/service/api/app/internal/handler/block/getblockbycommitmenthandler.go deleted file mode 100644 index 86d5ec533..000000000 --- a/service/api/app/internal/handler/block/getblockbycommitmenthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package block - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetBlockByCommitmentHandler(svcCtx *svc.ServiceContext) 
http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetBlockByCommitment - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := block.NewGetBlockByCommitmentLogic(r.Context(), svcCtx) - resp, err := l.GetBlockByCommitment(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/block/getcurrentblockheighthandler.go b/service/api/app/internal/handler/block/getcurrentblockheighthandler.go deleted file mode 100644 index 624e0b845..000000000 --- a/service/api/app/internal/handler/block/getcurrentblockheighthandler.go +++ /dev/null @@ -1,22 +0,0 @@ -package block - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -func GetCurrentBlockHeightHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - l := block.NewGetCurrentBlockHeightLogic(r.Context(), svcCtx) - resp, err := l.GetCurrentBlockHeight() - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/info/getassetslisthandler.go b/service/api/app/internal/handler/info/getassetslisthandler.go deleted file mode 100644 index 1b30f07f4..000000000 --- a/service/api/app/internal/handler/info/getassetslisthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package info - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAssetsListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAssetsList - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := info.NewGetAssetsListLogic(r.Context(), svcCtx) - resp, err := l.GetAssetsList(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/info/getcurrencypricebysymbolhandler.go b/service/api/app/internal/handler/info/getcurrencypricebysymbolhandler.go deleted file mode 100644 index f39d50754..000000000 --- a/service/api/app/internal/handler/info/getcurrencypricebysymbolhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package info - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetCurrencyPriceBySymbolHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetCurrencyPriceBySymbol - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := info.NewGetCurrencyPriceBySymbolLogic(r.Context(), svcCtx) - resp, err := l.GetCurrencyPriceBySymbol(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/info/getgasfeeassetlisthandler.go b/service/api/app/internal/handler/info/getgasfeeassetlisthandler.go deleted file mode 100644 index bbddb1225..000000000 --- 
a/service/api/app/internal/handler/info/getgasfeeassetlisthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package info - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetGasFeeAssetListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetGasFeeAssetList - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := info.NewGetGasFeeAssetListLogic(r.Context(), svcCtx) - resp, err := l.GetGasFeeAssetList(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/nft/getaccountnftlisthandler.go b/service/api/app/internal/handler/nft/getaccountnftlisthandler.go deleted file mode 100644 index bb1237f61..000000000 --- a/service/api/app/internal/handler/nft/getaccountnftlisthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package nft - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/nft" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAccountNftListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccountNftList - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := nft.NewGetAccountNftListLogic(r.Context(), svcCtx) - resp, err := l.GetAccountNftList(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/pair/getavailablepairshandler.go b/service/api/app/internal/handler/pair/getavailablepairshandler.go deleted file mode 100644 index b667f81dd..000000000 --- a/service/api/app/internal/handler/pair/getavailablepairshandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package pair - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/pair" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetAvailablePairsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAvailablePairs - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := pair.NewGetAvailablePairsLogic(r.Context(), svcCtx) - resp, err := l.GetAvailablePairs(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/pair/getpairinfohandler.go b/service/api/app/internal/handler/pair/getpairinfohandler.go deleted file mode 100644 index acbe164da..000000000 --- a/service/api/app/internal/handler/pair/getpairinfohandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package pair - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/pair" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetPairInfoHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r 
*http.Request) { - var req types.ReqGetPairInfo - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := pair.NewGetPairInfoLogic(r.Context(), svcCtx) - resp, err := l.GetPairInfo(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/routes.go b/service/api/app/internal/handler/routes.go deleted file mode 100644 index 4779ae696..000000000 --- a/service/api/app/internal/handler/routes.go +++ /dev/null @@ -1,299 +0,0 @@ -// Code generated by goctl. DO NOT EDIT. -package handler - -import ( - "net/http" - - account "github.com/bnb-chain/zkbas/service/api/app/internal/handler/account" - block "github.com/bnb-chain/zkbas/service/api/app/internal/handler/block" - info "github.com/bnb-chain/zkbas/service/api/app/internal/handler/info" - nft "github.com/bnb-chain/zkbas/service/api/app/internal/handler/nft" - pair "github.com/bnb-chain/zkbas/service/api/app/internal/handler/pair" - root "github.com/bnb-chain/zkbas/service/api/app/internal/handler/root" - transaction "github.com/bnb-chain/zkbas/service/api/app/internal/handler/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - - "github.com/zeromicro/go-zero/rest" -) - -func RegisterHandlers(server *rest.Server, serverCtx *svc.ServiceContext) { - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/", - Handler: root.GetStatusHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/account/getAccountStatusByAccountPk", - Handler: account.GetAccountStatusByAccountPkHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/account/getAccountInfoByPubKey", - Handler: account.GetAccountInfoByPubKeyHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/account/getAccountInfoByAccountIndex", - Handler: account.GetAccountInfoByAccountIndexHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/account/getAccountStatusByAccountName", - Handler: account.GetAccountStatusByAccountNameHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/account/getAccountInfoByAccountName", - Handler: account.GetAccountInfoByAccountNameHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/account/getBalanceByAssetIdAndAccountName", - Handler: account.GetBalanceByAssetIdAndAccountNameHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/block/getBlocks", - Handler: block.GetBlocksHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/block/getBlockByCommitment", - Handler: block.GetBlockByCommitmentHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/block/getBlockByBlockHeight", - Handler: block.GetBlockByBlockHeightHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/block/getCurrentBlockHeight", - Handler: block.GetCurrentBlockHeightHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/info/getLayer2BasicInfo", - Handler: info.GetLayer2BasicInfoHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getAssetsList", - Handler: info.GetAssetsListHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getCurrencyPriceBySymbol", - Handler: info.GetCurrencyPriceBySymbolHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: 
"/api/v1/info/getCurrencyPrices", - Handler: info.GetCurrencyPricesHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getGasFee", - Handler: info.GetGasFeeHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getWithdrawGasFee", - Handler: info.GetWithdrawGasFeeHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getGasFeeAssetList", - Handler: info.GetGasFeeAssetListHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getAccounts", - Handler: info.GetAccountsHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/search", - Handler: info.SearchHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/info/getGasAccount", - Handler: info.GetGasAccountHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/pair/getSwapAmount", - Handler: pair.GetSwapAmountHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/pair/getAvailablePairs", - Handler: pair.GetAvailablePairsHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/pair/getLPValue", - Handler: pair.GetLPValueHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/pair/getPairInfo", - Handler: pair.GetPairInfoHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsList", - Handler: transaction.GetTxsListHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsListByBlockHeight", - Handler: transaction.GetTxsListByBlockHeightHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsListByAccountIndex", - Handler: transaction.GetTxsListByAccountIndexHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsByAccountIndexAndTxType", - Handler: transaction.GetTxsByAccountIndexAndTxTypeHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsByAccountName", - Handler: transaction.GetTxsByAccountNameHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxsByPubKey", - Handler: transaction.GetTxsByPubKeyHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getTxByHash", - Handler: transaction.GetTxByHashHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getMempoolTxs", - Handler: transaction.GetMempoolTxsHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getmempoolTxsByAccountName", - Handler: transaction.GetmempoolTxsByAccountNameHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/tx/getNextNonce", - Handler: transaction.GetNextNonceHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendTx", - Handler: transaction.SendTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendCreateCollectionTx", - Handler: transaction.SendCreateCollectionTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendMintNftTx", - Handler: transaction.SendMintNftTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendAddLiquidityTx", - Handler: transaction.SendAddLiquidityTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendAtomicMatchTx", - Handler: transaction.SendAtomicMatchTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendCancelOfferTx", - Handler: 
transaction.SendCancelOfferTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendRemoveLiquidityTx", - Handler: transaction.SendRemoveLiquidityTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendSwapTx", - Handler: transaction.SendSwapTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendTransferNftTx", - Handler: transaction.SendTransferNftTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendTransferTx", - Handler: transaction.SendTransferTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendWithdrawNftTx", - Handler: transaction.SendWithdrawNftTxHandler(serverCtx), - }, - { - Method: http.MethodPost, - Path: "/api/v1/tx/sendWithdrawTx", - Handler: transaction.SendWithdrawTxHandler(serverCtx), - }, - }, - ) - - server.AddRoutes( - []rest.Route{ - { - Method: http.MethodGet, - Path: "/api/v1/nft/getMaxOfferId", - Handler: nft.GetMaxOfferIdHandler(serverCtx), - }, - { - Method: http.MethodGet, - Path: "/api/v1/nft/getAccountNftList", - Handler: nft.GetAccountNftListHandler(serverCtx), - }, - }, - ) -} diff --git a/service/api/app/internal/handler/transaction/getmempooltxsbyaccountnamehandler.go b/service/api/app/internal/handler/transaction/getmempooltxsbyaccountnamehandler.go deleted file mode 100644 index 4611b939b..000000000 --- a/service/api/app/internal/handler/transaction/getmempooltxsbyaccountnamehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetmempoolTxsByAccountNameHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetmempoolTxsByAccountName - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetmempoolTxsByAccountNameLogic(r.Context(), svcCtx) - resp, err := l.GetmempoolTxsByAccountName(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxbyhashhandler.go b/service/api/app/internal/handler/transaction/gettxbyhashhandler.go deleted file mode 100644 index ae2ddee62..000000000 --- a/service/api/app/internal/handler/transaction/gettxbyhashhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxByHashHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxByHash - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxByHashLogic(r.Context(), svcCtx) - resp, err := l.GetTxByHash(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxsbyaccountindexandtxtypehandler.go b/service/api/app/internal/handler/transaction/gettxsbyaccountindexandtxtypehandler.go deleted file mode 100644 index fe23f9b93..000000000 --- 
a/service/api/app/internal/handler/transaction/gettxsbyaccountindexandtxtypehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsByAccountIndexAndTxTypeHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsByAccountIndexAndTxType - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsByAccountIndexAndTxTypeLogic(r.Context(), svcCtx) - resp, err := l.GetTxsByAccountIndexAndTxType(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxsbyaccountnamehandler.go b/service/api/app/internal/handler/transaction/gettxsbyaccountnamehandler.go deleted file mode 100644 index f26bdf486..000000000 --- a/service/api/app/internal/handler/transaction/gettxsbyaccountnamehandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsByAccountNameHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsByAccountName - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsByAccountNameLogic(r.Context(), svcCtx) - resp, err := l.GetTxsByAccountName(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxsbypubkeyhandler.go b/service/api/app/internal/handler/transaction/gettxsbypubkeyhandler.go deleted file mode 100644 index bc49c7a10..000000000 --- a/service/api/app/internal/handler/transaction/gettxsbypubkeyhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsByPubKeyHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsByPubKey - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsByPubKeyLogic(r.Context(), svcCtx) - resp, err := l.GetTxsByPubKey(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxslistbyaccountindexhandler.go b/service/api/app/internal/handler/transaction/gettxslistbyaccountindexhandler.go deleted file mode 100644 index 775a4800a..000000000 --- a/service/api/app/internal/handler/transaction/gettxslistbyaccountindexhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsListByAccountIndexHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsListByAccountIndex - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsListByAccountIndexLogic(r.Context(), svcCtx) - resp, err := l.GetTxsListByAccountIndex(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxslistbyblockheighthandler.go b/service/api/app/internal/handler/transaction/gettxslistbyblockheighthandler.go deleted file mode 100644 index 652c85af7..000000000 --- a/service/api/app/internal/handler/transaction/gettxslistbyblockheighthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsListByBlockHeightHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsListByBlockHeight - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsListByBlockHeightLogic(r.Context(), svcCtx) - resp, err := l.GetTxsListByBlockHeight(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/gettxslisthandler.go b/service/api/app/internal/handler/transaction/gettxslisthandler.go deleted file mode 100644 index 8959c8840..000000000 --- a/service/api/app/internal/handler/transaction/gettxslisthandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GetTxsListHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetTxsList - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewGetTxsListLogic(r.Context(), svcCtx) - resp, err := l.GetTxsList(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendaddliquiditytxhandler.go b/service/api/app/internal/handler/transaction/sendaddliquiditytxhandler.go deleted file mode 100644 index 400213a43..000000000 --- a/service/api/app/internal/handler/transaction/sendaddliquiditytxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendAddLiquidityTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendAddLiquidityTx - if err := httpx.Parse(r, &req); 
err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendAddLiquidityTxLogic(r.Context(), svcCtx) - resp, err := l.SendAddLiquidityTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendatomicmatchtxhandler.go b/service/api/app/internal/handler/transaction/sendatomicmatchtxhandler.go deleted file mode 100644 index 29a4dc337..000000000 --- a/service/api/app/internal/handler/transaction/sendatomicmatchtxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendAtomicMatchTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendAtomicMatchTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendAtomicMatchTxLogic(r.Context(), svcCtx) - resp, err := l.SendAtomicMatchTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendcanceloffertxhandler.go b/service/api/app/internal/handler/transaction/sendcanceloffertxhandler.go deleted file mode 100644 index 4e627a200..000000000 --- a/service/api/app/internal/handler/transaction/sendcanceloffertxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendCancelOfferTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendCancelOfferTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendCancelOfferTxLogic(r.Context(), svcCtx) - resp, err := l.SendCancelOfferTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendcreatecollectiontxhandler.go b/service/api/app/internal/handler/transaction/sendcreatecollectiontxhandler.go deleted file mode 100644 index b178f40e2..000000000 --- a/service/api/app/internal/handler/transaction/sendcreatecollectiontxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendCreateCollectionTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendCreateCollectionTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendCreateCollectionTxLogic(r.Context(), svcCtx) - resp, err := l.SendCreateCollectionTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git 
a/service/api/app/internal/handler/transaction/sendmintnfttxhandler.go b/service/api/app/internal/handler/transaction/sendmintnfttxhandler.go deleted file mode 100644 index e1917a8fd..000000000 --- a/service/api/app/internal/handler/transaction/sendmintnfttxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendMintNftTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendMintNftTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendMintNftTxLogic(r.Context(), svcCtx) - resp, err := l.SendMintNftTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendremoveliquiditytxhandler.go b/service/api/app/internal/handler/transaction/sendremoveliquiditytxhandler.go deleted file mode 100644 index 4d415718c..000000000 --- a/service/api/app/internal/handler/transaction/sendremoveliquiditytxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendRemoveLiquidityTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendRemoveLiquidityTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendRemoveLiquidityTxLogic(r.Context(), svcCtx) - resp, err := l.SendRemoveLiquidityTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendswaptxhandler.go b/service/api/app/internal/handler/transaction/sendswaptxhandler.go deleted file mode 100644 index d7acf5b45..000000000 --- a/service/api/app/internal/handler/transaction/sendswaptxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendSwapTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendSwapTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendSwapTxLogic(r.Context(), svcCtx) - resp, err := l.SendSwapTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendtransfernfttxhandler.go b/service/api/app/internal/handler/transaction/sendtransfernfttxhandler.go deleted file mode 100644 index af5144c66..000000000 --- a/service/api/app/internal/handler/transaction/sendtransfernfttxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - 
- "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendTransferNftTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendTransferNftTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendTransferNftTxLogic(r.Context(), svcCtx) - resp, err := l.SendTransferNftTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendtransfertxhandler.go b/service/api/app/internal/handler/transaction/sendtransfertxhandler.go deleted file mode 100644 index 25472031a..000000000 --- a/service/api/app/internal/handler/transaction/sendtransfertxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendTransferTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendTransferTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendTransferTxLogic(r.Context(), svcCtx) - resp, err := l.SendTransferTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendwithdrawnfttxhandler.go b/service/api/app/internal/handler/transaction/sendwithdrawnfttxhandler.go deleted file mode 100644 index b565a6386..000000000 --- a/service/api/app/internal/handler/transaction/sendwithdrawnfttxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendWithdrawNftTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendWithdrawNftTx - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendWithdrawNftTxLogic(r.Context(), svcCtx) - resp, err := l.SendWithdrawNftTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/handler/transaction/sendwithdrawtxhandler.go b/service/api/app/internal/handler/transaction/sendwithdrawtxhandler.go deleted file mode 100644 index 4d02600ac..000000000 --- a/service/api/app/internal/handler/transaction/sendwithdrawtxhandler.go +++ /dev/null @@ -1,29 +0,0 @@ -package transaction - -import ( - "net/http" - - "github.com/zeromicro/go-zero/rest/httpx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func SendWithdrawTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqSendWithdrawTx - if err := 
httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - - l := transaction.NewSendWithdrawTxLogic(r.Context(), svcCtx) - resp, err := l.SendWithdrawTx(&req) - if err != nil { - httpx.Error(w, err) - } else { - httpx.OkJson(w, resp) - } - } -} diff --git a/service/api/app/internal/logic/account/getaccountinfobyaccountindexlogic.go b/service/api/app/internal/logic/account/getaccountinfobyaccountindexlogic.go deleted file mode 100644 index 0c6e06eb6..000000000 --- a/service/api/app/internal/logic/account/getaccountinfobyaccountindexlogic.go +++ /dev/null @@ -1,58 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountInfoByAccountIndexLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC - account account.Model -} - -func NewGetAccountInfoByAccountIndexLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountInfoByAccountIndexLogic { - return &GetAccountInfoByAccountIndexLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - account: account.New(svcCtx), - } -} - -func (l *GetAccountInfoByAccountIndexLogic) GetAccountInfoByAccountIndex(req *types.ReqGetAccountInfoByAccountIndex) (*types.RespGetAccountInfoByAccountIndex, error) { - account, err := l.globalRPC.GetLatestAccountInfoByAccountIndex(l.ctx, req.AccountIndex) - if err != nil { - logx.Errorf("[GetLatestAccountInfoByAccountIndex] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAccountInfoByAccountIndex{ - AccountStatus: uint32(account.Status), - AccountName: account.AccountName, - AccountPk: account.PublicKey, - Nonce: account.Nonce, - Assets: make([]*types.AccountAsset, 0), - } - for _, asset := range account.AccountAsset { - resp.Assets = append(resp.Assets, &types.AccountAsset{ - AssetId: asset.AssetId, - Balance: asset.Balance, - LpAmount: asset.LpAmount, - OfferCanceledOrFinalized: asset.OfferCanceledOrFinalized, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/account/getaccountinfobyaccountnamelogic.go b/service/api/app/internal/logic/account/getaccountinfobyaccountnamelogic.go deleted file mode 100644 index 7a3baccb5..000000000 --- a/service/api/app/internal/logic/account/getaccountinfobyaccountnamelogic.go +++ /dev/null @@ -1,75 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountInfoByAccountNameLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC - account account.Model -} - -func NewGetAccountInfoByAccountNameLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountInfoByAccountNameLogic { - return 
&GetAccountInfoByAccountNameLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - account: account.New(svcCtx), - } -} - -func (l *GetAccountInfoByAccountNameLogic) GetAccountInfoByAccountName(req *types.ReqGetAccountInfoByAccountName) (*types.RespGetAccountInfoByAccountName, error) { - if checker.CheckAccountName(req.AccountName) { - logx.Errorf("[CheckAccountName] req.AccountName: %s", req.AccountName) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid AccountName") - } - accountName := checker.FormatSting(req.AccountName) - if checker.CheckFormatAccountName(accountName) { - logx.Errorf("[CheckFormatAccountName] accountName: %s", accountName) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid AccountName") - } - info, err := l.account.GetAccountByAccountName(l.ctx, accountName) - if err != nil { - logx.Errorf("[GetAccountByAccountName] accountName: %s, err: %s", accountName, err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - account, err := l.globalRPC.GetLatestAccountInfoByAccountIndex(l.ctx, info.AccountIndex) - if err != nil { - logx.Errorf("[GetLatestAccountInfoByAccountIndex] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAccountInfoByAccountName{ - AccountIndex: uint32(account.AccountIndex), - AccountPk: account.PublicKey, - Nonce: account.Nonce, - Assets: make([]*types.AccountAsset, 0), - } - for _, asset := range account.AccountAsset { - resp.Assets = append(resp.Assets, &types.AccountAsset{ - AssetId: asset.AssetId, - Balance: asset.Balance, - LpAmount: asset.LpAmount, - OfferCanceledOrFinalized: asset.OfferCanceledOrFinalized, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/account/getaccountinfobypubkeylogic.go b/service/api/app/internal/logic/account/getaccountinfobypubkeylogic.go deleted file mode 100644 index 14a6b84df..000000000 --- a/service/api/app/internal/logic/account/getaccountinfobypubkeylogic.go +++ /dev/null @@ -1,67 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountInfoByPubKeyLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model - globalRPC globalrpc.GlobalRPC -} - -func NewGetAccountInfoByPubKeyLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountInfoByPubKeyLogic { - return &GetAccountInfoByPubKeyLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - globalRPC: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetAccountInfoByPubKeyLogic) GetAccountInfoByPubKey(req *types.ReqGetAccountInfoByPubKey) (*types.RespGetAccountInfoByPubKey, error) { - //TODO: check AccountPk - info, err := l.account.GetAccountByPk(req.AccountPk) - if err != nil { - logx.Errorf("[GetAccountByPk] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - account, err := 
l.globalRPC.GetLatestAccountInfoByAccountIndex(l.ctx, info.AccountIndex) - if err != nil { - logx.Errorf("[GetLatestAccountInfoByAccountIndex] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAccountInfoByPubKey{ - AccountStatus: uint32(account.Status), - AccountName: account.AccountName, - AccountIndex: account.AccountIndex, - Nonce: account.Nonce, - Assets: make([]*types.AccountAsset, 0), - } - for _, asset := range account.AccountAsset { - resp.Assets = append(resp.Assets, &types.AccountAsset{ - AssetId: asset.AssetId, - Balance: asset.Balance, - LpAmount: asset.LpAmount, - OfferCanceledOrFinalized: asset.OfferCanceledOrFinalized, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic.go b/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic.go deleted file mode 100644 index f1af4d941..000000000 --- a/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic.go +++ /dev/null @@ -1,50 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountStatusByAccountNameLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model -} - -func NewGetAccountStatusByAccountNameLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountStatusByAccountNameLogic { - return &GetAccountStatusByAccountNameLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - } -} - -func (l *GetAccountStatusByAccountNameLogic) GetAccountStatusByAccountName(req *types.ReqGetAccountStatusByAccountName) (resp *types.RespGetAccountStatusByAccountName, err error) { - if checker.CheckAccountName(req.AccountName) { - logx.Errorf("[CheckAccountIndex] param: %s", req.AccountName) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid AccountName") - } - account, err := l.account.GetBasicAccountByAccountName(l.ctx, req.AccountName) - if err != nil { - logx.Errorf("[GetBasicAccountByAccountName] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp = &types.RespGetAccountStatusByAccountName{ - AccountStatus: uint32(account.Status), - AccountPk: account.PublicKey, - AccountIndex: uint32(account.AccountIndex), - } - return resp, nil -} diff --git a/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic_test.go b/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic_test.go deleted file mode 100644 index e4ec471f6..000000000 --- a/service/api/app/internal/logic/account/getaccountstatusbyaccountnamelogic_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package account - -import ( - "testing" - - "github.com/golang/mock/gomock" - "github.com/stretchr/testify/assert" - - table "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func 
TestGetAccountStatusByAccountNameLogic_GetAccountStatusByAccountName(t *testing.T) { - ctrl := gomock.NewController(t) - defer ctrl.Finish() - mockAccount := account.NewMockModel(ctrl) - l := &GetAccountStatusByAccountNameLogic{ - account: mockAccount, - } - // error case - mockAccount.EXPECT().GetBasicAccountByAccountName(gomock.Any(), gomock.Any()).Return(nil, errorcode.New(-1, "error")).MaxTimes(1) - req := &types.ReqGetAccountStatusByAccountName{AccountName: ""} - _, err := l.GetAccountStatusByAccountName(req) - assert.NotNil(t, err) - - // normal case - mockAccount.EXPECT().GetBasicAccountByAccountName(gomock.Any(), gomock.Any()).Return(&table.Account{}, nil).AnyTimes() - req = &types.ReqGetAccountStatusByAccountName{AccountName: ""} - _, err = l.GetAccountStatusByAccountName(req) - assert.Nil(t, err) -} diff --git a/service/api/app/internal/logic/account/getaccountstatusbyaccountpklogic.go b/service/api/app/internal/logic/account/getaccountstatusbyaccountpklogic.go deleted file mode 100644 index b463ad802..000000000 --- a/service/api/app/internal/logic/account/getaccountstatusbyaccountpklogic.go +++ /dev/null @@ -1,45 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountStatusByAccountPkLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model -} - -func NewGetAccountStatusByAccountPkLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountStatusByAccountPkLogic { - return &GetAccountStatusByAccountPkLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - } -} - -func (l *GetAccountStatusByAccountPkLogic) GetAccountStatusByAccountPk(req *types.ReqGetAccountStatusByAccountPk) (*types.RespGetAccountStatusByAccountPk, error) { - //TODO: check pk - account, err := l.account.GetBasicAccountByAccountPk(l.ctx, req.AccountPk) - if err != nil { - logx.Errorf("[GetBasicAccountByAccountPk] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - return &types.RespGetAccountStatusByAccountPk{ - AccountStatus: int64(account.Status), - AccountIndex: account.AccountIndex, - AccountName: account.AccountName, - }, nil -} diff --git a/service/api/app/internal/logic/account/getbalancebyassetidandaccountnamelogic.go b/service/api/app/internal/logic/account/getbalancebyassetidandaccountnamelogic.go deleted file mode 100644 index f67e53c35..000000000 --- a/service/api/app/internal/logic/account/getbalancebyassetidandaccountnamelogic.go +++ /dev/null @@ -1,62 +0,0 @@ -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetBalanceByAssetIdAndAccountNameLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC - account account.Model -} - -func 
NewGetBalanceByAssetIdAndAccountNameLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBalanceByAssetIdAndAccountNameLogic { - return &GetBalanceByAssetIdAndAccountNameLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - account: account.New(svcCtx), - } -} - -func (l *GetBalanceByAssetIdAndAccountNameLogic) GetBalanceByAssetIdAndAccountName(req *types.ReqGetBlanceByAssetIdAndAccountName) (*types.RespGetBlanceInfoByAssetIdAndAccountName, error) { - resp := &types.RespGetBlanceInfoByAssetIdAndAccountName{} - if checker.CheckAccountName(req.AccountName) { - logx.Errorf("[CheckAccountIndex] param: %s", req.AccountName) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid AccountName") - } - account, err := l.account.GetAccountByAccountName(l.ctx, req.AccountName) - if err != nil { - logx.Errorf("[GetAccountByAccountName] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - assets, err := l.globalRPC.GetLatestAssetsListByAccountIndex(l.ctx, uint32(account.AccountIndex)) - if err != nil { - logx.Errorf("[GetLatestAssetsListByAccountIndex] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - for _, asset := range assets { - if req.AssetId == asset.AssetId { - resp.Balance = asset.Balance - } - } - return resp, nil -} diff --git a/service/api/app/internal/logic/block/getblockbyblockheightlogic.go b/service/api/app/internal/logic/block/getblockbyblockheightlogic.go deleted file mode 100644 index 7df48a7b1..000000000 --- a/service/api/app/internal/logic/block/getblockbyblockheightlogic.go +++ /dev/null @@ -1,60 +0,0 @@ -package block - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetBlockByBlockHeightLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - block block.Block -} - -func NewGetBlockByBlockHeightLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBlockByBlockHeightLogic { - return &GetBlockByBlockHeightLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - block: block.New(svcCtx), - } -} - -func (l *GetBlockByBlockHeightLogic) GetBlockByBlockHeight(req *types.ReqGetBlockByBlockHeight) (*types.RespGetBlockByBlockHeight, error) { - block, err := l.block.GetBlockWithTxsByBlockHeight(l.ctx, int64(req.BlockHeight)) - if err != nil { - logx.Errorf("[GetBlockWithTxsByBlockHeight] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetBlockByBlockHeight{ - Block: types.Block{ - BlockCommitment: block.BlockCommitment, - BlockHeight: block.BlockHeight, - StateRoot: block.StateRoot, - PriorityOperations: block.PriorityOperations, - PendingOnChainOperationsHash: block.PendingOnChainOperationsHash, - PendingOnChainOperationsPubData: block.PendingOnChainOperationsPubData, - CommittedTxHash: block.CommittedTxHash, - CommittedAt: block.CommittedAt, - VerifiedTxHash: block.VerifiedTxHash, - VerifiedAt: block.VerifiedAt, - BlockStatus: 
block.BlockStatus, - }, - } - for _, t := range block.Txs { - tx := utils.GormTx2Tx(t) - resp.Block.Txs = append(resp.Block.Txs, tx) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/block/getblockbycommitmentlogic.go b/service/api/app/internal/logic/block/getblockbycommitmentlogic.go deleted file mode 100644 index 3dcd4eaf2..000000000 --- a/service/api/app/internal/logic/block/getblockbycommitmentlogic.go +++ /dev/null @@ -1,61 +0,0 @@ -package block - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetBlockByCommitmentLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - block block.Block -} - -func NewGetBlockByCommitmentLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBlockByCommitmentLogic { - return &GetBlockByCommitmentLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - block: block.New(svcCtx), - } -} - -func (l *GetBlockByCommitmentLogic) GetBlockByCommitment(req *types.ReqGetBlockByCommitment) (*types.RespGetBlockByCommitment, error) { - // query basic block info - block, err := l.block.GetBlockWithTxsByCommitment(l.ctx, req.BlockCommitment) - if err != nil { - logx.Errorf("[GetBlockWithTxsByCommitment] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetBlockByCommitment{ - Block: types.Block{ - BlockCommitment: block.BlockCommitment, - BlockHeight: block.BlockHeight, - StateRoot: block.StateRoot, - PriorityOperations: block.PriorityOperations, - PendingOnChainOperationsHash: block.PendingOnChainOperationsHash, - PendingOnChainOperationsPubData: block.PendingOnChainOperationsPubData, - CommittedTxHash: block.CommittedTxHash, - CommittedAt: block.CommittedAt, - VerifiedTxHash: block.VerifiedTxHash, - VerifiedAt: block.VerifiedAt, - BlockStatus: block.BlockStatus, - }, - } - for _, t := range block.Txs { - tx := utils.GormTx2Tx(t) - resp.Block.Txs = append(resp.Block.Txs, tx) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/block/getcurrentblockheightlogic.go b/service/api/app/internal/logic/block/getcurrentblockheightlogic.go deleted file mode 100644 index ce839cdab..000000000 --- a/service/api/app/internal/logic/block/getcurrentblockheightlogic.go +++ /dev/null @@ -1,42 +0,0 @@ -package block - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetCurrentBlockHeightLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - block block.Block -} - -func NewGetCurrentBlockHeightLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrentBlockHeightLogic { - return &GetCurrentBlockHeightLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - block: block.New(svcCtx), - } -} - -func (l *GetCurrentBlockHeightLogic) GetCurrentBlockHeight() (resp *types.RespCurrentBlockHeight, err error) { - height, err := l.block.GetCurrentBlockHeight(l.ctx) - if err 
!= nil { - logx.Errorf("[GetBlockWithTxsByBlockHeight] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - return &types.RespCurrentBlockHeight{ - Height: height, - }, nil -} diff --git a/service/api/app/internal/logic/info/getaccountslogic.go b/service/api/app/internal/logic/info/getaccountslogic.go deleted file mode 100644 index e9baf820e..000000000 --- a/service/api/app/internal/logic/info/getaccountslogic.go +++ /dev/null @@ -1,56 +0,0 @@ -package info - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountsLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model -} - -func NewGetAccountsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountsLogic { - return &GetAccountsLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - } -} - -func (l *GetAccountsLogic) GetAccounts(req *types.ReqGetAccounts) (*types.RespGetAccounts, error) { - accounts, err := l.account.GetAccountsList(int(req.Limit), int64(req.Offset)) - if err != nil { - logx.Errorf("[GetAccountsList] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - total, err := l.account.GetAccountsTotalCount() - if err != nil { - logx.Errorf("[GetAccountsTotalCount] err: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAccounts{ - Total: uint32(total), - Accounts: make([]*types.Accounts, 0), - } - for _, a := range accounts { - resp.Accounts = append(resp.Accounts, &types.Accounts{ - AccountIndex: uint32(a.AccountIndex), - AccountName: a.AccountName, - PublicKey: a.PublicKey, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getassetslistlogic.go b/service/api/app/internal/logic/info/getassetslistlogic.go deleted file mode 100644 index f16b836b6..000000000 --- a/service/api/app/internal/logic/info/getassetslistlogic.go +++ /dev/null @@ -1,52 +0,0 @@ -package info - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAssetsListLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - l2asset l2asset.L2asset -} - -func NewGetAssetsListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAssetsListLogic { - return &GetAssetsListLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - l2asset: l2asset.New(svcCtx), - } -} - -func (l *GetAssetsListLogic) GetAssetsList(req *types.ReqGetAssetsList) (*types.RespGetAssetsList, error) { - assets, err := l.l2asset.GetL2AssetsList(l.ctx) - if err != nil { - logx.Errorf("[GetL2AssetsList] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAssetsList{} - resp.Assets = []*types.AssetInfo{} - for _, asset := range assets { - resp.Assets = append(resp.Assets, 
&types.AssetInfo{ - AssetId: asset.AssetId, - AssetName: asset.AssetName, - AssetDecimals: asset.Decimals, - AssetSymbol: asset.AssetSymbol, - AssetAddress: asset.L1Address, - IsGasAsset: asset.IsGasAsset, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getcurrencypricebysymbollogic.go b/service/api/app/internal/logic/info/getcurrencypricebysymbollogic.go deleted file mode 100644 index 6254f609a..000000000 --- a/service/api/app/internal/logic/info/getcurrencypricebysymbollogic.go +++ /dev/null @@ -1,57 +0,0 @@ -package info - -import ( - "context" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/price" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetCurrencyPriceBySymbolLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - price price.Price - l2asset l2asset.L2asset -} - -func NewGetCurrencyPriceBySymbolLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrencyPriceBySymbolLogic { - return &GetCurrencyPriceBySymbolLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - price: price.New(svcCtx), - l2asset: l2asset.New(svcCtx), - } -} - -func (l *GetCurrencyPriceBySymbolLogic) GetCurrencyPriceBySymbol(req *types.ReqGetCurrencyPriceBySymbol) (*types.RespGetCurrencyPriceBySymbol, error) { - //TODO: check symbol - price, err := l.price.GetCurrencyPrice(l.ctx, req.Symbol) - if err != nil { - logx.Errorf("[GetCurrencyPrice] err: %s", err.Error()) - if err == errorcode.AppErrQuoteNotExist { - return nil, err - } - return nil, errorcode.AppErrInternal - } - l2Asset, err := l.l2asset.GetL2AssetInfoBySymbol(l.ctx, req.Symbol) - if err != nil { - logx.Errorf("[GetL2AssetInfoBySymbol] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetCurrencyPriceBySymbol{ - Price: strconv.FormatFloat(price, 'E', -1, 64), - AssetId: uint32(l2Asset.ID), - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getcurrencypriceslogic.go b/service/api/app/internal/logic/info/getcurrencypriceslogic.go deleted file mode 100644 index 90d9ce3e5..000000000 --- a/service/api/app/internal/logic/info/getcurrencypriceslogic.go +++ /dev/null @@ -1,69 +0,0 @@ -package info - -import ( - "context" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/price" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetCurrencyPricesLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - price price.Price - l2asset l2asset.L2asset -} - -func NewGetCurrencyPricesLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrencyPricesLogic { - return &GetCurrencyPricesLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - price: price.New(svcCtx), - l2asset: l2asset.New(svcCtx), - } -} - -func (l *GetCurrencyPricesLogic) GetCurrencyPrices(req *types.ReqGetCurrencyPrices) (*types.RespGetCurrencyPrices, error) { - l2Assets, err := l.l2asset.GetL2AssetsList(l.ctx) - if 
err != nil { - logx.Errorf("[GetL2AssetsList] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - //TODO: performance issue here - resp := &types.RespGetCurrencyPrices{} - for _, asset := range l2Assets { - price, err := l.price.GetCurrencyPrice(l.ctx, asset.AssetSymbol) - if err != nil { - logx.Errorf("[GetCurrencyPrice] err: %s", err.Error()) - if err == errorcode.AppErrQuoteNotExist { - return nil, err - } - return nil, errorcode.AppErrInternal - } - //TODO: fix the symbol - if asset.AssetSymbol == "LEG" { - price = 1.0 - } - if asset.AssetSymbol == "REY" { - price = 0.5 - } - resp.Data = append(resp.Data, &types.DataCurrencyPrices{ - Pair: asset.AssetSymbol + "/" + "USDT", - AssetId: asset.AssetId, - Price: strconv.FormatFloat(price, 'E', -1, 64), - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getgasaccountlogic.go b/service/api/app/internal/logic/info/getgasaccountlogic.go deleted file mode 100644 index fadbc0e2d..000000000 --- a/service/api/app/internal/logic/info/getgasaccountlogic.go +++ /dev/null @@ -1,66 +0,0 @@ -package info - -import ( - "context" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetGasAccountLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - sysConfig sysconf.Sysconf - account account.Model -} - -func NewGetGasAccountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasAccountLogic { - return &GetGasAccountLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - sysConfig: sysconf.New(svcCtx), - account: account.New(svcCtx), - } -} - -func (l *GetGasAccountLogic) GetGasAccount(req *types.ReqGetGasAccount) (resp *types.RespGetGasAccount, err error) { - accountIndexConfig, err := l.sysConfig.GetSysconfigByName(l.ctx, sysconfigName.GasAccountIndex) - if err != nil { - logx.Errorf("[GetGasAccountLogic] get sys config error: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - accountIndex, err := strconv.ParseInt(accountIndexConfig.Value, 10, 64) - if err != nil { - logx.Errorf("[GetGasAccountLogic] invalid account index: %s", accountIndexConfig.Value) - return nil, errorcode.AppErrInternal - } - - accountModel, err := l.account.GetAccountByAccountIndex(accountIndex) - if err != nil { - logx.Errorf("[GetGasAccountLogic] get account error, index: %d, err: %s", accountIndex, err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - resp = &types.RespGetGasAccount{ - AccountStatus: int64(accountModel.Status), - AccountIndex: accountModel.AccountIndex, - AccountName: accountModel.AccountName, - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getgasfeeassetlistlogic.go b/service/api/app/internal/logic/info/getgasfeeassetlistlogic.go deleted file mode 100644 index 5987aedb7..000000000 --- a/service/api/app/internal/logic/info/getgasfeeassetlistlogic.go +++ /dev/null @@ -1,57 +0,0 @@ -package info - -import ( - "context" - - 
"github.com/zeromicro/go-zero/core/logx" - - table "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetGasFeeAssetListLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - l2asset l2asset.L2asset -} - -func NewGetGasFeeAssetListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasFeeAssetListLogic { - return &GetGasFeeAssetListLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - l2asset: l2asset.New(svcCtx), - } -} - -func (l *GetGasFeeAssetListLogic) GetGasFeeAssetList(req *types.ReqGetGasFeeAssetList) (*types.RespGetGasFeeAssetList, error) { - assets, err := l.l2asset.GetL2AssetsList(l.ctx) - if err != nil { - logx.Errorf("[GetL2AssetsList] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetGasFeeAssetList{ - Assets: make([]types.AssetInfo, 0), - } - for _, asset := range assets { - if asset.IsGasAsset != table.IsGasAsset { - continue - } - resp.Assets = append(resp.Assets, types.AssetInfo{ - AssetId: asset.AssetId, - AssetName: asset.AssetName, - AssetDecimals: asset.Decimals, - AssetSymbol: asset.AssetSymbol, - AssetAddress: asset.L1Address, - IsGasAsset: asset.IsGasAsset, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getgasfeelogic.go b/service/api/app/internal/logic/info/getgasfeelogic.go deleted file mode 100644 index 45783bf23..000000000 --- a/service/api/app/internal/logic/info/getgasfeelogic.go +++ /dev/null @@ -1,106 +0,0 @@ -package info - -import ( - "context" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/price" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetGasFeeLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - price price.Price - l2asset l2asset.L2asset - sysconf sysconf.Sysconf -} - -func NewGetGasFeeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasFeeLogic { - return &GetGasFeeLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - price: price.New(svcCtx), - l2asset: l2asset.New(svcCtx), - sysconf: sysconf.New(svcCtx), - } -} - -// GetGasFee 需求文档 -func (l *GetGasFeeLogic) GetGasFee(req *types.ReqGetGasFee) (*types.RespGetGasFee, error) { - resp := &types.RespGetGasFee{} - l2Asset, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(l.ctx, req.AssetId) - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - oAssetInfo, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(context.Background(), req.AssetId) - if err != nil { - logx.Errorf("[GetGasFee] unable to get 
l2 asset info: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - if oAssetInfo.IsGasAsset != assetInfo.IsGasAsset { - logx.Errorf("[GetGasFee] not gas asset id") - return nil, errorcode.AppErrInvalidGasAsset - } - sysGasFee, err := l.sysconf.GetSysconfigByName(l.ctx, sysconfigName.SysGasFee) - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - sysGasFeeBigInt, isValid := new(big.Int).SetString(sysGasFee.Value, 10) - if !isValid { - logx.Errorf("[GetGasFee] parse sys gas fee err: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - // if asset id == BNB, just return - if l2Asset.AssetId == commonConstant.BNBAssetId { - resp.GasFee = sysGasFeeBigInt.String() - return resp, nil - } - // if not, try to compute the gas amount based on USD - assetPrice, err := l.price.GetCurrencyPrice(l.ctx, l2Asset.AssetSymbol) - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - bnbPrice, err := l.price.GetCurrencyPrice(l.ctx, "BNB") - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - bnbDecimals, _ := new(big.Int).SetString(commonConstant.BNBDecimalsStr, 10) - assetDecimals := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(oAssetInfo.Decimals)), nil) - // bnbPrice * bnbAmount * assetDecimals / (10^18 * assetPrice) - left := ffmath.FloatMul(ffmath.FloatMul(big.NewFloat(bnbPrice), ffmath.IntToFloat(sysGasFeeBigInt)), ffmath.IntToFloat(assetDecimals)) - right := ffmath.FloatMul(ffmath.IntToFloat(bnbDecimals), big.NewFloat(assetPrice)) - gasFee, err := util.CleanPackedFee(ffmath.FloatToInt(ffmath.FloatDiv(left, right))) - if err != nil { - logx.Errorf("[GetGasFee] unable to clean packed fee: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - resp.GasFee = gasFee.String() - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getlayer2basicinfologic.go b/service/api/app/internal/logic/info/getlayer2basicinfologic.go deleted file mode 100644 index d435aeb63..000000000 --- a/service/api/app/internal/logic/info/getlayer2basicinfologic.go +++ /dev/null @@ -1,94 +0,0 @@ -package info - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/txdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetLayer2BasicInfoLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - sysconfigModel sysconf.Sysconf - block block.Block - tx tx.Model - txDetail txdetail.Model -} - -func NewGetLayer2BasicInfoLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLayer2BasicInfoLogic { - return &GetLayer2BasicInfoLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - sysconfigModel: sysconf.New(svcCtx), - block: block.New(svcCtx), - tx: tx.New(svcCtx), - txDetail: txdetail.New(svcCtx), - } -} - -var ( - contractAddressesNames = []string{ - "ZkbasContract", - "ZnsPriceOracle", - } -) - -func (l *GetLayer2BasicInfoLogic) GetLayer2BasicInfo(_ 
*types.ReqGetLayer2BasicInfo) (*types.RespGetLayer2BasicInfo, error) { - resp := &types.RespGetLayer2BasicInfo{ - ContractAddresses: make([]string, 0), - } - var err error - resp.BlockCommitted, err = l.block.GetCommittedBlocksCount(l.ctx) - if err != nil { - logx.Errorf("[GetCommittedBlocksCount] err: %s", err.Error()) - return nil, err - } - resp.BlockVerified, err = l.block.GetVerifiedBlocksCount(l.ctx) - if err != nil { - logx.Errorf("[GetVerifiedBlocksCount] err: %s", err.Error()) - return nil, err - } - resp.TotalTransactions, err = l.tx.GetTxsTotalCount(l.ctx) - if err != nil { - logx.Errorf("[GetTxsTotalCount] err: %s", err.Error()) - return nil, err - } - resp.TransactionsCountYesterday, err = l.tx.GetTxCountByTimeRange(l.ctx, "yesterday") - if err != nil { - logx.Errorf("[GetTxCountByTimeRange] err: %s", err.Error()) - return nil, err - } - resp.TransactionsCountToday, err = l.tx.GetTxCountByTimeRange(l.ctx, "today") - if err != nil { - logx.Errorf("[GetTxCountByTimeRange] err: %s", err.Error()) - return nil, err - } - resp.DauYesterday, err = l.txDetail.GetDauInTxDetail(l.ctx, "yesterday") - if err != nil { - logx.Errorf("[GetDauInTxDetail] err: %s", err.Error()) - return nil, err - } - resp.DauToday, err = l.txDetail.GetDauInTxDetail(l.ctx, "today") - if err != nil { - logx.Errorf("[GetDauInTxDetail] err: %s", err.Error()) - return nil, err - } - for _, contractAddressesName := range contractAddressesNames { - contract, err := l.sysconfigModel.GetSysconfigByName(l.ctx, contractAddressesName) - if err != nil { - logx.Errorf("[GetSysconfigByName] err: %s", err.Error()) - return nil, err - } - resp.ContractAddresses = append(resp.ContractAddresses, contract.Value) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/info/getwithdrawgasfeelogic.go b/service/api/app/internal/logic/info/getwithdrawgasfeelogic.go deleted file mode 100644 index 45a304d84..000000000 --- a/service/api/app/internal/logic/info/getwithdrawgasfeelogic.go +++ /dev/null @@ -1,97 +0,0 @@ -package info - -import ( - "context" - "errors" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/price" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetWithdrawGasFeeLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - price price.Price - l2asset l2asset.L2asset - sysconf sysconf.Sysconf -} - -func NewGetWithdrawGasFeeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetWithdrawGasFeeLogic { - return &GetWithdrawGasFeeLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - price: price.New(svcCtx), - l2asset: l2asset.New(svcCtx), - sysconf: sysconf.New(svcCtx), - } -} - -//TODO: refactor; this function is currently copied from service/api/app/internal/logic/info/getgasfeelogic.go:38 -func (l *GetWithdrawGasFeeLogic) GetWithdrawGasFee(req *types.ReqGetWithdrawGasFee) (*types.RespGetWithdrawGasFee, error) { - resp := &types.RespGetWithdrawGasFee{} - l2Asset, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(l.ctx, req.AssetId) - if err != nil { - 
logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, err - } - oAssetInfo, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(context.Background(), req.AssetId) - if err != nil { - logx.Errorf("[GetGasFee] unable to get l2 asset info: %s", err.Error()) - return nil, err - } - if oAssetInfo.IsGasAsset != assetInfo.IsGasAsset { - logx.Errorf("[GetGasFee] not gas asset id") - return nil, errors.New("[GetGasFee] not gas asset id") - } - sysGasFee, err := l.sysconf.GetSysconfigByName(l.ctx, sysconfigName.SysGasFee) - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, err - } - sysGasFeeBigInt, isValid := new(big.Int).SetString(sysGasFee.Value, 10) - if !isValid { - logx.Errorf("[GetGasFee] parse sys gas fee err: %s", err.Error()) - return nil, err - } - // if asset id == BNB, just return - if l2Asset.AssetId == commonConstant.BNBAssetId { - resp.GasFee = sysGasFeeBigInt.String() - return resp, nil - } - // if not, try to compute the gas amount based on USD - assetPrice, err := l.price.GetCurrencyPrice(l.ctx, l2Asset.AssetSymbol) - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, err - } - bnbPrice, err := l.price.GetCurrencyPrice(l.ctx, "BNB") - if err != nil { - logx.Errorf("[GetGasFee] err: %s", err.Error()) - return nil, err - } - bnbDecimals, _ := new(big.Int).SetString(commonConstant.BNBDecimalsStr, 10) - assetDecimals := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(oAssetInfo.Decimals)), nil) - // bnbPrice * bnbAmount * assetDecimals / (10^18 * assetPrice) - left := ffmath.FloatMul(ffmath.FloatMul(big.NewFloat(bnbPrice), ffmath.IntToFloat(sysGasFeeBigInt)), ffmath.IntToFloat(assetDecimals)) - right := ffmath.FloatMul(ffmath.IntToFloat(bnbDecimals), big.NewFloat(assetPrice)) - gasFee, err := util.CleanPackedFee(ffmath.FloatToInt(ffmath.FloatDiv(left, right))) - if err != nil { - logx.Errorf("[GetGasFee] unable to clean packed fee: %s", err.Error()) - return nil, err - } - resp.GasFee = gasFee.String() - return resp, nil -} diff --git a/service/api/app/internal/logic/info/searchlogic.go b/service/api/app/internal/logic/info/searchlogic.go deleted file mode 100644 index 1beff50b0..000000000 --- a/service/api/app/internal/logic/info/searchlogic.go +++ /dev/null @@ -1,63 +0,0 @@ -package info - -import ( - "context" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SearchLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - - sysconfigModel sysconf.Sysconf - block block.Block - tx tx.Model - account account.Model -} - -func NewSearchLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SearchLogic { - return &SearchLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - sysconfigModel: sysconf.New(svcCtx), - block: block.New(svcCtx), - tx: tx.New(svcCtx), - account: account.New(svcCtx), - } -} - -func (l *SearchLogic) Search(req *types.ReqSearch) (*types.RespSearch, error) { - resp := &types.RespSearch{} - blockHeight, err := strconv.ParseInt(req.Info, 10, 64) - if err == nil { - if _, err = l.block.GetBlockByBlockHeight(l.ctx, 
blockHeight); err != nil { - logx.Errorf("[GetBlockByBlockHeight] err: %s", err.Error()) - return nil, err - } - resp.DataType = util.TypeBlockHeight - return resp, nil - } - // TODO: prevent sql slow query, bloom Filter - if _, err = l.tx.GetTxByTxHash(l.ctx, req.Info); err == nil { - resp.DataType = util.TypeTxType - return resp, nil - } - if _, err = l.account.GetAccountByAccountName(l.ctx, req.Info); err != nil { - logx.Errorf("[GetAccountByAccountName] err: %s", err.Error()) - return nil, err - } - resp.DataType = util.TypeAccountName - return resp, nil -} diff --git a/service/api/app/internal/logic/nft/getaccountnftlistlogic.go b/service/api/app/internal/logic/nft/getaccountnftlistlogic.go deleted file mode 100644 index 33d251e7c..000000000 --- a/service/api/app/internal/logic/nft/getaccountnftlistlogic.go +++ /dev/null @@ -1,70 +0,0 @@ -package nft - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/nft" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAccountNftListLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - nftModel nft.Nft -} - -func NewGetAccountNftListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountNftListLogic { - return &GetAccountNftListLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - nftModel: nft.New(svcCtx), - } -} - -func (l *GetAccountNftListLogic) GetAccountNftList(req *types.ReqGetAccountNftList) (*types.RespGetAccountNftList, error) { - total, err := l.nftModel.GetAccountNftTotalCount(l.ctx, req.AccountIndex) - if err != nil { - logx.Errorf("[GetAccountNftList] get account nft total count error: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - resp := &types.RespGetAccountNftList{ - Total: total, - Nfts: make([]*types.Nft, 0), - } - if total == 0 || total < int64(req.Offset) { - return resp, nil - } - - nftList, err := l.nftModel.GetNftListByAccountIndex(l.ctx, req.AccountIndex, int64(req.Limit), int64(req.Offset)) - if err != nil { - logx.Errorf("[GetAccountNftList] get nft list by account error: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - for _, nftItem := range nftList { - resp.Nfts = append(resp.Nfts, &types.Nft{ - NftIndex: nftItem.NftIndex, - CreatorAccountIndex: nftItem.CreatorAccountIndex, - OwnerAccountIndex: nftItem.OwnerAccountIndex, - NftContentHash: nftItem.NftContentHash, - NftL1Address: nftItem.NftL1Address, - NftL1TokenId: nftItem.NftL1TokenId, - CreatorTreasuryRate: nftItem.CreatorTreasuryRate, - CollectionId: nftItem.CollectionId, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/nft/getmaxofferidlogic.go b/service/api/app/internal/logic/nft/getmaxofferidlogic.go deleted file mode 100644 index df9ff2836..000000000 --- a/service/api/app/internal/logic/nft/getmaxofferidlogic.go +++ /dev/null @@ -1,40 +0,0 @@ -package nft - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetMaxOfferIdLogic 
struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC -} - -func NewGetMaxOfferIdLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetMaxOfferIdLogic { - return &GetMaxOfferIdLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetMaxOfferIdLogic) GetMaxOfferId(req *types.ReqGetMaxOfferId) (resp *types.RespGetMaxOfferId, err error) { - offerId, err := l.globalRPC.GetMaxOfferId(l.ctx, req.AccountIndex) - if err != nil { - logx.Errorf("[GetMaxOfferId] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - return &types.RespGetMaxOfferId{OfferId: offerId}, nil -} diff --git a/service/api/app/internal/logic/pair/getavailablepairslogic.go b/service/api/app/internal/logic/pair/getavailablepairslogic.go deleted file mode 100644 index 40df1450f..000000000 --- a/service/api/app/internal/logic/pair/getavailablepairslogic.go +++ /dev/null @@ -1,73 +0,0 @@ -package pair - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/l2asset" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/liquidity" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetAvailablePairsLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - liquidity liquidity.LiquidityModel - l2asset l2asset.L2asset -} - -func NewGetAvailablePairsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAvailablePairsLogic { - return &GetAvailablePairsLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - liquidity: liquidity.New(svcCtx), - l2asset: l2asset.New(svcCtx), - } -} - -func (l *GetAvailablePairsLogic) GetAvailablePairs(_ *types.ReqGetAvailablePairs) (*types.RespGetAvailablePairs, error) { - liquidityAssets, err := l.liquidity.GetAllLiquidityAssets() - if err != nil { - logx.Errorf("[GetAllLiquidityAssets] error: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetAvailablePairs{} - for _, asset := range liquidityAssets { - assetA, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(l.ctx, uint32(asset.AssetAId)) - if err != nil { - logx.Errorf("[GetSimpleL2AssetInfoByAssetId] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - assetB, err := l.l2asset.GetSimpleL2AssetInfoByAssetId(l.ctx, uint32(asset.AssetBId)) - if err != nil { - logx.Errorf("[GetSimpleL2AssetInfoByAssetId] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp.Pairs = append(resp.Pairs, &types.Pair{ - PairIndex: uint32(asset.PairIndex), - AssetAId: uint32(asset.AssetAId), - AssetAName: assetA.AssetName, - AssetAAmount: asset.AssetA, - AssetBId: uint32(asset.AssetBId), - AssetBName: assetB.AssetName, - AssetBAmount: asset.AssetB, - FeeRate: asset.FeeRate, - TreasuryRate: asset.TreasuryRate, - }) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/pair/getlpvaluelogic.go b/service/api/app/internal/logic/pair/getlpvaluelogic.go deleted file mode 100644 index 
f29c8bddd..000000000 --- a/service/api/app/internal/logic/pair/getlpvaluelogic.go +++ /dev/null @@ -1,51 +0,0 @@ -package pair - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetLPValueLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC -} - -func NewGetLPValueLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLPValueLogic { - return &GetLPValueLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetLPValueLogic) GetLPValue(req *types.ReqGetLPValue) (resp *types.RespGetLPValue, err error) { - if checker.CheckPairIndex(req.PairIndex) { - logx.Errorf("[CheckPairIndex] param: %d", req.PairIndex) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid PairIndex") - } - lpValue, err := l.globalRPC.GetLpValue(l.ctx, req.PairIndex, req.LpAmount) - if err != nil { - logx.Errorf("[GetLpValue] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp = &types.RespGetLPValue{ - AssetAId: lpValue.AssetAId, - AssetAAmount: lpValue.AssetAAmount, - AssetBid: lpValue.AssetBId, - AssetBAmount: lpValue.AssetBAmount, - } - return resp, nil -} diff --git a/service/api/app/internal/logic/pair/getpairinfologic.go b/service/api/app/internal/logic/pair/getpairinfologic.go deleted file mode 100644 index e6cd4cf08..000000000 --- a/service/api/app/internal/logic/pair/getpairinfologic.go +++ /dev/null @@ -1,52 +0,0 @@ -package pair - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetPairInfoLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC -} - -func NewGetPairInfoLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetPairInfoLogic { - return &GetPairInfoLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetPairInfoLogic) GetPairInfo(req *types.ReqGetPairInfo) (*types.RespGetPairInfo, error) { - if checker.CheckPairIndex(req.PairIndex) { - logx.Errorf("[CheckPairIndex] param: %d", req.PairIndex) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid PairIndex") - } - pair, err := l.globalRPC.GetPairInfo(l.ctx, req.PairIndex) - if err != nil { - logx.Errorf("[GetPairRatio] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetPairInfo{ - AssetAId: pair.AssetAId, - AssetAAmount: pair.AssetAAmount, - AssetBId: pair.AssetBId, - AssetBAmount: pair.AssetBAmount, - TotalLpAmount: pair.LpAmount, - } - return resp, nil -} diff --git a/service/api/app/internal/logic/pair/getswapamountlogic.go b/service/api/app/internal/logic/pair/getswapamountlogic.go deleted file mode 100644 index 
c7e95fe9d..000000000 --- a/service/api/app/internal/logic/pair/getswapamountlogic.go +++ /dev/null @@ -1,52 +0,0 @@ -package pair - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetSwapAmountLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRPC globalrpc.GlobalRPC -} - -func NewGetSwapAmountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetSwapAmountLogic { - return &GetSwapAmountLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRPC: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetSwapAmountLogic) GetSwapAmount(req *types.ReqGetSwapAmount) (*types.RespGetSwapAmount, error) { - if checker.CheckPairIndex(req.PairIndex) { - logx.Errorf("[CheckPairIndex] param: %d", req.PairIndex) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid PairIndex") - } - if checker.CheckAssetId(req.AssetId) { - logx.Errorf("[CheckAssetId] param: %d", req.AssetId) - return nil, errorcode.AppErrInvalidParam.RefineError("invalid AssetId") - } - resAssetAmount, resAssetId, err := l.globalRPC.GetSwapAmount(l.ctx, uint64(req.PairIndex), uint64(req.AssetId), req.AssetAmount, req.IsFrom) - if err != nil { - logx.Errorf("[GetSwapAmount] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - return &types.RespGetSwapAmount{ - ResAssetAmount: resAssetAmount, - ResAssetId: resAssetId, - }, nil -} diff --git a/service/api/app/internal/logic/root/getstatuslogic.go b/service/api/app/internal/logic/root/getstatuslogic.go deleted file mode 100644 index 743dd93a9..000000000 --- a/service/api/app/internal/logic/root/getstatuslogic.go +++ /dev/null @@ -1,38 +0,0 @@ -package root - -import ( - "context" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetStatusLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext -} - -func NewGetStatusLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetStatusLogic { - return &GetStatusLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - } -} - -func packServerVersion(CodeVersion string, GitCommitHash string) string { - return fmt.Sprintf("%s:%s ", CodeVersion, GitCommitHash) - -} - -func (l *GetStatusLogic) GetStatus(req *types.ReqGetStatus) (resp *types.RespGetStatus, err error) { - return &types.RespGetStatus{ - Status: 200, - NetworkId: 1, - ServerVersion: packServerVersion(l.svcCtx.CodeVersion, l.svcCtx.GitCommitHash), - }, nil -} diff --git a/service/api/app/internal/logic/transaction/getmempooltxsbyaccountnamelogic.go b/service/api/app/internal/logic/transaction/getmempooltxsbyaccountnamelogic.go deleted file mode 100644 index af44714b9..000000000 --- a/service/api/app/internal/logic/transaction/getmempooltxsbyaccountnamelogic.go +++ /dev/null @@ -1,68 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempooltxdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetmempoolTxsByAccountNameLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model - memPoolTxDetail mempooltxdetail.Model - mempool mempool.Mempool -} - -func NewGetmempoolTxsByAccountNameLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetmempoolTxsByAccountNameLogic { - return &GetmempoolTxsByAccountNameLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - memPoolTxDetail: mempooltxdetail.New(svcCtx), - mempool: mempool.New(svcCtx), - } -} - -func (l *GetmempoolTxsByAccountNameLogic) GetmempoolTxsByAccountName(req *types.ReqGetmempoolTxsByAccountName) (*types.RespGetmempoolTxsByAccountName, error) { - //TODO: check AccountName - account, err := l.account.GetAccountByAccountName(l.ctx, req.AccountName) - if err != nil { - logx.Errorf("[GetAccountByAccountName] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - mempoolTxDetails, err := l.memPoolTxDetail.GetMemPoolTxDetailByAccountIndex(l.ctx, account.AccountIndex) - if err != nil { - logx.Errorf("[GetMemPoolTxDetailByAccountIndex] AccountIndex: %d err: %s", account.AccountIndex, err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - resp := &types.RespGetmempoolTxsByAccountName{ - Txs: make([]*types.Tx, 0), - } - for _, d := range mempoolTxDetails { - tx, err := l.mempool.GetMempoolTxByTxId(l.ctx, d.TxId) - if err != nil { - logx.Errorf("[GetMempoolTxByTxID] TxId: %d, err: %s", d.TxId, err.Error()) - continue - } - resp.Txs = append(resp.Txs, utils.MempoolTx2Tx(tx)) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/getmempooltxslogic.go b/service/api/app/internal/logic/transaction/getmempooltxslogic.go deleted file mode 100644 index caa5fc60e..000000000 --- a/service/api/app/internal/logic/transaction/getmempooltxslogic.go +++ /dev/null @@ -1,58 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetMempoolTxsLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - mempool mempool.Mempool - block block.Block -} - -func NewGetMempoolTxsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetMempoolTxsLogic { - return &GetMempoolTxsLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - mempool: mempool.New(svcCtx), - block: block.New(svcCtx), - } -} -func (l *GetMempoolTxsLogic) GetMempoolTxs(req *types.ReqGetMempoolTxs) (*types.RespGetMempoolTxs, error) { - count, err := l.mempool.GetMempoolTxsTotalCount() - if err != nil { - logx.Errorf("[GetMempoolTxsTotalCount] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { 
- return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetMempoolTxs{ - MempoolTxs: make([]*types.Tx, 0), - Total: uint32(count), - } - if count == 0 { - return resp, nil - } - mempoolTxs, err := l.mempool.GetMempoolTxs(int(req.Offset), int(req.Limit)) - if err != nil { - logx.Errorf("[GetMempoolTxs] err: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - for _, mempoolTx := range mempoolTxs { - resp.MempoolTxs = append(resp.MempoolTxs, utils.MempoolTx2Tx(mempoolTx)) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/getnextnoncelogic.go b/service/api/app/internal/logic/transaction/getnextnoncelogic.go deleted file mode 100644 index 1c7a479a7..000000000 --- a/service/api/app/internal/logic/transaction/getnextnoncelogic.go +++ /dev/null @@ -1,40 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetNextNonceLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewGetNextNonceLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetNextNonceLogic { - return &GetNextNonceLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *GetNextNonceLogic) GetNextNonce(req *types.ReqGetNextNonce) (*types.RespGetNextNonce, error) { - nonce, err := l.globalRpc.GetNextNonce(l.ctx, req.AccountIndex) - if err != nil { - logx.Errorf("[GetNextNonce] err: %s", err.Error()) - if err == errorcode.RpcErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - return &types.RespGetNextNonce{Nonce: nonce}, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxbyhashlogic.go b/service/api/app/internal/logic/transaction/gettxbyhashlogic.go deleted file mode 100644 index 1d2b54725..000000000 --- a/service/api/app/internal/logic/transaction/gettxbyhashlogic.go +++ /dev/null @@ -1,74 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxByHashLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - mempool mempool.Mempool - block block.Block - tx tx.Model -} - -func NewGetTxByHashLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxByHashLogic { - return &GetTxByHashLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - mempool: mempool.New(svcCtx), - block: block.New(svcCtx), - tx: tx.New(svcCtx), - } -} - -func (l *GetTxByHashLogic) GetTxByHash(req *types.ReqGetTxByHash) (*types.RespGetTxByHash, error) { - resp := &types.RespGetTxByHash{} - tx, err := l.tx.GetTxByTxHash(l.ctx, req.TxHash) - if err == nil { - resp.Tx = *utils.GormTx2Tx(tx) - } - if err != nil { - 
if err != errorcode.DbErrNotFound { - return nil, errorcode.AppErrInternal - } - memppolTx, err := l.mempool.GetMempoolTxByTxHash(req.TxHash) - if err != nil { - logx.Errorf("[GetMempoolTxByTxHash]: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp.Tx = *utils.MempoolTx2Tx(memppolTx) - } - if resp.Tx.TxType == commonTx.TxTypeSwap { - txInfo, err := commonTx.ParseSwapTxInfo(tx.TxInfo) - if err != nil { - logx.Errorf("[ParseSwapTxInfo]: %s", err.Error()) - return nil, errorcode.AppErrInternal - } - resp.AssetAId = txInfo.AssetAId - resp.AssetBId = txInfo.AssetBId - } - block, err := l.block.GetBlockByBlockHeight(l.ctx, resp.Tx.BlockHeight) - if err == nil { - resp.CommittedAt = block.CommittedAt - resp.ExecutedAt = block.CreatedAt.Unix() - resp.VerifiedAt = block.VerifiedAt - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxsbyaccountindexandtxtypelogic.go b/service/api/app/internal/logic/transaction/gettxsbyaccountindexandtxtypelogic.go deleted file mode 100644 index 0d8724d44..000000000 --- a/service/api/app/internal/logic/transaction/gettxsbyaccountindexandtxtypelogic.go +++ /dev/null @@ -1,86 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempooltxdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/txdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsByAccountIndexAndTxTypeLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - tx tx.Model - globalRPC globalrpc.GlobalRPC - block block.Block - mempool mempool.Mempool - txDetail txdetail.Model - memPoolTxDetail mempooltxdetail.Model -} - -func NewGetTxsByAccountIndexAndTxTypeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsByAccountIndexAndTxTypeLogic { - return &GetTxsByAccountIndexAndTxTypeLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - tx: tx.New(svcCtx), - globalRPC: globalrpc.New(svcCtx, ctx), - block: block.New(svcCtx), - mempool: mempool.New(svcCtx), - txDetail: txdetail.New(svcCtx), - memPoolTxDetail: mempooltxdetail.New(svcCtx), - } -} - -func (l *GetTxsByAccountIndexAndTxTypeLogic) GetTxsByAccountIndexAndTxType(req *types.ReqGetTxsByAccountIndexAndTxType) (*types.RespGetTxsByAccountIndexAndTxType, error) { - txDetails, err := l.txDetail.GetTxDetailByAccountIndex(l.ctx, int64(req.AccountIndex)) - if err != nil { - logx.Errorf("[GetTxDetailByAccountIndex] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetTxsByAccountIndexAndTxType{ - Txs: make([]*types.Tx, 0), - } - for _, txDetail := range txDetails { - tx, err := l.tx.GetTxByTxID(l.ctx, txDetail.TxId) - if err != nil { - logx.Errorf("[GetTxByTxID] err: %s", err.Error()) - return nil, err - } - if tx.TxType == int64(req.TxType) { - resp.Total = resp.Total + 1 - resp.Txs = 
append(resp.Txs, utils.GormTx2Tx(tx)) - } - } - memPoolTxDetails, err := l.memPoolTxDetail.GetMemPoolTxDetailByAccountIndex(l.ctx, int64(req.AccountIndex)) - if err != nil { - logx.Errorf("[GetMemPoolTxDetailByAccountIndex] err: %s", err.Error()) - return nil, err - } - for _, txDetail := range memPoolTxDetails { - tx, err := l.mempool.GetMempoolTxByTxId(l.ctx, txDetail.TxId) - if err != nil { - logx.Errorf("[GetMempoolTxByTxId] err: %s", err.Error()) - return nil, err - } - if tx.TxType == int64(req.TxType) { - resp.Total = resp.Total + 1 - resp.Txs = append(resp.Txs, utils.MempoolTx2Tx(tx)) - } - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxsbyaccountnamelogic.go b/service/api/app/internal/logic/transaction/gettxsbyaccountnamelogic.go deleted file mode 100644 index 2a2402081..000000000 --- a/service/api/app/internal/logic/transaction/gettxsbyaccountnamelogic.go +++ /dev/null @@ -1,84 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/txdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsByAccountNameLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model - tx tx.Model - globalRpc globalrpc.GlobalRPC - mempool mempool.Mempool - block block.Block - txDetail txdetail.Model -} - -func NewGetTxsByAccountNameLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsByAccountNameLogic { - return &GetTxsByAccountNameLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - globalRpc: globalrpc.New(svcCtx, ctx), - tx: tx.New(svcCtx), - mempool: mempool.New(svcCtx), - block: block.New(svcCtx), - txDetail: txdetail.New(svcCtx), - } -} - -func (l *GetTxsByAccountNameLogic) GetTxsByAccountName(req *types.ReqGetTxsByAccountName) (*types.RespGetTxsByAccountName, error) { - account, err := l.account.GetAccountByAccountName(l.ctx, req.AccountName) - if err != nil { - logx.Errorf("[transaction.GetTxsByAccountName] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - txIds, err := l.txDetail.GetTxIdsByAccountIndex(l.ctx, account.AccountIndex) - if err != nil { - logx.Errorf("[GetTxDetailByAccountIndex] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetTxsByAccountName{ - Total: uint32(len(txIds)), - Txs: make([]*types.Tx, 0), - } - if !checker.CheckOffset(req.Offset, resp.Total) { - return nil, errorcode.AppErrInvalidParam - } - end := req.Offset + req.Limit - if resp.Total < (req.Offset + req.Limit) { - end = resp.Total - } - for _, id := range txIds[req.Offset:end] { - tx, err := l.tx.GetTxByTxID(l.ctx, id) - if err != nil { - logx.Errorf("[GetTxByTxID] err: %s", 
err.Error()) - return nil, err - } - resp.Txs = append(resp.Txs, utils.GormTx2Tx(tx)) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxsbypubkeylogic.go b/service/api/app/internal/logic/transaction/gettxsbypubkeylogic.go deleted file mode 100644 index ee6ceafdf..000000000 --- a/service/api/app/internal/logic/transaction/gettxsbypubkeylogic.go +++ /dev/null @@ -1,86 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/txdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsByPubKeyLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - account account.Model - globalRpc globalrpc.GlobalRPC - tx tx.Model - mempool mempool.Mempool - block block.Block - txDetail txdetail.Model -} - -func NewGetTxsByPubKeyLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsByPubKeyLogic { - return &GetTxsByPubKeyLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - account: account.New(svcCtx), - globalRpc: globalrpc.New(svcCtx, ctx), - tx: tx.New(svcCtx), - mempool: mempool.New(svcCtx), - block: block.New(svcCtx), - txDetail: txdetail.New(svcCtx), - } -} - -func (l *GetTxsByPubKeyLogic) GetTxsByPubKey(req *types.ReqGetTxsByPubKey) (*types.RespGetTxsByPubKey, error) { - //TODO: check pubkey - account, err := l.account.GetAccountByPk(req.AccountPk) - if err != nil { - logx.Errorf("[GetAccountByPk] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - txIds, err := l.txDetail.GetTxIdsByAccountIndex(l.ctx, account.AccountIndex) - if err != nil { - logx.Errorf("[GetTxDetailByAccountIndex] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetTxsByPubKey{ - Total: uint32(len(txIds)), - Txs: make([]*types.Tx, 0), - } - if checker.CheckOffset(req.Offset, resp.Total) { - return nil, errorcode.AppErrInvalidParam - } - end := req.Offset + req.Limit - if resp.Total < (req.Offset + req.Limit) { - end = resp.Total - } - for _, id := range txIds[req.Offset:end] { - tx, err := l.tx.GetTxByTxID(l.ctx, id) - if err != nil { - logx.Errorf("[GetTxByTxID] err: %s", err.Error()) - return nil, err - } - resp.Txs = append(resp.Txs, utils.GormTx2Tx(tx)) - } - return resp, nil - -} diff --git a/service/api/app/internal/logic/transaction/gettxslistbyaccountindexlogic.go b/service/api/app/internal/logic/transaction/gettxslistbyaccountindexlogic.go deleted file mode 100644 index daad39b18..000000000 --- a/service/api/app/internal/logic/transaction/gettxslistbyaccountindexlogic.go +++ /dev/null @@ -1,57 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/txdetail" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsListByAccountIndexLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - txDetail txdetail.Model - tx tx.Model -} - -func NewGetTxsListByAccountIndexLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsListByAccountIndexLogic { - return &GetTxsListByAccountIndexLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - txDetail: txdetail.New(svcCtx), - tx: tx.New(svcCtx), - } -} - -func (l *GetTxsListByAccountIndexLogic) GetTxsListByAccountIndex(req *types.ReqGetTxsListByAccountIndex) (*types.RespGetTxsListByAccountIndex, error) { - - txDetails, err := l.txDetail.GetTxDetailByAccountIndex(l.ctx, int64(req.AccountIndex)) - if err != nil { - logx.Errorf("[GetTxDetailByAccountIndex] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - - resp := &types.RespGetTxsListByAccountIndex{ - Txs: make([]*types.Tx, 0), - } - for _, d := range txDetails { - tx, err := l.tx.GetTxByTxID(l.ctx, d.TxId) - if err != nil { - logx.Errorf("[GetTxByTxID] err: %s", err.Error()) - return nil, err - } - resp.Txs = append(resp.Txs, utils.GormTx2Tx(tx)) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxslistbyblockheightlogic.go b/service/api/app/internal/logic/transaction/gettxslistbyblockheightlogic.go deleted file mode 100644 index 002749d3a..000000000 --- a/service/api/app/internal/logic/transaction/gettxslistbyblockheightlogic.go +++ /dev/null @@ -1,49 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsListByBlockHeightLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - block block.Block -} - -func NewGetTxsListByBlockHeightLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsListByBlockHeightLogic { - return &GetTxsListByBlockHeightLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - block: block.New(svcCtx), - } -} - -func (l *GetTxsListByBlockHeightLogic) GetTxsListByBlockHeight(req *types.ReqGetTxsListByBlockHeight) (*types.RespGetTxsListByBlockHeight, error) { - block, err := l.block.GetBlockWithTxsByBlockHeight(l.ctx, int64(req.BlockHeight)) - if err != nil { - logx.Errorf("[GetBlockByBlockHeight] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - resp := &types.RespGetTxsListByBlockHeight{ - Total: uint32(len(block.Txs)), - Txs: make([]*types.Tx, 0), - } - for _, t := range block.Txs { - tx := utils.GormTx2Tx(t) - resp.Txs = append(resp.Txs, tx) - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/gettxslistlogic.go b/service/api/app/internal/logic/transaction/gettxslistlogic.go deleted file mode 100644 index 601875911..000000000 --- 
a/service/api/app/internal/logic/transaction/gettxslistlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type GetTxsListLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - tx tx.Model -} - -func NewGetTxsListLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsListLogic { - return &GetTxsListLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - tx: tx.New(svcCtx), - } -} - -func (l *GetTxsListLogic) GetTxsList(req *types.ReqGetTxsList) (resp *types.RespGetTxsList, err error) { - count, err := l.tx.GetTxsTotalCount(l.ctx) - if err != nil { - if err != errorcode.DbErrNotFound { - return nil, errorcode.AppErrInternal - } - } - txs := make([]*types.Tx, 0) - if count > 0 { - list, err := l.tx.GetTxsList(l.ctx, int64(req.Limit), int64(req.Offset)) - if err != nil { - return nil, errorcode.AppErrInternal - } - for _, t := range list { - tx := utils.GormTx2Tx(t) - txs = append(txs, tx) - } - } - resp = &types.RespGetTxsList{ - Total: uint32(count), - Txs: txs, - } - return resp, nil -} diff --git a/service/api/app/internal/logic/transaction/sendaddliquiditytxlogic.go b/service/api/app/internal/logic/transaction/sendaddliquiditytxlogic.go deleted file mode 100644 index 586249cff..000000000 --- a/service/api/app/internal/logic/transaction/sendaddliquiditytxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendAddLiquidityTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendAddLiquidityTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendAddLiquidityTxLogic { - return &SendAddLiquidityTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendAddLiquidityTxLogic) SendAddLiquidityTx(req *types.ReqSendAddLiquidityTx) (*types.RespSendAddLiquidityTx, error) { - txIndex, err := l.globalRpc.SendAddLiquidityTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendAddLiquidityTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendAddLiquidityTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendatomicmatchtxlogic.go b/service/api/app/internal/logic/transaction/sendatomicmatchtxlogic.go deleted file mode 100644 index cbaf5c91b..000000000 --- a/service/api/app/internal/logic/transaction/sendatomicmatchtxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendAtomicMatchTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func 
NewSendAtomicMatchTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendAtomicMatchTxLogic { - return &SendAtomicMatchTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendAtomicMatchTxLogic) SendAtomicMatchTx(req *types.ReqSendAtomicMatchTx) (*types.RespSendAtomicMatchTx, error) { - txIndex, err := l.globalRpc.SendAtomicMatchTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendAtomicMatchTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendAtomicMatchTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendcanceloffertxlogic.go b/service/api/app/internal/logic/transaction/sendcanceloffertxlogic.go deleted file mode 100644 index 5a78de5cb..000000000 --- a/service/api/app/internal/logic/transaction/sendcanceloffertxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendCancelOfferTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendCancelOfferTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendCancelOfferTxLogic { - return &SendCancelOfferTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendCancelOfferTxLogic) SendCancelOfferTx(req *types.ReqSendCancelOfferTx) (*types.RespSendCancelOfferTx, error) { - txIndex, err := l.globalRpc.SendCancelOfferTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendCancelOfferTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendCancelOfferTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendcreatecollectiontxlogic.go b/service/api/app/internal/logic/transaction/sendcreatecollectiontxlogic.go deleted file mode 100644 index acbe2f23c..000000000 --- a/service/api/app/internal/logic/transaction/sendcreatecollectiontxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendCreateCollectionTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendCreateCollectionTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendCreateCollectionTxLogic { - return &SendCreateCollectionTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendCreateCollectionTxLogic) SendCreateCollectionTx(req *types.ReqSendCreateCollectionTx) (*types.RespSendCreateCollectionTx, error) { - collectionId, err := l.globalRpc.SendCreateCollectionTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[SendCreateCollectionTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendCreateCollectionTx{CollectionId: collectionId}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendmintnfttxlogic.go b/service/api/app/internal/logic/transaction/sendmintnfttxlogic.go 
deleted file mode 100644 index 09a362d67..000000000 --- a/service/api/app/internal/logic/transaction/sendmintnfttxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendMintNftTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendMintNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendMintNftTxLogic { - return &SendMintNftTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendMintNftTxLogic) SendMintNftTx(req *types.ReqSendMintNftTx) (*types.RespSendMintNftTx, error) { - nftIndex, err := l.globalRpc.SendMintNftTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendMintNftTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendMintNftTx{NftIndex: nftIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendremoveliquiditytxlogic.go b/service/api/app/internal/logic/transaction/sendremoveliquiditytxlogic.go deleted file mode 100644 index eaedfe4bd..000000000 --- a/service/api/app/internal/logic/transaction/sendremoveliquiditytxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendRemoveLiquidityTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendRemoveLiquidityTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendRemoveLiquidityTxLogic { - return &SendRemoveLiquidityTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendRemoveLiquidityTxLogic) SendRemoveLiquidityTx(req *types.ReqSendRemoveLiquidityTx) (*types.RespSendRemoveLiquidityTx, error) { - txIndex, err := l.globalRpc.SendRemoveLiquidityTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendRemoveLiquidityTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendRemoveLiquidityTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendswaptxlogic.go b/service/api/app/internal/logic/transaction/sendswaptxlogic.go deleted file mode 100644 index 5ea888404..000000000 --- a/service/api/app/internal/logic/transaction/sendswaptxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendSwapTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendSwapTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendSwapTxLogic { - return &SendSwapTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendSwapTxLogic) 
SendSwapTx(req *types.ReqSendSwapTx) (*types.RespSendSwapTx, error) { - txIndex, err := l.globalRpc.SendSwapTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendSwapTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendSwapTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendtransfernfttxlogic.go b/service/api/app/internal/logic/transaction/sendtransfernfttxlogic.go deleted file mode 100644 index 797aecf3a..000000000 --- a/service/api/app/internal/logic/transaction/sendtransfernfttxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendTransferNftTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendTransferNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTransferNftTxLogic { - return &SendTransferNftTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendTransferNftTxLogic) SendTransferNftTx(req *types.ReqSendTransferNftTx) (*types.RespSendTransferNftTx, error) { - txIndex, err := l.globalRpc.SendTransferNftTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendTransferNftTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendTransferNftTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendtransfertxlogic.go b/service/api/app/internal/logic/transaction/sendtransfertxlogic.go deleted file mode 100644 index 0272c0dd7..000000000 --- a/service/api/app/internal/logic/transaction/sendtransfertxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendTransferTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendTransferTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTransferTxLogic { - return &SendTransferTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendTransferTxLogic) SendTransferTx(req *types.ReqSendTransferTx) (*types.RespSendTransferTx, error) { - txIndex, err := l.globalRpc.SendTransferTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendTransferTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendTransferTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendtxlogic.go b/service/api/app/internal/logic/transaction/sendtxlogic.go deleted file mode 100644 index 249757f14..000000000 --- a/service/api/app/internal/logic/transaction/sendtxlogic.go +++ /dev/null @@ -1,37 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendTxLogic struct 
{ - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTxLogic { - return &SendTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendTxLogic) SendTx(req *types.ReqSendTx) (resp *types.RespSendTx, err error) { - //err := utils.CheckRequestParam(utils.TypeTxType, reflect.ValueOf(req.TxType)) - txId, err := l.globalRpc.SendTx(l.ctx, req.TxType, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendTx{TxId: txId}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendwithdrawnfttxlogic.go b/service/api/app/internal/logic/transaction/sendwithdrawnfttxlogic.go deleted file mode 100644 index 319f4093a..000000000 --- a/service/api/app/internal/logic/transaction/sendwithdrawnfttxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendWithdrawNftTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendWithdrawNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendWithdrawNftTxLogic { - return &SendWithdrawNftTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendWithdrawNftTxLogic) SendWithdrawNftTx(req *types.ReqSendWithdrawNftTx) (*types.RespSendWithdrawNftTx, error) { - txIndex, err := l.globalRpc.SendWithdrawNftTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendWithdrawNftTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendWithdrawNftTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/transaction/sendwithdrawtxlogic.go b/service/api/app/internal/logic/transaction/sendwithdrawtxlogic.go deleted file mode 100644 index d43d51662..000000000 --- a/service/api/app/internal/logic/transaction/sendwithdrawtxlogic.go +++ /dev/null @@ -1,36 +0,0 @@ -package transaction - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/globalrpc" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -type SendWithdrawTxLogic struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - globalRpc globalrpc.GlobalRPC -} - -func NewSendWithdrawTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendWithdrawTxLogic { - return &SendWithdrawTxLogic{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - globalRpc: globalrpc.New(svcCtx, ctx), - } -} - -func (l *SendWithdrawTxLogic) SendWithdrawTx(req *types.ReqSendWithdrawTx) (*types.RespSendWithdrawTx, error) { - txIndex, err := l.globalRpc.SendWithdrawTx(l.ctx, req.TxInfo) - if err != nil { - logx.Errorf("[transaction.SendWithdrawTx] err: %s", err.Error()) - return nil, err - } - return &types.RespSendWithdrawTx{TxId: txIndex}, nil -} diff --git a/service/api/app/internal/logic/utils/typetransform.go b/service/api/app/internal/logic/utils/typetransform.go deleted file mode 100644 index 
2e2ef8128..000000000 --- a/service/api/app/internal/logic/utils/typetransform.go +++ /dev/null @@ -1,89 +0,0 @@ -package utils - -import ( - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" -) - -func GormTx2Tx(tx *tx.Tx) *types.Tx { - details := make([]*types.TxDetail, 0) - if tx.TxDetails != nil { - for _, detail := range tx.TxDetails { - d := &types.TxDetail{ - TxId: detail.TxId, - AssetId: detail.AssetId, - AssetType: detail.AssetType, - AccountIndex: detail.AccountIndex, - AccountName: detail.AccountName, - AccountBalance: detail.Balance, - AccountDelta: detail.BalanceDelta, - Order: detail.Order, - AccountOrder: detail.AccountOrder, - Nonce: detail.Nonce, - CollectionNonce: detail.CollectionNonce, - } - details = append(details, d) - } - } - return &types.Tx{ - TxHash: tx.TxHash, - TxType: tx.TxType, - GasFee: tx.GasFee, - GasFeeAssetId: tx.GasFeeAssetId, - TxStatus: tx.TxStatus, - BlockHeight: tx.BlockHeight, - BlockId: tx.BlockId, - StateRoot: tx.StateRoot, - NftIndex: tx.NftIndex, - PairIndex: tx.PairIndex, - AssetId: tx.AssetId, - TxAmount: tx.TxAmount, - NativeAddress: tx.NativeAddress, - TxInfo: tx.TxInfo, - TxDetails: details, - ExtraInfo: tx.ExtraInfo, - Memo: tx.Memo, - AccountIndex: tx.AccountIndex, - Nonce: tx.Nonce, - ExpiredAt: tx.ExpiredAt, - CreatedAt: tx.CreatedAt.Unix(), - } -} - -func MempoolTx2Tx(tx *mempool.MempoolTx) *types.Tx { - details := make([]*types.TxDetail, 0) - for _, detail := range tx.MempoolDetails { - d := &types.TxDetail{ - TxId: detail.TxId, - AssetId: detail.AssetId, - AssetType: detail.AssetType, - AccountIndex: detail.AccountIndex, - AccountName: detail.AccountName, - AccountBalance: detail.BalanceDelta, - Order: detail.Order, - AccountOrder: detail.AccountOrder, - } - details = append(details, d) - } - return &types.Tx{ - TxHash: tx.TxHash, - TxType: tx.TxType, - GasFee: tx.GasFee, - GasFeeAssetId: tx.GasFeeAssetId, - TxStatus: int64(tx.Status), - BlockHeight: tx.L2BlockHeight, - NftIndex: tx.NftIndex, - PairIndex: tx.PairIndex, - AssetId: tx.AssetId, - TxAmount: tx.TxAmount, - NativeAddress: tx.NativeAddress, - TxInfo: tx.TxInfo, - TxDetails: details, - ExtraInfo: tx.ExtraInfo, - Memo: tx.Memo, - AccountIndex: tx.AccountIndex, - Nonce: tx.Nonce, - ExpiredAt: tx.ExpiredAt, - } -} diff --git a/service/api/app/internal/repo/account/account.go b/service/api/app/internal/repo/account/account.go deleted file mode 100644 index 031e0b4e1..000000000 --- a/service/api/app/internal/repo/account/account.go +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package account - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) GetBasicAccountByAccountName(ctx context.Context, accountName string) (*table.Account, error) { - f := func() (interface{}, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by name: %s, error: %s", accountName, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil - } - account := &table.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountName(accountName), account, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ = value.(*table.Account) - return account, nil -} - -func (m *model) GetBasicAccountByAccountPk(ctx context.Context, accountPk string) (*table.Account, error) { - f := func() (interface{}, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("public_key = ?", accountPk).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by pk: %s, error: %s", accountPk, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil - } - account := &table.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountPk(accountPk), account, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ = value.(*table.Account) - return account, nil -} - -/* - Func: GetAccountByAccountIndex - Params: accountIndex int64 - Return: account Account, err error - Description: get account info by index -*/ - -func (m *model) GetAccountByAccountIndex(accountIndex int64) (account *table.Account, err error) { - dbTx := m.db.Table(m.table).Where("account_index = ?", accountIndex).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByPk - Params: pk string - Return: account Account, err error - Description: get account info by public key -*/ - -func (m *model) GetAccountByPk(pk string) (account *table.Account, err error) { - dbTx := m.db.Table(m.table).Where("public_key = ?", pk).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by pk: %s, error: %s", pk, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByAccountName - Params: accountName string - Return: account Account, err error - Description: get account info by account name -*/ -func (m *model) GetAccountByAccountName(ctx context.Context, accountName string) (*table.Account, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by account: %s, error: %s", accountName, 
dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountsList - Params: limit int, offset int64 - Return: err error - Description: For API /api/v1/info/getAccountsList - -*/ -func (m *model) GetAccountsList(limit int, offset int64) (accounts []*table.Account, err error) { - dbTx := m.db.Table(m.table).Limit(limit).Offset(int(offset)).Order("account_index desc").Find(&accounts) - if dbTx.Error != nil { - logx.Errorf("fail to get accounts, offset: %d, limit: %d, error: %s", offset, limit, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} - -/* - Func: GetAccountsTotalCount - Params: - Return: count int64, err error - Description: used for counting total accounts for explorer dashboard -*/ -func (m *model) GetAccountsTotalCount() (count int64, err error) { - dbTx := m.db.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - return count, nil -} diff --git a/service/api/app/internal/repo/account/api.go b/service/api/app/internal/repo/account/api.go deleted file mode 100644 index 879c2b8d8..000000000 --- a/service/api/app/internal/repo/account/api.go +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package account - -//go:generate mockgen -source api.go -destination api_mock.go -package account - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Model interface { - GetBasicAccountByAccountName(ctx context.Context, accountName string) (account *table.Account, err error) - GetBasicAccountByAccountPk(ctx context.Context, accountPK string) (account *table.Account, err error) - - GetAccountByAccountIndex(accountIndex int64) (account *table.Account, err error) - GetAccountByPk(pk string) (account *table.Account, err error) - GetAccountByAccountName(ctx context.Context, accountName string) (account *table.Account, err error) - GetAccountsList(limit int, offset int64) (accounts []*table.Account, err error) - GetAccountsTotalCount() (count int64, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `account`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/account/api_mock.go b/service/api/app/internal/repo/account/api_mock.go deleted file mode 100644 index 575c72202..000000000 --- a/service/api/app/internal/repo/account/api_mock.go +++ /dev/null @@ -1,140 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: api.go - -// Package account is a generated GoMock package. 
-package account - -import ( - context "context" - account "github.com/bnb-chain/zkbas/common/model/account" - gomock "github.com/golang/mock/gomock" - reflect "reflect" -) - -// MockModel is a mock of Model interface -type MockModel struct { - ctrl *gomock.Controller - recorder *MockModelMockRecorder -} - -// MockModelMockRecorder is the mock recorder for MockModel -type MockModelMockRecorder struct { - mock *MockModel -} - -// NewMockModel creates a new mock instance -func NewMockModel(ctrl *gomock.Controller) *MockModel { - mock := &MockModel{ctrl: ctrl} - mock.recorder = &MockModelMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockModel) EXPECT() *MockModelMockRecorder { - return m.recorder -} - -// GetBasicAccountByAccountName mocks base method -func (m *MockModel) GetBasicAccountByAccountName(ctx context.Context, accountName string) (*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBasicAccountByAccountName", ctx, accountName) - ret0, _ := ret[0].(*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBasicAccountByAccountName indicates an expected call of GetBasicAccountByAccountName -func (mr *MockModelMockRecorder) GetBasicAccountByAccountName(ctx, accountName interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBasicAccountByAccountName", reflect.TypeOf((*MockModel)(nil).GetBasicAccountByAccountName), ctx, accountName) -} - -// GetBasicAccountByAccountPk mocks base method -func (m *MockModel) GetBasicAccountByAccountPk(ctx context.Context, accountPK string) (*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBasicAccountByAccountPk", ctx, accountPK) - ret0, _ := ret[0].(*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBasicAccountByAccountPk indicates an expected call of GetBasicAccountByAccountPk -func (mr *MockModelMockRecorder) GetBasicAccountByAccountPk(ctx, accountPK interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBasicAccountByAccountPk", reflect.TypeOf((*MockModel)(nil).GetBasicAccountByAccountPk), ctx, accountPK) -} - -// GetAccountByAccountIndex mocks base method -func (m *MockModel) GetAccountByAccountIndex(accountIndex int64) (*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountByAccountIndex", accountIndex) - ret0, _ := ret[0].(*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountByAccountIndex indicates an expected call of GetAccountByAccountIndex -func (mr *MockModelMockRecorder) GetAccountByAccountIndex(accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountByAccountIndex", reflect.TypeOf((*MockModel)(nil).GetAccountByAccountIndex), accountIndex) -} - -// GetAccountByPk mocks base method -func (m *MockModel) GetAccountByPk(pk string) (*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountByPk", pk) - ret0, _ := ret[0].(*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountByPk indicates an expected call of GetAccountByPk -func (mr *MockModelMockRecorder) GetAccountByPk(pk interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountByPk", reflect.TypeOf((*MockModel)(nil).GetAccountByPk), pk) -} - -// 
GetAccountByAccountName mocks base method -func (m *MockModel) GetAccountByAccountName(ctx context.Context, accountName string) (*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountByAccountName", ctx, accountName) - ret0, _ := ret[0].(*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountByAccountName indicates an expected call of GetAccountByAccountName -func (mr *MockModelMockRecorder) GetAccountByAccountName(ctx, accountName interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountByAccountName", reflect.TypeOf((*MockModel)(nil).GetAccountByAccountName), ctx, accountName) -} - -// GetAccountsList mocks base method -func (m *MockModel) GetAccountsList(limit int, offset int64) ([]*account.Account, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountsList", limit, offset) - ret0, _ := ret[0].([]*account.Account) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountsList indicates an expected call of GetAccountsList -func (mr *MockModelMockRecorder) GetAccountsList(limit, offset interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountsList", reflect.TypeOf((*MockModel)(nil).GetAccountsList), limit, offset) -} - -// GetAccountsTotalCount mocks base method -func (m *MockModel) GetAccountsTotalCount() (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountsTotalCount") - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountsTotalCount indicates an expected call of GetAccountsTotalCount -func (mr *MockModelMockRecorder) GetAccountsTotalCount() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountsTotalCount", reflect.TypeOf((*MockModel)(nil).GetAccountsTotalCount)) -} diff --git a/service/api/app/internal/repo/block/api.go b/service/api/app/internal/repo/block/api.go deleted file mode 100644 index 7229b8642..000000000 --- a/service/api/app/internal/repo/block/api.go +++ /dev/null @@ -1,27 +0,0 @@ -package block - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Block interface { - GetCommittedBlocksCount(ctx context.Context) (count int64, err error) - GetVerifiedBlocksCount(ctx context.Context) (count int64, err error) - GetBlockWithTxsByCommitment(ctx context.Context, BlockCommitment string) (block *table.Block, err error) - GetBlockByBlockHeight(ctx context.Context, blockHeight int64) (block *table.Block, err error) - GetBlockWithTxsByBlockHeight(ctx context.Context, blockHeight int64) (block *table.Block, err error) - GetBlocksList(ctx context.Context, limit int64, offset int64) (blocks []*table.Block, err error) - GetBlocksTotalCount(ctx context.Context) (count int64, err error) - GetCurrentBlockHeight(ctx context.Context) (height int64, err error) -} - -func New(svcCtx *svc.ServiceContext) Block { - return &block{ - table: `block`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/block/block.go b/service/api/app/internal/repo/block/block.go deleted file mode 100755 index 049fdd3b4..000000000 --- a/service/api/app/internal/repo/block/block.go +++ /dev/null @@ -1,223 +0,0 @@ -package block - -import ( - "context" - "fmt" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - 
"github.com/zeromicro/go-zero/core/stores/redis" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type block struct { - table string - db *gorm.DB - cache multcache.MultCache - redisConn *redis.Redis -} - -/* - Func: GetBlockByBlockHeight - Params: blockHeight int64 - Return: err error - Description: For API /api/v1/block/getBlockByBlockHeight -*/ -func (m *block) GetBlockByBlockHeight(ctx context.Context, blockHeight int64) (*table.Block, error) { - f := func() (interface{}, error) { - _block := &table.Block{} - dbTx := m.db.Table(m.table).Where("block_height = ?", blockHeight).Find(_block) - if dbTx.Error != nil { - logx.Errorf("fail to get block by height: %d, error: %s", blockHeight, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return _block, nil - } - _block := &table.Block{} - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetBlockByBlockHeight+strconv.FormatInt(blockHeight, 10), _block, multcache.BlockTtl, f) - if err != nil { - return nil, err - } - _block, _ = value.(*table.Block) - return _block, nil - -} - -func (m *block) GetCommittedBlocksCount(ctx context.Context) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Where("block_status = ? and deleted_at is NULL", table.StatusCommitted).Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get committed block count, error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &count, nil - } - var count int64 - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetCommittedBlocksCount, &count, multcache.BlockCountTtl, f) - if err != nil { - return count, err - } - count1, _ := value.(*int64) - return *count1, nil -} - -func (m *block) GetVerifiedBlocksCount(ctx context.Context) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Where("block_status = ? 
and deleted_at is NULL", table.StatusVerifiedAndExecuted).Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get verified block count, error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &count, nil - } - var count int64 - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetVerifiedBlocksCount, &count, multcache.BlockCountTtl, f) - if err != nil { - return count, err - } - count1, _ := value.(*int64) - return *count1, nil - -} - -func (m *block) GetBlockWithTxsByCommitment(ctx context.Context, blockCommitment string) (*table.Block, error) { - f := func() (interface{}, error) { - txForeignKeyColumn := `Txs` - _block := &table.Block{} - dbTx := m.db.Table(m.table).Where("block_commitment = ?", blockCommitment).Find(_block) - if dbTx.Error != nil { - logx.Errorf("fail to get block by commitment: %d, error: %s", blockCommitment, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - if err := m.db.Model(&_block).Association(txForeignKeyColumn).Find(&_block.Txs); err != nil { - return nil, errorcode.DbErrNotFound - } - return _block, nil - } - _block := &table.Block{} - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetBlockBlockCommitment+blockCommitment, _block, multcache.BlockTtl, f) - if err != nil { - return nil, err - } - _block, _ = value.(*table.Block) - return _block, nil - -} - -func (m *block) GetBlockWithTxsByBlockHeight(ctx context.Context, blockHeight int64) (*table.Block, error) { - f := func() (interface{}, error) { - txForeignKeyColumn := `Txs` - _block := &table.Block{} - dbTx := m.db.Table(m.table).Where("block_height = ?", blockHeight).Find(_block) - if dbTx.Error != nil { - logx.Errorf("fail to get block by height: %d, error: %s", blockHeight, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - err := m.db.Model(&_block).Association(txForeignKeyColumn).Find(&_block.Txs) - if err != nil { - return nil, errorcode.DbErrNotFound - } - return _block, nil - } - block := &table.Block{} - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetBlockWithTxHeight+strconv.FormatInt(blockHeight, 10), block, multcache.BlockTtl, f) - if err != nil { - return nil, err - } - block, _ = value.(*table.Block) - return block, nil - -} - -func (m *block) GetBlocksList(ctx context.Context, limit int64, offset int64) ([]*table.Block, error) { - f := func() (interface{}, error) { - var blockList []*table.Block - dbTx := m.db.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("block_height desc").Find(&blockList) - if dbTx.Error != nil { - logx.Errorf("fail to get blocks offset: %d, limit: %d, error: %s", offset, limit, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - for _, _block := range blockList { - if err := m.db.Model(&_block).Association(`Txs`).Find(&_block.Txs); err != nil { - return nil, err - } - } - return &blockList, nil - } - var blockList []*table.Block - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetBlockList+strconv.FormatInt(limit, 10)+strconv.FormatInt(offset, 10), &blockList, multcache.BlockListTtl, f) - if err != nil { - return nil, err - } - blockList1, ok := value.(*[]*table.Block) - if !ok { - return nil, fmt.Errorf("[GetBlocksList] ErrConvertFail") - } - return 
*blockList1, nil -} - -func (m *block) GetBlocksTotalCount(ctx context.Context) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get block count, error: %s", dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return 0, errorcode.DbErrNotFound - } - return &count, nil - } - var count int64 - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetBlocksTotalCount, &count, multcache.BlockCountTtl, f) - if err != nil { - return count, err - } - count1, ok := value.(*int64) - if !ok { - return 0, fmt.Errorf("[GetBlocksTotalCount] ErrConvertFail") - } - return *count1, nil -} -func (m *block) GetCurrentBlockHeight(ctx context.Context) (int64, error) { - f := func() (interface{}, error) { - var blockHeight int64 - dbTx := m.db.Table(m.table).Select("block_height").Order("block_height desc").Limit(1).Find(&blockHeight) - if dbTx.Error != nil { - logx.Errorf("fail to get block height, error: %s", dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return 0, errorcode.DbErrNotFound - } - return &blockHeight, nil - } - - var height int64 - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetCurrentBlockHeight, &height, multcache.BlockHeightTtl, f) - if err != nil { - return height, err - } - height1, ok := value.(*int64) - if !ok { - return 0, fmt.Errorf("[GetCurrentBlockHeight] ErrConvertFail") - } - return *height1, nil -} diff --git a/service/api/app/internal/repo/block/cmd/block.sql b/service/api/app/internal/repo/block/cmd/block.sql deleted file mode 100644 index 9c84b6b10..000000000 --- a/service/api/app/internal/repo/block/cmd/block.sql +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright © 2021 Zecrey Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -CREATE TABLE `block` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `is_deleted` tinyint(1) DEFAULT 0 COMMENT 'is deleted?: 1 for yes, 0 for no', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - - `block_commitment` varchar(255) NOT NULL, - `block_height` bigint unsigned NOT NULL, - `block_status` tinyint unsigned NOT NULL, - `account_root` varchar(100) DEFAULT NULL, - `verified_tx_hash` varchar(200) DEFAULT NULL, - `verified_at` int NULL DEFAULT NULL, - `committed_tx_hash` varchar(200) DEFAULT NULL, - `committed_at` int NULL DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `idx_block_block_commitment` (`block_commitment`), - KEY `idx_block_l2_block_height` (`block_height`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_general_ci; \ No newline at end of file diff --git a/service/api/app/internal/repo/globalrpc/api.go b/service/api/app/internal/repo/globalrpc/api.go deleted file mode 100644 index 29e711e60..000000000 --- a/service/api/app/internal/repo/globalrpc/api.go +++ /dev/null @@ -1,48 +0,0 @@ -package globalrpc - -import ( - "context" - - "github.com/zeromicro/go-zero/zrpc" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalrpc" -) - -type GlobalRPC interface { - SendTx(ctx context.Context, txType uint32, txInfo string) (string, error) - GetLpValue(ctx context.Context, pairIndex uint32, lpAmount string) (*globalRPCProto.RespGetLpValue, error) - GetPairInfo(ctx context.Context, pairIndex uint32) (*globalRPCProto.RespGetLatestPairInfo, error) - GetSwapAmount(ctx context.Context, pairIndex, assetId uint64, assetAmount string, isFrom bool) (string, uint32, error) - GetNextNonce(ctx context.Context, accountIndex uint32) (uint64, error) - GetLatestAssetsListByAccountIndex(ctx context.Context, accountIndex uint32) ([]*globalrpc.AssetResult, error) - GetLatestAccountInfoByAccountIndex(ctx context.Context, accountIndex int64) (*globalrpc.RespGetLatestAccountInfoByAccountIndex, error) - GetMaxOfferId(ctx context.Context, accountIndex uint32) (uint64, error) - SendMintNftTx(ctx context.Context, txInfo string) (int64, error) - SendCreateCollectionTx(ctx context.Context, txInfo string) (int64, error) - - SendAddLiquidityTx(ctx context.Context, txInfo string) (string, error) - SendAtomicMatchTx(ctx context.Context, txInfo string) (string, error) - SendCancelOfferTx(ctx context.Context, txInfo string) (string, error) - SendRemoveLiquidityTx(ctx context.Context, txInfo string) (string, error) - SendSwapTx(ctx context.Context, txInfo string) (string, error) - SendTransferNftTx(ctx context.Context, txInfo string) (string, error) - SendTransferTx(ctx context.Context, txInfo string) (string, error) - SendWithdrawNftTx(ctx context.Context, txInfo string) (string, error) - SendWithdrawTx(ctx context.Context, txInfo string) (string, error) -} - -func New(svcCtx *svc.ServiceContext, ctx context.Context) GlobalRPC { - return &globalRPC{ - AccountModel: account.NewAccountModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - AccountHistoryModel: account.NewAccountHistoryModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - MempoolModel: mempool.NewMempoolModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - 
MempoolDetailModel: mempool.NewMempoolDetailModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - RedisConnection: svcCtx.RedisConn, - globalRPC: globalrpc.NewGlobalRPC(zrpc.MustNewClient(svcCtx.Config.GlobalRpc)), - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/globalrpc/globalrpc.go b/service/api/app/internal/repo/globalrpc/globalrpc.go deleted file mode 100644 index 481dd0239..000000000 --- a/service/api/app/internal/repo/globalrpc/globalrpc.go +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package globalrpc - -import ( - "context" - "sort" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/pkg/multcache" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalrpc" -) - -type globalRPC struct { - AccountModel account.AccountModel - AccountHistoryModel account.AccountHistoryModel - MempoolModel mempool.MempoolModel - MempoolDetailModel mempool.MempoolTxDetailModel - RedisConnection *redis.Redis - globalRPC globalrpc.GlobalRPC - cache multcache.MultCache -} - -func (m *globalRPC) GetSwapAmount(ctx context.Context, pairIndex, assetId uint64, assetAmount string, isFrom bool) (string, uint32, error) { - resRpc, err := m.globalRPC.GetSwapAmount(ctx, &globalrpc.ReqGetSwapAmount{ - PairIndex: uint32(pairIndex), - AssetId: uint32(assetId), - AssetAmount: assetAmount, - IsFrom: isFrom, - }) - if err != nil { - return "", 0, err - } - return resRpc.SwapAssetAmount, resRpc.SwapAssetId, err -} - -func (m *globalRPC) GetLpValue(ctx context.Context, pairIndex uint32, lpAmount string) (*globalRPCProto.RespGetLpValue, error) { - return m.globalRPC.GetLpValue(ctx, &globalrpc.ReqGetLpValue{ - PairIndex: pairIndex, - LPAmount: lpAmount, - }) -} - -func (m *globalRPC) GetPairInfo(ctx context.Context, pairIndex uint32) (*globalRPCProto.RespGetLatestPairInfo, error) { - return m.globalRPC.GetLatestPairInfo(ctx, &globalrpc.ReqGetLatestPairInfo{ - PairIndex: pairIndex, - }) -} - -func (m *globalRPC) GetNextNonce(ctx context.Context, accountIndex uint32) (uint64, error) { - rpcRsp, err := m.globalRPC.GetNextNonce(ctx, &globalrpc.ReqGetNextNonce{ - AccountIndex: accountIndex, - }) - return rpcRsp.GetNonce(), err -} - -func (m *globalRPC) GetLatestAssetsListByAccountIndex(ctx context.Context, accountIndex uint32) ([]*globalrpc.AssetResult, error) { - res, err := m.globalRPC.GetLatestAssetsListByAccountIndex(ctx, &globalrpc.ReqGetLatestAssetsListByAccountIndex{ - AccountIndex: accountIndex}) - return res.ResultAssetsList, err -} - -func (m *globalRPC) GetLatestAccountInfoByAccountIndex(ctx context.Context, accountIndex int64) (*globalrpc.RespGetLatestAccountInfoByAccountIndex, error) { - f := func() (interface{}, error) { - res, err := 
m.globalRPC.GetLatestAccountInfoByAccountIndex(ctx, &globalrpc.ReqGetLatestAccountInfoByAccountIndex{ - AccountIndex: uint32(accountIndex), - }) - if err != nil { - return nil, err - } - sort.SliceStable(res.AccountAsset, func(i, j int) bool { - return res.AccountAsset[i].AssetId < res.AccountAsset[j].AssetId - }) - return res, nil - } - account := &globalRPCProto.RespGetLatestAccountInfoByAccountIndex{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex), account, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ = value.(*globalRPCProto.RespGetLatestAccountInfoByAccountIndex) - return account, err -} - -func (m *globalRPC) GetMaxOfferId(ctx context.Context, accountIndex uint32) (uint64, error) { - rpcRsp, err := m.globalRPC.GetMaxOfferId(ctx, &globalrpc.ReqGetMaxOfferId{ - AccountIndex: accountIndex, - }) - return rpcRsp.GetOfferId(), err -} - -func (m *globalRPC) SendTx(ctx context.Context, txType uint32, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendTx(ctx, &globalrpc.ReqSendTx{ - TxType: txType, - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendMintNftTx(ctx context.Context, txInfo string) (int64, error) { - rpcRsp, err := m.globalRPC.SendMintNftTx(ctx, &globalrpc.ReqSendMintNftTx{ - TxInfo: txInfo, - }) - return rpcRsp.GetNftIndex(), err -} - -func (m *globalRPC) SendCreateCollectionTx(ctx context.Context, txInfo string) (int64, error) { - rpcRsp, err := m.globalRPC.SendCreateCollectionTx(ctx, &globalrpc.ReqSendCreateCollectionTx{ - TxInfo: txInfo, - }) - return rpcRsp.GetCollectionId(), err -} - -func (m *globalRPC) SendAddLiquidityTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendAddLiquidityTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendAtomicMatchTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendAtomicMatchTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendCancelOfferTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendCancelOfferTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendRemoveLiquidityTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendRemoveLiquidityTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendSwapTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendSwapTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendTransferNftTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendTransferNftTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendTransferTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendTransferTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) SendWithdrawNftTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendWithdrawNftTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} - -func (m *globalRPC) 
SendWithdrawTx(ctx context.Context, txInfo string) (string, error) { - rpcRsp, err := m.globalRPC.SendWithdrawTx(ctx, &globalrpc.ReqSendTxByRawInfo{ - TxInfo: txInfo, - }) - return rpcRsp.GetTxId(), err -} diff --git a/service/api/app/internal/repo/globalrpc/vars.go b/service/api/app/internal/repo/globalrpc/vars.go deleted file mode 100644 index 538f1c817..000000000 --- a/service/api/app/internal/repo/globalrpc/vars.go +++ /dev/null @@ -1 +0,0 @@ -package globalrpc diff --git a/service/api/app/internal/repo/l2asset/api.go b/service/api/app/internal/repo/l2asset/api.go deleted file mode 100644 index 4c1c1d501..000000000 --- a/service/api/app/internal/repo/l2asset/api.go +++ /dev/null @@ -1,22 +0,0 @@ -package l2asset - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type L2asset interface { - GetL2AssetsList(ctx context.Context) (res []*table.AssetInfo, err error) - GetL2AssetInfoBySymbol(ctx context.Context, symbol string) (res *table.AssetInfo, err error) - GetSimpleL2AssetInfoByAssetId(ctx context.Context, assetId uint32) (res *table.AssetInfo, err error) -} - -func New(svcCtx *svc.ServiceContext) L2asset { - return &l2asset{ - table: `asset_info`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/l2asset/cmd/l2_asset_info.sql b/service/api/app/internal/repo/l2asset/cmd/l2_asset_info.sql deleted file mode 100644 index 6de4a824f..000000000 --- a/service/api/app/internal/repo/l2asset/cmd/l2_asset_info.sql +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright © 2021 Zecrey Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -CREATE TABLE `l2_asset_info` ( - `id` bigint unsigned NOT NULL AUTO_INCREMENT, - `is_deleted` tinyint(1) DEFAULT 0 COMMENT 'is deleted?: 1 for yes, 0 for no', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - - `l2_asset_id` bigint unsigned NOT NULL, - `l2_asset_name` varchar(50) DEFAULT NULL, - `l2_decimals` tinyint unsigned NOT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `idx_l2_asset_info_l2_asset_id` (`l2_asset_id`) - UNIQUE KEY `idx_l2_asset_info_l2_asset_name` (`l2_asset_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_general_ci; \ No newline at end of file diff --git a/service/api/app/internal/repo/l2asset/l2asset.go b/service/api/app/internal/repo/l2asset/l2asset.go deleted file mode 100644 index 96d4edd39..000000000 --- a/service/api/app/internal/repo/l2asset/l2asset.go +++ /dev/null @@ -1,113 +0,0 @@ -package l2asset - -import ( - "context" - "fmt" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type l2asset struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetL2AssetsList - Params: - Return: err error - Description: create account table -*/ -func (m *l2asset) GetL2AssetsList(ctx context.Context) ([]*table.AssetInfo, error) { - f := func() (interface{}, error) { - var res []*table.AssetInfo - dbTx := m.db.Table(m.table).Find(&res) - if dbTx.Error != nil { - logx.Errorf("fail to get assets, error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &res, nil - } - var res []*table.AssetInfo - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetL2AssetsList, &res, multcache.AssetListTtl, f) - if err != nil { - return nil, err - } - res1, ok := value.(*[]*table.AssetInfo) - if !ok { - return nil, fmt.Errorf("[GetL2AssetsList] ErrConvertFail") - } - return *res1, nil -} - -/* - Func: GetL2AssetInfoBySymbol - Params: symbol string - Return: res *L2AssetInfo, err error - Description: get l2 asset info by l2 symbol -*/ -func (m *l2asset) GetL2AssetInfoBySymbol(ctx context.Context, symbol string) (*table.AssetInfo, error) { - f := func() (interface{}, error) { - res := table.AssetInfo{} - dbTx := m.db.Table(m.table).Where("asset_symbol = ?", symbol).Find(&res) - if dbTx.Error != nil { - logx.Errorf("fail to get asset by symbol: %s, error: %s", symbol, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &res, nil - } - res := table.AssetInfo{} - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetL2AssetInfoBySymbol+symbol, &res, multcache.AssetTtl, f) - if err != nil { - return nil, err - } - res1, ok := value.(*table.AssetInfo) - if !ok { - return nil, fmt.Errorf("[GetL2AssetInfoBySymbol] ErrConvertFail") - } - return res1, nil -} - -/* - Func: GetSimpleL2AssetInfoByAssetId - Params: assetId uint32 - Return: L2AssetInfo, error - Description: get layer-2 asset info by assetId -*/ -func (m *l2asset) GetSimpleL2AssetInfoByAssetId(ctx context.Context, assetId uint32) (*table.AssetInfo, error) { - f := func() (interface{}, error) { - res := table.AssetInfo{} - dbTx := m.db.Table(m.table).Where("asset_id = ?", assetId).Find(&res) - if dbTx.Error != nil { - logx.Errorf("fail to 
get asset by id: %d, error: %s", assetId, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &res, nil - } - res := table.AssetInfo{} - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetSimpleL2AssetInfoByAssetId+strconv.Itoa(int(assetId)), &res, multcache.AssetTtl, f) - if err != nil { - return nil, err - } - res1, ok := value.(*table.AssetInfo) - if !ok { - return nil, fmt.Errorf("[GetSimpleL2AssetInfoByAssetId] ErrConvertFail") - } - return res1, nil -} diff --git a/service/api/app/internal/repo/liquidity/api.go b/service/api/app/internal/repo/liquidity/api.go deleted file mode 100644 index 84815d1c6..000000000 --- a/service/api/app/internal/repo/liquidity/api.go +++ /dev/null @@ -1,21 +0,0 @@ -package liquidity - -import ( - table "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -//go:generate mockgen -source api.go -destination api_mock.go -package liquidity - -type LiquidityModel interface { - GetLiquidityByPairIndex(pairIndex int64) (entity *table.Liquidity, err error) - GetAllLiquidityAssets() (entity []*table.Liquidity, err error) -} - -func New(svcCtx *svc.ServiceContext) LiquidityModel { - return &liquidityModel{ - table: `liquidity`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/liquidity/api_mock.go b/service/api/app/internal/repo/liquidity/api_mock.go deleted file mode 100644 index 1c17c48a6..000000000 --- a/service/api/app/internal/repo/liquidity/api_mock.go +++ /dev/null @@ -1,65 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: api.go - -// Package liquidity is a generated GoMock package. -package liquidity - -import ( - reflect "reflect" - - gomock "github.com/golang/mock/gomock" - liquidity "github.com/bnb-chain/zkbas/common/model/liquidity" -) - -// MockLiquidityModel is a mock of LiquidityModel interface. -type MockLiquidityModel struct { - ctrl *gomock.Controller - recorder *MockLiquidityModelMockRecorder -} - -// MockLiquidityModelMockRecorder is the mock recorder for MockLiquidityModel. -type MockLiquidityModelMockRecorder struct { - mock *MockLiquidityModel -} - -// NewMockLiquidityModel creates a new mock instance. -func NewMockLiquidityModel(ctrl *gomock.Controller) *MockLiquidityModel { - mock := &MockLiquidityModel{ctrl: ctrl} - mock.recorder = &MockLiquidityModelMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockLiquidityModel) EXPECT() *MockLiquidityModelMockRecorder { - return m.recorder -} - -// GetAllLiquidityAssets mocks base method. -func (m *MockLiquidityModel) GetAllLiquidityAssets() ([]*liquidity.Liquidity, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAllLiquidityAssets") - ret0, _ := ret[0].([]*liquidity.Liquidity) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAllLiquidityAssets indicates an expected call of GetAllLiquidityAssets. -func (mr *MockLiquidityModelMockRecorder) GetAllLiquidityAssets() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllLiquidityAssets", reflect.TypeOf((*MockLiquidityModel)(nil).GetAllLiquidityAssets)) -} - -// GetLiquidityByPairIndex mocks base method. 
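// The mock in this file is generated by mockgen from api.go (per the go:generate directive in api.go above),
// so logic-layer tests can stub the liquidity repo without a database. A minimal sketch of how a test might
// drive it, assuming the usual testing and gomock imports; the test name and returned values below are
// illustrative only, not code from this repository:
//
//	func TestLiquidityLogic(t *testing.T) {
//		ctrl := gomock.NewController(t)
//		defer ctrl.Finish()
//
//		repo := NewMockLiquidityModel(ctrl)
//		// Expect exactly one lookup and hand back an empty liquidity row.
//		repo.EXPECT().GetLiquidityByPairIndex(gomock.Any()).
//			Return(&liquidity.Liquidity{}, nil).Times(1)
//
//		pair, err := repo.GetLiquidityByPairIndex(0)
//		if err != nil || pair == nil {
//			t.Fatal("unexpected mock result")
//		}
//	}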
-func (m *MockLiquidityModel) GetLiquidityByPairIndex(pairIndex int64) (*liquidity.Liquidity, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLiquidityByPairIndex", pairIndex) - ret0, _ := ret[0].(*liquidity.Liquidity) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLiquidityByPairIndex indicates an expected call of GetLiquidityByPairIndex. -func (mr *MockLiquidityModelMockRecorder) GetLiquidityByPairIndex(pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLiquidityByPairIndex", reflect.TypeOf((*MockLiquidityModel)(nil).GetLiquidityByPairIndex), pairIndex) -} diff --git a/service/api/app/internal/repo/liquidity/liquidity.go b/service/api/app/internal/repo/liquidity/liquidity.go deleted file mode 100644 index 56d496838..000000000 --- a/service/api/app/internal/repo/liquidity/liquidity.go +++ /dev/null @@ -1,41 +0,0 @@ -package liquidity - -import ( - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type liquidityModel struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetAccountLiquidityByPairIndex - Params: pairIndex int64 - Return: entities []*Liquidity, err error - Description: get account liquidity entities by account index -*/ -func (m *liquidityModel) GetLiquidityByPairIndex(pairIndex int64) (entity *table.Liquidity, err error) { - dbTx := m.db.Table(m.table).Where("pair_index = ?", pairIndex).Find(&entity) - if dbTx.Error != nil { - return entity, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return entity, nil -} - -func (m *liquidityModel) GetAllLiquidityAssets() (entity []*table.Liquidity, err error) { - dbTx := m.db.Table(m.table).Raw("SELECT * FROM liquidity").Find(&entity) - if dbTx.Error != nil { - return entity, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return entity, nil -} diff --git a/service/api/app/internal/repo/mempool/api.go b/service/api/app/internal/repo/mempool/api.go deleted file mode 100644 index 6c00e3f80..000000000 --- a/service/api/app/internal/repo/mempool/api.go +++ /dev/null @@ -1,24 +0,0 @@ -package mempool - -import ( - "context" - - mempoolModel "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Mempool interface { - GetMempoolTxs(offset int, limit int) (mempoolTx []*mempoolModel.MempoolTx, err error) - GetMempoolTxsTotalCount() (count int64, err error) - GetMempoolTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) - GetMempoolTxByTxHash(hash string) (mempoolTxs *mempoolModel.MempoolTx, err error) - GetMempoolTxByTxId(ctx context.Context, txId int64) (mempoolTxs *mempoolModel.MempoolTx, err error) -} - -func New(svcCtx *svc.ServiceContext) Mempool { - return &model{ - table: `mempool_tx`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/mempool/mempool.go b/service/api/app/internal/repo/mempool/mempool.go deleted file mode 100644 index b2fd66dbb..000000000 --- a/service/api/app/internal/repo/mempool/mempool.go +++ /dev/null @@ -1,118 +0,0 @@ -package mempool - -import ( - "context" - "sort" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/errorcode" - 
"github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetMempoolTxs - Params: offset uint64, limit uint64 - Return: mempoolTx []*mempoolModel.MempoolTx, err error - Description: query txs from db that sit in the range -*/ -func (m *model) GetMempoolTxs(offset int, limit int) (mempoolTxs []*table.MempoolTx, err error) { - var mempoolForeignKeyColumn = `MempoolDetails` - dbTx := m.db.Table(m.table).Where("status = ? and deleted_at is NULL", PendingTxStatus).Order("created_at, id").Offset(offset).Limit(limit).Find(&mempoolTxs) - if dbTx.Error != nil { - logx.Errorf("[mempool.GetMempoolTxsList] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } - for _, mempoolTx := range mempoolTxs { - err := m.db.Model(&mempoolTx).Association(mempoolForeignKeyColumn).Find(&mempoolTx.MempoolDetails) - if err != nil { - logx.Errorf("[mempool.GetMempoolTxsList] Get Associate MempoolDetails Error") - return nil, err - } - } - return mempoolTxs, nil -} - -func (m *model) GetMempoolTxsTotalCount() (count int64, err error) { - dbTx := m.db.Table(m.table).Where("status = ? and deleted_at is NULL", PendingTxStatus).Count(&count) - if dbTx.Error != nil { - logx.Errorf("[txVerification.GetTxsTotalCount] %s", dbTx.Error) - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - return count, nil -} - -func (m *model) GetMempoolTxByTxHash(hash string) (mempoolTx *table.MempoolTx, err error) { - var mempoolForeignKeyColumn = `MempoolDetails` - dbTx := m.db.Table(m.table).Where("status = ? and tx_hash = ?", PendingTxStatus, hash).Find(&mempoolTx) - if dbTx.Error != nil { - logx.Errorf("[GetMempoolTxByTxHash] %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - if err = m.db.Model(&mempoolTx).Association(mempoolForeignKeyColumn).Find(&mempoolTx.MempoolDetails); err != nil { - logx.Errorf("[mempool.GetMempoolTxByTxHash] Get Associate MempoolDetails Error") - return nil, err - } - return mempoolTx, nil -} - -func (m *model) GetMempoolTxsTotalCountByAccountIndex(accountIndex int64) (count int64, err error) { - var ( - mempoolDetailTable = `mempool_tx_detail` - mempoolIds []int64 - ) - var mempoolTxDetails []*table.MempoolTxDetail - dbTx := m.db.Table(mempoolDetailTable).Select("tx_id").Where("account_index = ?", accountIndex).Find(&mempoolTxDetails).Group("tx_id").Find(&mempoolIds) - if dbTx.Error != nil { - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - dbTx = m.db.Table(m.table).Where("status = ? and id in (?) and deleted_at is NULL", PendingTxStatus, mempoolIds).Count(&count) - if dbTx.Error != nil { - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - return count, nil -} - -func (m *model) GetMempoolTxByTxId(ctx context.Context, txID int64) (*table.MempoolTx, error) { - f := func() (interface{}, error) { - tx := &table.MempoolTx{} - dbTx := m.db.Table(m.table).Where("id = ? 
and deleted_at is NULL", txID).Find(&tx) - if dbTx.Error != nil { - logx.Errorf("fail to get mempool tx by id: %d, error: %s", txID, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - err := m.db.Model(&tx).Association(`MempoolDetails`).Find(&tx.MempoolDetails) - if err != nil { - return nil, err - } - sort.SliceStable(tx.MempoolDetails, func(i, j int) bool { - return tx.MempoolDetails[i].Order < tx.MempoolDetails[j].Order - }) - return tx, nil - } - tx := &table.MempoolTx{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxByTxId(txID), tx, multcache.MempoolTxTtl, f) - if err != nil { - return nil, err - } - tx, _ = value.(*table.MempoolTx) - return tx, nil -} diff --git a/service/api/app/internal/repo/mempool/vars.go b/service/api/app/internal/repo/mempool/vars.go deleted file mode 100644 index a4b9f1ff2..000000000 --- a/service/api/app/internal/repo/mempool/vars.go +++ /dev/null @@ -1,6 +0,0 @@ -package mempool - -const ( - PendingTxStatus = iota - HandledTxStatus -) diff --git a/service/api/app/internal/repo/mempooltxdetail/api.go b/service/api/app/internal/repo/mempooltxdetail/api.go deleted file mode 100644 index 61f20a712..000000000 --- a/service/api/app/internal/repo/mempooltxdetail/api.go +++ /dev/null @@ -1,20 +0,0 @@ -package mempooltxdetail - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Model interface { - GetMemPoolTxDetailByAccountIndex(ctx context.Context, accountIndex int64) (mempoolTx []*table.MempoolTxDetail, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `mempool_tx_detail`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/mempooltxdetail/mempooltxdetail.go b/service/api/app/internal/repo/mempooltxdetail/mempooltxdetail.go deleted file mode 100644 index 1ad00d137..000000000 --- a/service/api/app/internal/repo/mempooltxdetail/mempooltxdetail.go +++ /dev/null @@ -1,36 +0,0 @@ -package mempooltxdetail - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetMempoolTxs - Params: offset uint64, limit uint64 - Return: mempoolTx []*mempoolModel.MempoolTx, err error - Description: query txs from db that sit in the range -*/ -func (m *model) GetMemPoolTxDetailByAccountIndex(ctx context.Context, accountIndex int64) ([]*table.MempoolTxDetail, error) { - result := make([]*table.MempoolTxDetail, 0) - dbTx := m.db.Table(m.table).Where("account_index = ?", accountIndex).Order("created_at").Find(&result) - if dbTx.Error != nil { - logx.Errorf("fail to get mempool tx by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return result, nil -} diff --git a/service/api/app/internal/repo/nft/api.go b/service/api/app/internal/repo/nft/api.go deleted file mode 100644 index ed68fc635..000000000 --- a/service/api/app/internal/repo/nft/api.go +++ /dev/null @@ -1,21 +0,0 @@ -package nft - -import ( - "context" - - nftModel "github.com/bnb-chain/zkbas/common/model/nft" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Nft interface { - GetNftListByAccountIndex(ctx context.Context, accountIndex, limit, offset int64) (nfts []*nftModel.L2Nft, err error) - GetAccountNftTotalCount(ctx context.Context, accountIndex int64) (int64, error) -} - -func New(svcCtx *svc.ServiceContext) Nft { - return &nft{ - table: nftModel.L2NftTableName, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/nft/nft.go b/service/api/app/internal/repo/nft/nft.go deleted file mode 100644 index b932f072a..000000000 --- a/service/api/app/internal/repo/nft/nft.go +++ /dev/null @@ -1,71 +0,0 @@ -package nft - -import ( - "context" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "gorm.io/gorm" - - nftModel "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type nft struct { - table string - db *gorm.DB - cache multcache.MultCache - redisConn *redis.Redis -} - -func (n *nft) GetNftListByAccountIndex(ctx context.Context, accountIndex, limit, offset int64) (nfts []*nftModel.L2Nft, err error) { - f := func() (interface{}, error) { - nftList := make([]*nftModel.L2Nft, 0) - dbTx := n.db.Table(n.table).Where("owner_account_index = ? and deleted_at is NULL", accountIndex). - Limit(int(limit)).Offset(int(offset)).Order("nft_index desc").Find(&nftList) - if dbTx.Error != nil { - logx.Errorf("fail to get nfts by account: %d, offset: %d, limit: %d, error: %s", accountIndex, offset, limit, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &nftList, nil - } - nftList := make([]*nftModel.L2Nft, 0) - value, err := n.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountNftList(accountIndex, offset, limit), &nftList, multcache.NftListTtl, f) - if err != nil { - return nil, err - } - nftListStored, ok := value.(*[]*nftModel.L2Nft) - if !ok { - return nil, fmt.Errorf("[GetNftListByAccountIndex] ErrConvertFail") - } - return *nftListStored, nil -} - -func (n *nft) GetAccountNftTotalCount(ctx context.Context, accountIndex int64) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := n.db.Table(n.table).Where("owner_account_index = ? 
and deleted_at is NULL", accountIndex).Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get nft count by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return 0, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return 0, errorcode.DbErrNotFound - } - return &count, nil - } - - var count int64 - value, err := n.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountTotalNftCount(accountIndex), &count, multcache.NftCountTtl, f) - if err != nil { - return count, err - } - countStored, ok := value.(*int64) - if !ok { - return 0, fmt.Errorf("[GetAccountNftTotalCount] ErrConvertFail") - } - return *countStored, nil -} diff --git a/service/api/app/internal/repo/price/api.go b/service/api/app/internal/repo/price/api.go deleted file mode 100644 index abb566177..000000000 --- a/service/api/app/internal/repo/price/api.go +++ /dev/null @@ -1,17 +0,0 @@ -package price - -import ( - "context" - - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Price interface { - GetCurrencyPrice(ctx context.Context, l2Symbol string) (price float64, err error) -} - -func New(svcCtx *svc.ServiceContext) Price { - return &price{ - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/price/price.go b/service/api/app/internal/repo/price/price.go deleted file mode 100644 index 86598e38a..000000000 --- a/service/api/app/internal/repo/price/price.go +++ /dev/null @@ -1,87 +0,0 @@ -package price - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "net/http" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type price struct { - cache multcache.MultCache -} - -/* - Func: GetCurrencyPrice - Params: currency string - Return: price float64, err error - Description: get currency price cache by currency symbol -*/ -func (m *price) GetCurrencyPrice(ctx context.Context, l2Symbol string) (float64, error) { - f := func() (interface{}, error) { - quoteMap, err := getQuotesLatest(l2Symbol) - if err != nil { - return 0, err - } - return "eMap, nil - } - var quoteType map[string]QuoteLatest - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyCurrencyPrice(), "eType, multcache.PriceTtl, f) - if err != nil { - return 0, err - } - res, _ := value.(*map[string]QuoteLatest) - quoteMap := *res - q, ok := quoteMap[l2Symbol] - if !ok { - return 0, errorcode.AppErrQuoteNotExist - } - return q.Quote["USD"].Price, nil -} - -func getQuotesLatest(l2Symbol string) (map[string]QuoteLatest, error) { - client := &http.Client{} - url := fmt.Sprintf("%s%s", coinMarketCap, l2Symbol) - reqest, err := http.NewRequest("GET", url, nil) - if err != nil { - return nil, errorcode.HttpErrFailToRequest - } - reqest.Header.Add("X-CMC_PRO_API_KEY", "cfce503f-dd3d-4847-9570-bbab5257dac8") - reqest.Header.Add("Accept", "application/json") - resp, err := client.Do(reqest) - if err != nil { - return nil, errorcode.HttpErrClientDo - } - defer resp.Body.Close() - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, errorcode.IoErrFailToRead - } - currencyPrice := ¤cyPrice{} - if err = json.Unmarshal(body, ¤cyPrice); err != nil { - return nil, errorcode.JsonErrUnmarshal - } - ifcs, ok := currencyPrice.Data.(interface{}) - if !ok { - return nil, errors.New("type conversion error") - } - quotesLatest := make(map[string]QuoteLatest, 0) - for _, coinObj := range ifcs.(map[string]interface{}) { - b, err := json.Marshal(coinObj) - if err != nil { - return nil, errorcode.JsonErrMarshal - } - quoteLatest := 
&QuoteLatest{} - err = json.Unmarshal(b, quoteLatest) - if err != nil { - return nil, errorcode.JsonErrUnmarshal - } - quotesLatest[quoteLatest.Symbol] = *quoteLatest - } - return quotesLatest, nil -} diff --git a/service/api/app/internal/repo/sysconf/api.go b/service/api/app/internal/repo/sysconf/api.go deleted file mode 100644 index a925ff2a4..000000000 --- a/service/api/app/internal/repo/sysconf/api.go +++ /dev/null @@ -1,23 +0,0 @@ -package sysconf - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Sysconf interface { - GetSysconfigByName(ctx context.Context, name string) (info *table.Sysconfig, err error) - CreateSysconfig(ctx context.Context, config *table.Sysconfig) error - CreateSysconfigInBatches(ctx context.Context, configs []*table.Sysconfig) (rowsAffected int64, err error) - UpdateSysconfig(ctx context.Context, config *table.Sysconfig) error -} - -func New(svcCtx *svc.ServiceContext) Sysconf { - return &sysconf{ - table: `sys_config`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/sysconf/sysconf.go b/service/api/app/internal/repo/sysconf/sysconf.go deleted file mode 100644 index c455ef348..000000000 --- a/service/api/app/internal/repo/sysconf/sysconf.go +++ /dev/null @@ -1,102 +0,0 @@ -package sysconf - -import ( - "context" - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type sysconf struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetSysconfigByName - Params: name string - Return: info *Sysconfig, err error - Description: get sysconfig by config name -*/ -func (m *sysconf) GetSysconfigByName(ctx context.Context, name string) (*table.Sysconfig, error) { - logx.Infof("[GetSysconfigByName] name: %s", name) - f := func() (interface{}, error) { - var config table.Sysconfig - dbTx := m.db.Table(m.table).Where("name = ?", name).Find(&config) - if dbTx.Error != nil { - logx.Errorf("fail to get sysconfig: %s, error: %s", name, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &config, nil - } - var config table.Sysconfig - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetSysconfigByName+name, &config, multcache.SysconfigTtl, f) - if err != nil { - return &config, err - } - config1, ok := value.(*table.Sysconfig) - if !ok { - logx.Errorf("fail to convert value to sysconfig, value: %v, name: %s", value, name) - return nil, errors.New("conversion error") - } - return config1, nil -} - -/* - Func: CreateSysconfig - Params: config *Sysconfig - Return: error - Description: Insert New Sysconfig -*/ - -func (m *sysconf) CreateSysconfig(_ context.Context, config *table.Sysconfig) error { - dbTx := m.db.Table(m.table).Create(config) - if dbTx.Error != nil { - logx.Errorf("[sysconfig.sysconfig] %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[sysconfig.sysconfig] Create Invalid Sysconfig") - return errorcode.DbErrFailToCreateSysconfig - } - return nil -} - -func (m *sysconf) CreateSysconfigInBatches(_ context.Context, configs []*table.Sysconfig) (rowsAffected int64, err error) { - dbTx := m.db.Table(m.table).CreateInBatches(configs, len(configs)) - if dbTx.Error != nil { - 
logx.Errorf("[sysconfig.CreateSysconfigInBatches] %s", dbTx.Error.Error()) - return 0, dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[sysconfig.CreateSysconfigInBatches] Create Invalid Sysconfig Batches") - return 0, errorcode.DbErrFailToCreateSysconfig - } - return dbTx.RowsAffected, nil -} - -/* - Func: UpdateSysconfigByName - Params: config *Sysconfig - Return: err error - Description: update sysconfig by config name -*/ -func (m *sysconf) UpdateSysconfig(_ context.Context, config *table.Sysconfig) error { - dbTx := m.db.Table(m.table).Where("name = ?", config.Name).Select(NameColumn, ValueColumn, ValueTypeColumn, CommentColumn). - Updates(config) - if dbTx.Error != nil { - logx.Errorf("[sysconfig.UpdateSysconfig] %s", dbTx.Error.Error()) - return errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return errorcode.DbErrNotFound - } - return nil -} diff --git a/service/api/app/internal/repo/sysconf/vars.go b/service/api/app/internal/repo/sysconf/vars.go deleted file mode 100644 index f5d2c31ab..000000000 --- a/service/api/app/internal/repo/sysconf/vars.go +++ /dev/null @@ -1,8 +0,0 @@ -package sysconf - -const ( - NameColumn = "name" - ValueColumn = "value" - ValueTypeColumn = "value_type" - CommentColumn = "comment" -) diff --git a/service/api/app/internal/repo/tx/api.go b/service/api/app/internal/repo/tx/api.go deleted file mode 100644 index de91e1e40..000000000 --- a/service/api/app/internal/repo/tx/api.go +++ /dev/null @@ -1,24 +0,0 @@ -package tx - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Model interface { - GetTxsTotalCount(ctx context.Context) (count int64, err error) - GetTxsList(ctx context.Context, limit int64, offset int64) (blocks []*table.Tx, err error) - GetTxByTxHash(ctx context.Context, txHash string) (tx *table.Tx, err error) - GetTxByTxID(ctx context.Context, txID int64) (tx *table.Tx, err error) - GetTxCountByTimeRange(ctx context.Context, data string) (count int64, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `tx`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/tx/tx.go b/service/api/app/internal/repo/tx/tx.go deleted file mode 100755 index bf2e5e6f5..000000000 --- a/service/api/app/internal/repo/tx/tx.go +++ /dev/null @@ -1,158 +0,0 @@ -package tx - -import ( - "context" - "sort" - "time" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: GetTxsTotalCount - Params: - Return: count int64, err error - Description: used for counting total transactions for explorer dashboard -*/ - -func (m *model) GetTxsTotalCount(ctx context.Context) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get tx count, error: %s", dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, nil - } - return &count, nil - } - var countType int64 - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxsCount(), &countType, multcache.TxCountTtl, f) - if err != nil { - return 0, err - } - count, _ := value.(*int64) - 
return *count, nil -} - -/* - Func: GetTxsList - Params: - Return: list of txs, err error - Description: used for showing transactions for explorer -*/ - -func (m *model) GetTxsList(ctx context.Context, limit int64, offset int64) (blocks []*table.Tx, err error) { - txList := []*table.Tx{} - dbTx := m.db.Table(m.table).Limit(int(limit)).Offset(int(offset)).Order("created_at desc").Find(&txList) - if dbTx.Error != nil { - logx.Errorf("fail to get txs offset: %d, limit: %d, error: %s", offset, limit, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return txList, nil -} - -func (m *model) GetTxByTxHash(ctx context.Context, txHash string) (*table.Tx, error) { - f := func() (interface{}, error) { - tx := &table.Tx{} - dbTx := m.db.Table(m.table).Where("tx_hash = ?", txHash).Find(&tx) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by hash: %s, error: %s", txHash, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - err := m.db.Model(&tx).Association(`TxDetails`).Find(&tx.TxDetails) - if err != nil { - return nil, err - } - sort.SliceStable(tx.TxDetails, func(i, j int) bool { - return tx.TxDetails[i].Order < tx.TxDetails[j].Order - }) - return tx, nil - } - tx := &table.Tx{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxByTxHash(txHash), tx, multcache.TxTtl, f) - if err != nil { - return nil, err - } - tx, _ = value.(*table.Tx) - return tx, nil -} - -func (m *model) GetTxByTxID(ctx context.Context, txID int64) (*table.Tx, error) { - f := func() (interface{}, error) { - tx := &table.Tx{} - dbTx := m.db.Table(m.table).Where("id = ? and deleted_at is NULL", txID).Find(&tx) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by id: %d, error: %s", txID, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - err := m.db.Model(&tx).Association(`TxDetails`).Find(&tx.TxDetails) - if err != nil { - return nil, err - } - sort.SliceStable(tx.TxDetails, func(i, j int) bool { - return tx.TxDetails[i].Order < tx.TxDetails[j].Order - }) - return tx, nil - } - tx := &table.Tx{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxByTxId(txID), tx, multcache.TxTtl, f) - if err != nil { - return nil, err - } - tx, _ = value.(*table.Tx) - return tx, nil -} - -func (m *model) GetTxCountByTimeRange(ctx context.Context, data string) (int64, error) { - var ( - from time.Time - to time.Time - ) - now := time.Now() - today := now.Round(24 * time.Hour).Add(-8 * time.Hour) - switch data { - case "yesterday": - from = today.Add(-24 * time.Hour) - to = today - case "today": - from = today - to = now - } - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Where("created_at BETWEEN ? 
AND ?", from, to).Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get tx by time range: %d-%d, error: %s", from.Unix(), to.Unix(), dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, nil - } - return &count, nil - } - var countType int64 - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxCountByTimeRange(data), &countType, multcache.TxCountTtl, f) - if err != nil { - return 0, err - } - count, _ := value.(*int64) - return *count, nil -} diff --git a/service/api/app/internal/repo/txdetail/api.go b/service/api/app/internal/repo/txdetail/api.go deleted file mode 100644 index 8c76b52e4..000000000 --- a/service/api/app/internal/repo/txdetail/api.go +++ /dev/null @@ -1,23 +0,0 @@ -package txdetail - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" -) - -type Model interface { - GetTxsTotalCountByAccountIndex(ctx context.Context, accountIndex int64) (count int64, err error) - GetTxDetailByAccountIndex(ctx context.Context, accountIndex int64) ([]*table.TxDetail, error) - GetTxIdsByAccountIndex(ctx context.Context, accountIndex int64) ([]int64, error) - GetDauInTxDetail(ctx context.Context, data string) (count int64, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `tx_detail`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/api/app/internal/repo/txdetail/txdetail.go b/service/api/app/internal/repo/txdetail/txdetail.go deleted file mode 100755 index fcd67bf41..000000000 --- a/service/api/app/internal/repo/txdetail/txdetail.go +++ /dev/null @@ -1,104 +0,0 @@ -package txdetail - -import ( - "context" - "sort" - "time" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) GetTxsTotalCountByAccountIndex(ctx context.Context, accountIndex int64) (int64, error) { - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Table(m.table).Select("tx_id"). - Where("account_index = ? 
and deleted_at is NULL", accountIndex).Group("tx_id").Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get tx count by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, nil - } - return &count, nil - } - var countType int64 - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxsCount(), &countType, multcache.TxCountTtl, f) - if err != nil { - return 0, err - } - count, _ := value.(*int64) - return *count, nil -} - -func (m *model) GetTxDetailByAccountIndex(ctx context.Context, accountIndex int64) ([]*table.TxDetail, error) { - result := make([]*table.TxDetail, 0) - dbTx := m.db.Table(m.table).Where("account_index = ?", accountIndex).Find(&result) - if dbTx.Error != nil { - logx.Errorf("fail to get tx details by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return result, nil -} - -func (m *model) GetTxIdsByAccountIndex(ctx context.Context, accountIndex int64) ([]int64, error) { - txIds := make([]int64, 0) - dbTx := m.db.Table(m.table).Select("tx_id").Where("account_index = ?", accountIndex).Group("tx_id").Find(&txIds) - if dbTx.Error != nil { - logx.Errorf("fail to get tx ids by account: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - sort.Slice(txIds, func(i, j int) bool { - return txIds[i] > txIds[j] - }) - return txIds, nil -} - -func (m *model) GetDauInTxDetail(ctx context.Context, data string) (int64, error) { - var ( - from time.Time - to time.Time - ) - now := time.Now() - today := now.Round(24 * time.Hour).Add(-8 * time.Hour) - switch data { - case "yesterday": - from = today.Add(-24 * time.Hour) - to = today - case "today": - from = today - to = now - } - f := func() (interface{}, error) { - var count int64 - dbTx := m.db.Raw("SELECT account_index FROM tx_detail WHERE created_at BETWEEN ? AND ? 
AND account_index != -1 GROUP BY account_index", from, to).Count(&count) - if dbTx.Error != nil { - logx.Errorf("fail to get dau by time range: %d-%d, error: %s", from.Unix(), to.Unix(), dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, nil - } - return &count, nil - } - var countType int64 - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyTxCountByTimeRange(data), &countType, multcache.DauTtl, f) - if err != nil { - return 0, err - } - count, _ := value.(*int64) - return *count, nil -} diff --git a/service/api/app/internal/svc/servicecontext.go b/service/api/app/internal/svc/servicecontext.go deleted file mode 100644 index f8c92cf66..000000000 --- a/service/api/app/internal/svc/servicecontext.go +++ /dev/null @@ -1,39 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/pkg/multcache" - "github.com/bnb-chain/zkbas/service/api/app/internal/config" -) - -type ServiceContext struct { - Config config.Config - Conn sqlx.SqlConn - GormPointer *gorm.DB - RedisConn *redis.Redis - Cache multcache.MultCache - CodeVersion string - GitCommitHash string -} - -func NewServiceContext(c config.Config) *ServiceContext { - g, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Must(err) - } - return &ServiceContext{ - Config: c, - Conn: sqlx.NewSqlConn("postgres", c.Postgres.DataSource), - GormPointer: g, - RedisConn: redis.New(c.CacheRedis[0].Host, func(p *redis.Redis) { - p.Type = c.CacheRedis[0].Type - p.Pass = c.CacheRedis[0].Pass - }), - Cache: multcache.NewGoCache(100, 10), - } -} diff --git a/service/apiserver/etc/server-api.yaml.example b/service/apiserver/etc/server-api.yaml.example new file mode 100644 index 000000000..5758ccb7b --- /dev/null +++ b/service/apiserver/etc/server-api.yaml.example @@ -0,0 +1,33 @@ +Name: api-server +Host: 0.0.0.0 +Port: 8888 + +Prometheus: + Host: 0.0.0.0 + Port: 9091 + Path: /metrics + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +LogConf: + ServiceName: api-server + Mode: console + Path: ./log/api-server + StackCooldownMillis: 500 + Level: error + +CoinMarketCap: + Url: https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?symbol= + Token: cfce503f-fake-fake-fake-bbab5257dac8 + +MemCache: + AccountExpiration: 200 + AssetExpiration: 600 + BlockExpiration: 400 + TxExpiration: 400 + PriceExpiration: 200 diff --git a/service/apiserver/internal/cache/mem_cache.go b/service/apiserver/internal/cache/mem_cache.go new file mode 100644 index 000000000..b27c73e90 --- /dev/null +++ b/service/apiserver/internal/cache/mem_cache.go @@ -0,0 +1,282 @@ +package cache + +import ( + "fmt" + "time" + + gocache "github.com/patrickmn/go-cache" + + accdao "github.com/bnb-chain/zkbas/dao/account" + assetdao "github.com/bnb-chain/zkbas/dao/asset" + blockdao "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/dao/tx" +) + +const ( + cacheDefaultExpiration = time.Hour * 1 //gocache default expiration + cacheDefaultPurgeInterval = time.Minute * 5 // gocache purge interval + + AccountIndexNameKeyPrefix = "in:" //key for cache: accountIndex -> accountName + AccountIndexPkKeyPrefix = "ip:" //key for cache: 
accountIndex -> accountPk + AccountNameKeyPrefix = "n:" //key for cache: accountName -> accountIndex + AccountPkKeyPrefix = "k:" //key for cache: accountPk -> accountIndex + AccountByIndexKeyPrefix = "a:" //key for cache: accountIndex -> account + AccountCountKeyPrefix = "ac" //key for cache: total account count + BlockByHeightKeyPrefix = "h:" //key for cache: blockHeight -> block + BlockByCommitmentKeyPrefix = "c:" //key for cache: blockCommitment -> block + BlockCountKeyPrefix = "bc" //key for cache: total block count + TxByHashKeyPrefix = "h:" //key for cache: txHash -> tx + TxCountKeyPrefix = "tc" //key for cache: total tx count + AssetCountKeyKeyPrefix = "AC" //key for cache: total asset count + AssetIdNameKeyPrefix = "IN:" //key for cache: assetId -> assetName + AssetByIdKeyPrefix = "I:" //key for cache: assetId -> asset + AssetBySymbolKeyPrefix = "S:" //key for cache: assetSymbol -> asset + PriceKeyPrefix = "p:" //key for cache: symbol -> price + SysConfigKeyPrefix = "s:" //key for cache: configName -> sysconfig +) + +type fallback func() (interface{}, error) + +type MemCache struct { + goCache *gocache.Cache + accountModel accdao.AccountModel + assetModel assetdao.AssetModel + accountExpiration time.Duration + blockExpiration time.Duration + txExpiration time.Duration + assetExpiration time.Duration + priceExpiration time.Duration +} + +func NewMemCache(accountModel accdao.AccountModel, assetModel assetdao.AssetModel, + accountExpiration, blockExpiration, txExpiration, + assetExpiration, priceExpiration int) *MemCache { + memCache := &MemCache{ + goCache: gocache.New(cacheDefaultExpiration, cacheDefaultPurgeInterval), + accountModel: accountModel, + assetModel: assetModel, + accountExpiration: time.Duration(accountExpiration) * time.Millisecond, + blockExpiration: time.Duration(blockExpiration) * time.Millisecond, + txExpiration: time.Duration(txExpiration) * time.Millisecond, + assetExpiration: time.Duration(assetExpiration) * time.Millisecond, + priceExpiration: time.Duration(priceExpiration) * time.Millisecond, + } + return memCache +} + +func (m *MemCache) getWithSet(key string, duration time.Duration, f fallback) (interface{}, error) { + result, found := m.goCache.Get(key) + if found { + return result, nil + } + result, err := f() + if err != nil { + return nil, err + } + m.goCache.Set(key, result, duration) + return result, nil +} + +func (m *MemCache) setAccount(accountIndex int64, accountName, accountPk string) { + m.goCache.Set(fmt.Sprintf("%s%d", AccountIndexNameKeyPrefix, accountIndex), accountName, gocache.DefaultExpiration) + m.goCache.Set(fmt.Sprintf("%s%d", AccountIndexPkKeyPrefix, accountIndex), accountPk, gocache.DefaultExpiration) + m.goCache.Set(fmt.Sprintf("%s%s", AccountNameKeyPrefix, accountName), accountIndex, gocache.DefaultExpiration) + m.goCache.Set(fmt.Sprintf("%s%s", AccountPkKeyPrefix, accountPk), accountIndex, gocache.DefaultExpiration) +} + +func (m *MemCache) GetAccountIndexByName(accountName string) (int64, error) { + index, found := m.goCache.Get(fmt.Sprintf("%s%s", AccountNameKeyPrefix, accountName)) + if found { + return index.(int64), nil + } + account, err := m.accountModel.GetAccountByName(accountName) + if err != nil { + return 0, err + } + m.setAccount(account.AccountIndex, account.AccountName, account.PublicKey) + return account.AccountIndex, nil +} + +func (m *MemCache) GetAccountIndexByPk(accountPk string) (int64, error) { + index, found := m.goCache.Get(fmt.Sprintf("%s%s", AccountPkKeyPrefix, accountPk)) + if found { + return 
index.(int64), nil + } + account, err := m.accountModel.GetAccountByPk(accountPk) + if err != nil { + return 0, err + } + m.setAccount(account.AccountIndex, account.AccountName, account.PublicKey) + return account.AccountIndex, nil +} + +func (m *MemCache) GetAccountNameByIndex(accountIndex int64) (string, error) { + name, found := m.goCache.Get(fmt.Sprintf("%s%d", AccountIndexNameKeyPrefix, accountIndex)) + if found { + return name.(string), nil + } + account, err := m.accountModel.GetAccountByIndex(accountIndex) + if err != nil { + return "", err + } + m.setAccount(account.AccountIndex, account.AccountName, account.PublicKey) + return account.AccountName, nil +} + +func (m *MemCache) GetAccountPkByIndex(accountIndex int64) (string, error) { + pk, found := m.goCache.Get(fmt.Sprintf("%s%d", AccountIndexPkKeyPrefix, accountIndex)) + if found { + return pk.(string), nil + } + account, err := m.accountModel.GetAccountByIndex(accountIndex) + if err != nil { + return "", err + } + m.setAccount(account.AccountIndex, account.AccountName, account.PublicKey) + return account.PublicKey, nil +} + +func (m *MemCache) GetAccountWithFallback(accountIndex int64, f fallback) (*accdao.Account, error) { + key := fmt.Sprintf("%s%d", AccountByIndexKeyPrefix, accountIndex) + a, err := m.getWithSet(key, m.accountExpiration, f) + if err != nil { + return nil, err + } + + account := a.(*accdao.Account) + m.setAccount(account.AccountIndex, account.AccountName, account.PublicKey) + return account, nil +} + +func (m *MemCache) GetAccountTotalCountWiltFallback(f fallback) (int64, error) { + count, err := m.getWithSet(AccountCountKeyPrefix, m.accountExpiration, f) + if err != nil { + return 0, err + } + return count.(int64), nil +} + +func (m *MemCache) GetBlockByHeightWithFallback(blockHeight int64, f fallback) (*blockdao.Block, error) { + key := fmt.Sprintf("%s%d", BlockByHeightKeyPrefix, blockHeight) + b, err := m.getWithSet(key, m.blockExpiration, f) + if err != nil { + return nil, err + } + + block := b.(*blockdao.Block) + key = fmt.Sprintf("%s%s", BlockByCommitmentKeyPrefix, block.BlockCommitment) + m.goCache.Set(key, block, m.blockExpiration) + return block, nil +} + +func (m *MemCache) GetBlockByCommitmentWithFallback(blockCommitment string, f fallback) (*blockdao.Block, error) { + key := fmt.Sprintf("%s%s", BlockByCommitmentKeyPrefix, blockCommitment) + b, err := m.getWithSet(key, m.blockExpiration, f) + if err != nil { + return nil, err + } + + block := b.(*blockdao.Block) + key = fmt.Sprintf("%s%d", BlockByHeightKeyPrefix, block.BlockHeight) + m.goCache.Set(key, block, m.blockExpiration) + return block, nil +} + +func (m *MemCache) GetBlockTotalCountWithFallback(f fallback) (int64, error) { + count, err := m.getWithSet(BlockCountKeyPrefix, m.blockExpiration, f) + if err != nil { + return 0, err + } + return count.(int64), nil +} + +func (m *MemCache) GetTxByHashWithFallback(txHash string, f fallback) (*tx.Tx, error) { + key := fmt.Sprintf("%s%s", TxByHashKeyPrefix, txHash) + t, err := m.getWithSet(key, m.txExpiration, f) + if err != nil { + return nil, err + } + return t.(*tx.Tx), nil +} + +func (m *MemCache) GetTxTotalCountWithFallback(f fallback) (int64, error) { + count, err := m.getWithSet(TxCountKeyPrefix, m.txExpiration, f) + if err != nil { + return 0, err + } + return count.(int64), nil +} + +func (m *MemCache) GetAssetTotalCountWithFallback(f fallback) (int64, error) { + count, err := m.getWithSet(AssetCountKeyKeyPrefix, m.txExpiration, f) + if err != nil { + return 0, err + } + return 
count.(int64), nil +} + +func (m *MemCache) GetAssetByIdWithFallback(assetId int64, f fallback) (*assetdao.Asset, error) { + key := fmt.Sprintf("%s%d", AssetByIdKeyPrefix, assetId) + a, err := m.getWithSet(key, m.assetExpiration, f) + if err != nil { + return nil, err + } + + asset := a.(*assetdao.Asset) + key = fmt.Sprintf("%s%s", AssetBySymbolKeyPrefix, asset.AssetSymbol) + m.goCache.Set(key, asset, m.assetExpiration) + + key = fmt.Sprintf("%s%d", AssetIdNameKeyPrefix, assetId) + m.goCache.Set(key, asset.AssetName, gocache.DefaultExpiration) + return asset, nil +} + +func (m *MemCache) GetAssetBySymbolWithFallback(assetSymbol string, f fallback) (*assetdao.Asset, error) { + key := fmt.Sprintf("%s%s", AssetBySymbolKeyPrefix, assetSymbol) + a, err := m.getWithSet(key, m.assetExpiration, f) + if err != nil { + return nil, err + } + + asset := a.(*assetdao.Asset) + key = fmt.Sprintf("%s%d", AssetByIdKeyPrefix, asset.AssetId) + m.goCache.Set(key, asset, m.assetExpiration) + + key = fmt.Sprintf("%s%d", AssetIdNameKeyPrefix, asset.AssetId) + m.goCache.Set(key, asset.AssetName, gocache.DefaultExpiration) + return asset, nil +} + +func (m *MemCache) GetAssetNameById(assetId int64) (string, error) { + key := fmt.Sprintf("%s%d", AssetIdNameKeyPrefix, assetId) + name, found := m.goCache.Get(key) + if found { + return name.(string), nil + } + asset, err := m.assetModel.GetAssetById(assetId) + if err != nil { + return "", err + } + + m.goCache.Set(key, asset.AssetName, gocache.DefaultExpiration) + return asset.AssetName, nil +} + +func (m *MemCache) GetPriceWithFallback(symbol string, f fallback) (float64, error) { + key := fmt.Sprintf("%s%s", PriceKeyPrefix, symbol) + price, err := m.getWithSet(key, m.priceExpiration, f) + if err != nil { + return 0, err + } + return price.(float64), nil +} + +func (m *MemCache) GetSysConfigWithFallback(configName string, f fallback) (*sysconfig.SysConfig, error) { + key := fmt.Sprintf("%s%s", SysConfigKeyPrefix, configName) + c, err := m.getWithSet(key, gocache.DefaultExpiration, f) + if err != nil { + return nil, err + } + return c.(*sysconfig.SysConfig), nil +} diff --git a/service/apiserver/internal/config/config.go b/service/apiserver/internal/config/config.go new file mode 100644 index 000000000..34fdf3173 --- /dev/null +++ b/service/apiserver/internal/config/config.go @@ -0,0 +1,27 @@ +package config + +import ( + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/cache" + "github.com/zeromicro/go-zero/rest" +) + +type Config struct { + rest.RestConf + Postgres struct { + DataSource string + } + CacheRedis cache.CacheConf + LogConf logx.LogConf + CoinMarketCap struct { + Url string + Token string + } + MemCache struct { + AccountExpiration int + AssetExpiration int + BlockExpiration int + TxExpiration int + PriceExpiration int + } +} diff --git a/service/apiserver/internal/fetcher/price/fetcher.go b/service/apiserver/internal/fetcher/price/fetcher.go new file mode 100644 index 000000000..0ab96f99f --- /dev/null +++ b/service/apiserver/internal/fetcher/price/fetcher.go @@ -0,0 +1,83 @@ +package price + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/cache" + "github.com/bnb-chain/zkbas/types" +) + +type Fetcher interface { + GetCurrencyPrice(ctx context.Context, l2Symbol string) (price float64, err error) +} + +func NewFetcher(memCache *cache.MemCache, cmcUrl, cmcToken string) Fetcher { + return &fetcher{ + memCache: memCache, + cmcUrl: cmcUrl, + 
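// Putting the pieces above together: a wiring sketch for the price fetcher, of the kind the apiserver's
// service context would typically perform. The helper name below and the already-constructed dao models
// are assumptions for illustration, not code added by this change. Note that NewMemCache interprets each
// MemCache expiration from the yaml example as milliseconds.
//
//	func newPriceFetcher(c config.Config, accountModel accdao.AccountModel, assetModel assetdao.AssetModel) price.Fetcher {
//		memCache := cache.NewMemCache(accountModel, assetModel,
//			c.MemCache.AccountExpiration, c.MemCache.BlockExpiration,
//			c.MemCache.TxExpiration, c.MemCache.AssetExpiration,
//			c.MemCache.PriceExpiration)
//		return price.NewFetcher(memCache, c.CoinMarketCap.Url, c.CoinMarketCap.Token)
//	}
//
//	// A quote is then served from the in-memory cache and only hits CoinMarketCap on a miss or after expiry:
//	bnbPrice, err := newPriceFetcher(c, accountModel, assetModel).GetCurrencyPrice(ctx, "BNB")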
cmcToken: cmcToken, + } +} + +type fetcher struct { + memCache *cache.MemCache + cmcUrl string + cmcToken string +} + +func (f *fetcher) GetCurrencyPrice(_ context.Context, symbol string) (float64, error) { + return f.memCache.GetPriceWithFallback(symbol, func() (interface{}, error) { + quoteMap, err := f.getLatestQuotes(symbol) + if err != nil { + return 0, err + } + q, ok := quoteMap[symbol] + if !ok { + return 0, nil + } + price := q.Quote["USD"].Price + return price, err + }) +} + +func (f *fetcher) getLatestQuotes(symbol string) (map[string]QuoteLatest, error) { + client := &http.Client{} + url := fmt.Sprintf("%s%s", f.cmcUrl, symbol) + reqest, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, types.HttpErrFailToRequest + } + reqest.Header.Add("X-CMC_PRO_API_KEY", f.cmcToken) + reqest.Header.Add("Accept", "application/json") + resp, err := client.Do(reqest) + if err != nil { + return nil, types.HttpErrClientDo + } + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, types.IoErrFailToRead + } + currencyPrice := &currencyPrice{} + if err = json.Unmarshal(body, &currencyPrice); err != nil { + return nil, types.JsonErrUnmarshal + } + quotesLatest := make(map[string]QuoteLatest, 0) + for _, coinObj := range currencyPrice.Data.(map[string]interface{}) { + b, err := json.Marshal(coinObj) + if err != nil { + return nil, types.JsonErrMarshal + } + quoteLatest := &QuoteLatest{} + err = json.Unmarshal(b, quoteLatest) + if err != nil { + return nil, types.JsonErrUnmarshal + } + quotesLatest[quoteLatest.Symbol] = *quoteLatest + } + return quotesLatest, nil +} diff --git a/service/api/app/internal/repo/price/vars.go b/service/apiserver/internal/fetcher/price/types.go similarity index 93% rename from service/api/app/internal/repo/price/vars.go rename to service/apiserver/internal/fetcher/price/types.go index 01006b6ba..37db441bc 100644 --- a/service/api/app/internal/repo/price/vars.go +++ b/service/apiserver/internal/fetcher/price/types.go @@ -1,9 +1,5 @@ package price -var ( - coinMarketCap = "https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?symbol=" -) - type status struct { Timestamp string `json:"timestamp"` ErrorCode int `json:"error_code"` diff --git a/service/apiserver/internal/fetcher/state/fetcher.go b/service/apiserver/internal/fetcher/state/fetcher.go new file mode 100644 index 000000000..eb4464b8e --- /dev/null +++ b/service/apiserver/internal/fetcher/state/fetcher.go @@ -0,0 +1,112 @@ +package state + +import ( + "context" + + "github.com/bnb-chain/zkbas/common/chain" + accdao "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/dbcache" + liqdao "github.com/bnb-chain/zkbas/dao/liquidity" + nftdao "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/types" +) + +//go:generate mockgen -source api.go -destination api_mock.go -package state + +// Fetcher will fetch the latest states (account,nft,liquidity) from redis, which is written by committer; +// and if the required data cannot be found then database will be used. 
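// As a usage sketch (the redis cache and dao models are assumed to be wired elsewhere, for example in the
// apiserver service context; the variable names here are illustrative):
//
//	fetcher := state.NewFetcher(redisCache, accountModel, liquidityModel, nftModel)
//
//	// Account state: the committer-written redis entry is tried first; on a miss the
//	// database row is loaded and converted via chain.ToFormatAccountInfo.
//	account, err := fetcher.GetLatestAccount(accountIndex)
//
//	// Liquidity and NFT state take the same redis-first, DB-fallback path and are
//	// normalized through types.ConstructLiquidityInfo and types.ConstructNftInfo.
//	pair, err := fetcher.GetLatestLiquidity(pairIndex)
//	nft, err := fetcher.GetLatestNft(nftIndex)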
+type Fetcher interface { + GetLatestAccount(accountIndex int64) (accountInfo *types.AccountInfo, err error) + GetLatestLiquidity(pairIndex int64) (liquidityInfo *types.LiquidityInfo, err error) + GetLatestNft(nftIndex int64) (*types.NftInfo, error) +} + +func NewFetcher(redisCache dbcache.Cache, + accountModel accdao.AccountModel, + liquidityModel liqdao.LiquidityModel, + nftModel nftdao.L2NftModel) Fetcher { + return &fetcher{ + redisCache: redisCache, + accountModel: accountModel, + liquidityModel: liquidityModel, + nftModel: nftModel, + } +} + +type fetcher struct { + redisCache dbcache.Cache + accountModel accdao.AccountModel + liquidityModel liqdao.LiquidityModel + nftModel nftdao.L2NftModel +} + +func (f *fetcher) GetLatestAccount(accountIndex int64) (*types.AccountInfo, error) { + var fa *types.AccountInfo + account := &accdao.Account{} + + redisAccount, err := f.redisCache.Get(context.Background(), dbcache.AccountKeyByIndex(accountIndex), account) + if err == nil && redisAccount != nil { + fa, err = chain.ToFormatAccountInfo(account) + if err == nil { + return fa, nil + } + } else { + dbAccount, err := f.accountModel.GetAccountByIndex(accountIndex) + if err != nil { + return nil, err + } + fa, err = chain.ToFormatAccountInfo(dbAccount) + if err != nil { + return nil, err + } + } + return fa, nil +} + +func (f *fetcher) GetLatestLiquidity(pairIndex int64) (liquidityInfo *types.LiquidityInfo, err error) { + l := &liqdao.Liquidity{} + + redisLiquidity, err := f.redisCache.Get(context.Background(), dbcache.LiquidityKeyByIndex(pairIndex), l) + if err == nil && redisLiquidity != "" { + } else { + l, err = f.liquidityModel.GetLiquidityByPairIndex(pairIndex) + if err != nil { + return nil, err + } + } + + return types.ConstructLiquidityInfo( + pairIndex, + l.AssetAId, + l.AssetA, + l.AssetBId, + l.AssetB, + l.LpAmount, + l.KLast, + l.FeeRate, + l.TreasuryAccountIndex, + l.TreasuryRate, + ) +} + +func (f *fetcher) GetLatestNft(nftIndex int64) (*types.NftInfo, error) { + n := &nftdao.L2Nft{} + + redisNft, err := f.redisCache.Get(context.Background(), dbcache.NftKeyByIndex(nftIndex), n) + if err == nil && redisNft != "" { + } else { + n, err = f.nftModel.GetNftAsset(nftIndex) + if err != nil { + return nil, err + } + } + + return types.ConstructNftInfo(nftIndex, + n.CreatorAccountIndex, + n.OwnerAccountIndex, + n.NftContentHash, + n.NftL1TokenId, + n.NftL1Address, + n.CreatorTreasuryRate, + n.CollectionId), nil +} diff --git a/service/apiserver/internal/handler/account/getaccounthandler.go b/service/apiserver/internal/handler/account/getaccounthandler.go new file mode 100644 index 000000000..961f3bdf1 --- /dev/null +++ b/service/apiserver/internal/handler/account/getaccounthandler.go @@ -0,0 +1,29 @@ +package account + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/account" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetAccountHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetAccount + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := account.NewGetAccountLogic(r.Context(), svcCtx) + resp, err := l.GetAccount(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/info/getaccountshandler.go 
b/service/apiserver/internal/handler/account/getaccountshandler.go similarity index 57% rename from service/api/app/internal/handler/info/getaccountshandler.go rename to service/apiserver/internal/handler/account/getaccountshandler.go index 50ea924dd..bc0cdec4a 100644 --- a/service/api/app/internal/handler/info/getaccountshandler.go +++ b/service/apiserver/internal/handler/account/getaccountshandler.go @@ -1,24 +1,24 @@ -package info +package account import ( "net/http" "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/account" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetAccountsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetAccounts + var req types.ReqGetRange if err := httpx.Parse(r, &req); err != nil { httpx.Error(w, err) return } - l := info.NewGetAccountsLogic(r.Context(), svcCtx) + l := account.NewGetAccountsLogic(r.Context(), svcCtx) resp, err := l.GetAccounts(&req) if err != nil { httpx.Error(w, err) diff --git a/service/apiserver/internal/handler/asset/getassetshandler.go b/service/apiserver/internal/handler/asset/getassetshandler.go new file mode 100644 index 000000000..4c727d0c9 --- /dev/null +++ b/service/apiserver/internal/handler/asset/getassetshandler.go @@ -0,0 +1,29 @@ +package asset + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/asset" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetAssetsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetRange + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := asset.NewGetAssetsLogic(r.Context(), svcCtx) + resp, err := l.GetAssets(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/apiserver/internal/handler/block/getblockhandler.go b/service/apiserver/internal/handler/block/getblockhandler.go new file mode 100644 index 000000000..4e89619f1 --- /dev/null +++ b/service/apiserver/internal/handler/block/getblockhandler.go @@ -0,0 +1,29 @@ +package block + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/block" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetBlockHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetBlock + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := block.NewGetBlockLogic(r.Context(), svcCtx) + resp, err := l.GetBlock(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/block/getblockshandler.go b/service/apiserver/internal/handler/block/getblockshandler.go similarity index 67% rename from service/api/app/internal/handler/block/getblockshandler.go rename to 
service/apiserver/internal/handler/block/getblockshandler.go index 782612419..97903d5c2 100644 --- a/service/api/app/internal/handler/block/getblockshandler.go +++ b/service/apiserver/internal/handler/block/getblockshandler.go @@ -5,14 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/block" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetBlocksHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetBlocks + var req types.ReqGetRange if err := httpx.Parse(r, &req); err != nil { httpx.Error(w, err) return diff --git a/service/apiserver/internal/handler/block/getcurrentheighthandler.go b/service/apiserver/internal/handler/block/getcurrentheighthandler.go new file mode 100644 index 000000000..aea7d17cf --- /dev/null +++ b/service/apiserver/internal/handler/block/getcurrentheighthandler.go @@ -0,0 +1,22 @@ +package block + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/block" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" +) + +func GetCurrentHeightHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := block.NewGetCurrentHeightLogic(r.Context(), svcCtx) + resp, err := l.GetCurrentHeight() + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/apiserver/internal/handler/info/getcurrencypricehandler.go b/service/apiserver/internal/handler/info/getcurrencypricehandler.go new file mode 100644 index 000000000..a243b1441 --- /dev/null +++ b/service/apiserver/internal/handler/info/getcurrencypricehandler.go @@ -0,0 +1,29 @@ +package info + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetCurrencyPriceHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetCurrencyPrice + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := info.NewGetCurrencyPriceLogic(r.Context(), svcCtx) + resp, err := l.GetCurrencyPrice(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/info/getcurrencypriceshandler.go b/service/apiserver/internal/handler/info/getcurrencypriceshandler.go similarity index 68% rename from service/api/app/internal/handler/info/getcurrencypriceshandler.go rename to service/apiserver/internal/handler/info/getcurrencypriceshandler.go index d32809d65..f728c962c 100644 --- a/service/api/app/internal/handler/info/getcurrencypriceshandler.go +++ b/service/apiserver/internal/handler/info/getcurrencypriceshandler.go @@ -5,14 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - 
"github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetCurrencyPricesHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetCurrencyPrices + var req types.ReqGetRange if err := httpx.Parse(r, &req); err != nil { httpx.Error(w, err) return diff --git a/service/api/app/internal/handler/info/getgasaccounthandler.go b/service/apiserver/internal/handler/info/getgasaccounthandler.go similarity index 50% rename from service/api/app/internal/handler/info/getgasaccounthandler.go rename to service/apiserver/internal/handler/info/getgasaccounthandler.go index e28bac937..6831960c6 100644 --- a/service/api/app/internal/handler/info/getgasaccounthandler.go +++ b/service/apiserver/internal/handler/info/getgasaccounthandler.go @@ -5,21 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" ) func GetGasAccountHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetGasAccount - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - l := info.NewGetGasAccountLogic(r.Context(), svcCtx) - resp, err := l.GetGasAccount(&req) + resp, err := l.GetGasAccount() if err != nil { httpx.Error(w, err) } else { diff --git a/service/apiserver/internal/handler/info/getgasfeeassetshandler.go b/service/apiserver/internal/handler/info/getgasfeeassetshandler.go new file mode 100644 index 000000000..ddb82d3a2 --- /dev/null +++ b/service/apiserver/internal/handler/info/getgasfeeassetshandler.go @@ -0,0 +1,22 @@ +package info + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" +) + +func GetGasFeeAssetsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := info.NewGetGasFeeAssetsLogic(r.Context(), svcCtx) + resp, err := l.GetGasFeeAssets() + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/info/getgasfeehandler.go b/service/apiserver/internal/handler/info/getgasfeehandler.go similarity index 71% rename from service/api/app/internal/handler/info/getgasfeehandler.go rename to service/apiserver/internal/handler/info/getgasfeehandler.go index c634e022f..4987e6c2f 100644 --- a/service/api/app/internal/handler/info/getgasfeehandler.go +++ b/service/apiserver/internal/handler/info/getgasfeehandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) 
func GetGasFeeHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/api/app/internal/handler/info/getlayer2basicinfohandler.go b/service/apiserver/internal/handler/info/getlayer2basicinfohandler.go similarity index 50% rename from service/api/app/internal/handler/info/getlayer2basicinfohandler.go rename to service/apiserver/internal/handler/info/getlayer2basicinfohandler.go index 12269b3c8..4ed86d6d3 100644 --- a/service/api/app/internal/handler/info/getlayer2basicinfohandler.go +++ b/service/apiserver/internal/handler/info/getlayer2basicinfohandler.go @@ -5,21 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" ) func GetLayer2BasicInfoHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetLayer2BasicInfo - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) - return - } - l := info.NewGetLayer2BasicInfoLogic(r.Context(), svcCtx) - resp, err := l.GetLayer2BasicInfo(&req) + resp, err := l.GetLayer2BasicInfo() if err != nil { httpx.Error(w, err) } else { diff --git a/service/api/app/internal/handler/info/getwithdrawgasfeehandler.go b/service/apiserver/internal/handler/info/getwithdrawgasfeehandler.go similarity index 73% rename from service/api/app/internal/handler/info/getwithdrawgasfeehandler.go rename to service/apiserver/internal/handler/info/getwithdrawgasfeehandler.go index e72673f85..09b793e9d 100644 --- a/service/api/app/internal/handler/info/getwithdrawgasfeehandler.go +++ b/service/apiserver/internal/handler/info/getwithdrawgasfeehandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetWithdrawGasFeeHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/api/app/internal/handler/info/searchhandler.go b/service/apiserver/internal/handler/info/searchhandler.go similarity index 71% rename from service/api/app/internal/handler/info/searchhandler.go rename to service/apiserver/internal/handler/info/searchhandler.go index c4e019a92..9198e55b1 100644 --- a/service/api/app/internal/handler/info/searchhandler.go +++ b/service/apiserver/internal/handler/info/searchhandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/info" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/info" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func SearchHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/apiserver/internal/handler/nft/getaccountnftshandler.go b/service/apiserver/internal/handler/nft/getaccountnftshandler.go new 
file mode 100644 index 000000000..e06b9f5c1 --- /dev/null +++ b/service/apiserver/internal/handler/nft/getaccountnftshandler.go @@ -0,0 +1,29 @@ +package nft + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/nft" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetAccountNftsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetAccountNfts + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := nft.NewGetAccountNftsLogic(r.Context(), svcCtx) + resp, err := l.GetAccountNfts(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/nft/getmaxofferidhandler.go b/service/apiserver/internal/handler/nft/getmaxofferidhandler.go similarity index 72% rename from service/api/app/internal/handler/nft/getmaxofferidhandler.go rename to service/apiserver/internal/handler/nft/getmaxofferidhandler.go index 6cd6376d6..67ddac5f0 100644 --- a/service/api/app/internal/handler/nft/getmaxofferidhandler.go +++ b/service/apiserver/internal/handler/nft/getmaxofferidhandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/nft" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/nft" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetMaxOfferIdHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/api/app/internal/handler/pair/getlpvaluehandler.go b/service/apiserver/internal/handler/pair/getlpvaluehandler.go similarity index 50% rename from service/api/app/internal/handler/pair/getlpvaluehandler.go rename to service/apiserver/internal/handler/pair/getlpvaluehandler.go index ee44e5045..4579bc639 100644 --- a/service/api/app/internal/handler/pair/getlpvaluehandler.go +++ b/service/apiserver/internal/handler/pair/getlpvaluehandler.go @@ -5,20 +5,20 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/pair" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/pair" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) -func GetLPValueHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { +func GetLpValueHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetLPValue + var req types.ReqGetLpValue if err := httpx.Parse(r, &req); err != nil { httpx.Error(w, err) return } - l := pair.NewGetLPValueLogic(r.Context(), svcCtx) + l := pair.NewGetLpValueLogic(r.Context(), svcCtx) resp, err := l.GetLPValue(&req) if err != nil { httpx.Error(w, err) diff --git a/service/apiserver/internal/handler/pair/getpairhandler.go b/service/apiserver/internal/handler/pair/getpairhandler.go new file mode 100644 index 000000000..c511cbff5 --- /dev/null +++ b/service/apiserver/internal/handler/pair/getpairhandler.go @@ -0,0 +1,29 @@ 
+package pair + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/pair" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetPairHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetPair + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := pair.NewGetPairLogic(r.Context(), svcCtx) + resp, err := l.GetPair(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/apiserver/internal/handler/pair/getpairshandler.go b/service/apiserver/internal/handler/pair/getpairshandler.go new file mode 100644 index 000000000..973bc0448 --- /dev/null +++ b/service/apiserver/internal/handler/pair/getpairshandler.go @@ -0,0 +1,22 @@ +package pair + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/pair" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" +) + +func GetPairsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := pair.NewGetPairsLogic(r.Context(), svcCtx) + resp, err := l.GetPairs() + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/pair/getswapamounthandler.go b/service/apiserver/internal/handler/pair/getswapamounthandler.go similarity index 72% rename from service/api/app/internal/handler/pair/getswapamounthandler.go rename to service/apiserver/internal/handler/pair/getswapamounthandler.go index d246bcb56..65f4b3c09 100644 --- a/service/api/app/internal/handler/pair/getswapamounthandler.go +++ b/service/apiserver/internal/handler/pair/getswapamounthandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/pair" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/pair" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetSwapAmountHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/api/app/internal/handler/root/getstatushandler.go b/service/apiserver/internal/handler/root/getstatushandler.go similarity index 50% rename from service/api/app/internal/handler/root/getstatushandler.go rename to service/apiserver/internal/handler/root/getstatushandler.go index b1124b4bb..49894783a 100644 --- a/service/api/app/internal/handler/root/getstatushandler.go +++ b/service/apiserver/internal/handler/root/getstatushandler.go @@ -5,21 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/root" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/root" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" ) func GetStatusHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetStatus - if err := httpx.Parse(r, &req); err != nil { - httpx.Error(w, err) 
- return - } - l := root.NewGetStatusLogic(r.Context(), svcCtx) - resp, err := l.GetStatus(&req) + resp, err := l.GetStatus() if err != nil { httpx.Error(w, err) } else { diff --git a/service/apiserver/internal/handler/routes.go b/service/apiserver/internal/handler/routes.go new file mode 100644 index 000000000..2d60fa325 --- /dev/null +++ b/service/apiserver/internal/handler/routes.go @@ -0,0 +1,205 @@ +// Code generated by goctl. DO NOT EDIT. +package handler + +import ( + "net/http" + + account "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/account" + asset "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/asset" + block "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/block" + info "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/info" + nft "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/nft" + pair "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/pair" + root "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/root" + transaction "github.com/bnb-chain/zkbas/service/apiserver/internal/handler/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + + "github.com/zeromicro/go-zero/rest" +) + +func RegisterHandlers(server *rest.Server, serverCtx *svc.ServiceContext) { + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/", + Handler: root.GetStatusHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/accounts", + Handler: account.GetAccountsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/account", + Handler: account.GetAccountHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/assets", + Handler: asset.GetAssetsHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/blocks", + Handler: block.GetBlocksHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/block", + Handler: block.GetBlockHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/currentHeight", + Handler: block.GetCurrentHeightHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/layer2BasicInfo", + Handler: info.GetLayer2BasicInfoHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/currencyPrice", + Handler: info.GetCurrencyPriceHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/currencyPrices", + Handler: info.GetCurrencyPricesHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/gasFee", + Handler: info.GetGasFeeHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/withdrawGasFee", + Handler: info.GetWithdrawGasFeeHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/gasFeeAssets", + Handler: info.GetGasFeeAssetsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/gasAccount", + Handler: info.GetGasAccountHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/search", + Handler: info.SearchHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/swapAmount", + Handler: pair.GetSwapAmountHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/pairs", + Handler: pair.GetPairsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/lpValue", + Handler: 
pair.GetLpValueHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/pair", + Handler: pair.GetPairHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/txs", + Handler: transaction.GetTxsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/blockTxs", + Handler: transaction.GetBlockTxsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/accountTxs", + Handler: transaction.GetAccountTxsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/tx", + Handler: transaction.GetTxHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/mempoolTxs", + Handler: transaction.GetMempoolTxsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/accountMempoolTxs", + Handler: transaction.GetAccountMempoolTxsHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/nextNonce", + Handler: transaction.GetNextNonceHandler(serverCtx), + }, + { + Method: http.MethodPost, + Path: "/api/v1/sendTx", + Handler: transaction.SendTxHandler(serverCtx), + }, + }, + ) + + server.AddRoutes( + []rest.Route{ + { + Method: http.MethodGet, + Path: "/api/v1/maxOfferId", + Handler: nft.GetMaxOfferIdHandler(serverCtx), + }, + { + Method: http.MethodGet, + Path: "/api/v1/accountNfts", + Handler: nft.GetAccountNftsHandler(serverCtx), + }, + }, + ) +} diff --git a/service/apiserver/internal/handler/transaction/getaccountmempooltxshandler.go b/service/apiserver/internal/handler/transaction/getaccountmempooltxshandler.go new file mode 100644 index 000000000..df5a90c8d --- /dev/null +++ b/service/apiserver/internal/handler/transaction/getaccountmempooltxshandler.go @@ -0,0 +1,29 @@ +package transaction + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetAccountMempoolTxsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetAccountMempoolTxs + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := transaction.NewGetAccountMempoolTxsLogic(r.Context(), svcCtx) + resp, err := l.GetAccountMempoolTxs(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/apiserver/internal/handler/transaction/getaccounttxshandler.go b/service/apiserver/internal/handler/transaction/getaccounttxshandler.go new file mode 100644 index 000000000..30f30deb7 --- /dev/null +++ b/service/apiserver/internal/handler/transaction/getaccounttxshandler.go @@ -0,0 +1,29 @@ +package transaction + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetAccountTxsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetAccountTxs + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := transaction.NewGetAccountTxsLogic(r.Context(), svcCtx) + resp, err := l.GetAccountTxs(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} 
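For orientation, a minimal sketch of how the generated routes, the Config struct, and the service context are typically wired together in a goctl-style entry point; the config file path and the svc.NewServiceContext constructor name are assumptions and not part of this diff:

package main

import (
	"flag"
	"fmt"

	"github.com/zeromicro/go-zero/core/conf"
	"github.com/zeromicro/go-zero/rest"

	"github.com/bnb-chain/zkbas/service/apiserver/internal/config"
	"github.com/bnb-chain/zkbas/service/apiserver/internal/handler"
	"github.com/bnb-chain/zkbas/service/apiserver/internal/svc"
)

// The default config path is a placeholder; a real deployment supplies its own file.
var configFile = flag.String("f", "etc/apiserver.yaml", "the config file")

func main() {
	flag.Parse()

	var c config.Config
	conf.MustLoad(*configFile, &c) // parses the YAML into the Config struct shown earlier

	ctx := svc.NewServiceContext(c) // assumed constructor name from the internal/svc package
	server := rest.MustNewServer(c.RestConf)
	defer server.Stop()

	handler.RegisterHandlers(server, ctx) // mounts every /api/v1/* route from routes.go
	fmt.Printf("Starting apiserver at %s:%d...\n", c.Host, c.Port)
	server.Start()
}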
diff --git a/service/apiserver/internal/handler/transaction/getblocktxshandler.go b/service/apiserver/internal/handler/transaction/getblocktxshandler.go new file mode 100644 index 000000000..7070c9115 --- /dev/null +++ b/service/apiserver/internal/handler/transaction/getblocktxshandler.go @@ -0,0 +1,29 @@ +package transaction + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetBlockTxsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetBlockTxs + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := transaction.NewGetBlockTxsLogic(r.Context(), svcCtx) + resp, err := l.GetBlockTxs(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/transaction/getmempooltxshandler.go b/service/apiserver/internal/handler/transaction/getmempooltxshandler.go similarity index 68% rename from service/api/app/internal/handler/transaction/getmempooltxshandler.go rename to service/apiserver/internal/handler/transaction/getmempooltxshandler.go index c6a27dd05..e15366d0d 100644 --- a/service/api/app/internal/handler/transaction/getmempooltxshandler.go +++ b/service/apiserver/internal/handler/transaction/getmempooltxshandler.go @@ -5,14 +5,14 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetMempoolTxsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req types.ReqGetMempoolTxs + var req types.ReqGetRange if err := httpx.Parse(r, &req); err != nil { httpx.Error(w, err) return diff --git a/service/api/app/internal/handler/transaction/getnextnoncehandler.go b/service/apiserver/internal/handler/transaction/getnextnoncehandler.go similarity index 72% rename from service/api/app/internal/handler/transaction/getnextnoncehandler.go rename to service/apiserver/internal/handler/transaction/getnextnoncehandler.go index 543655a8b..c8bd62572 100644 --- a/service/api/app/internal/handler/transaction/getnextnoncehandler.go +++ b/service/apiserver/internal/handler/transaction/getnextnoncehandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func GetNextNonceHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/apiserver/internal/handler/transaction/gettxhandler.go b/service/apiserver/internal/handler/transaction/gettxhandler.go new file mode 100644 index 000000000..86b2db63a --- /dev/null +++ 
b/service/apiserver/internal/handler/transaction/gettxhandler.go @@ -0,0 +1,29 @@ +package transaction + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetTx + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := transaction.NewGetTxLogic(r.Context(), svcCtx) + resp, err := l.GetTx(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/apiserver/internal/handler/transaction/gettxshandler.go b/service/apiserver/internal/handler/transaction/gettxshandler.go new file mode 100644 index 000000000..c116d88ac --- /dev/null +++ b/service/apiserver/internal/handler/transaction/gettxshandler.go @@ -0,0 +1,29 @@ +package transaction + +import ( + "net/http" + + "github.com/zeromicro/go-zero/rest/httpx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func GetTxsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var req types.ReqGetRange + if err := httpx.Parse(r, &req); err != nil { + httpx.Error(w, err) + return + } + + l := transaction.NewGetTxsLogic(r.Context(), svcCtx) + resp, err := l.GetTxs(&req) + if err != nil { + httpx.Error(w, err) + } else { + httpx.OkJson(w, resp) + } + } +} diff --git a/service/api/app/internal/handler/transaction/sendtxhandler.go b/service/apiserver/internal/handler/transaction/sendtxhandler.go similarity index 71% rename from service/api/app/internal/handler/transaction/sendtxhandler.go rename to service/apiserver/internal/handler/transaction/sendtxhandler.go index 5634c0614..6f6108dff 100644 --- a/service/api/app/internal/handler/transaction/sendtxhandler.go +++ b/service/apiserver/internal/handler/transaction/sendtxhandler.go @@ -5,9 +5,9 @@ import ( "github.com/zeromicro/go-zero/rest/httpx" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/transaction" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/transaction" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" ) func SendTxHandler(svcCtx *svc.ServiceContext) http.HandlerFunc { diff --git a/service/apiserver/internal/logic/account/getaccountlogic.go b/service/apiserver/internal/logic/account/getaccountlogic.go new file mode 100644 index 000000000..9fb8a0bc1 --- /dev/null +++ b/service/apiserver/internal/logic/account/getaccountlogic.go @@ -0,0 +1,98 @@ +package account + +import ( + "context" + "math/big" + "sort" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryByIndex = "index" + queryByName = "name" + queryByPk = "pk" +) + +type GetAccountLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} 
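A quick client-side sketch of the index|name|pk account lookup implemented below; the endpoint path comes from routes.go, while the port and the query-parameter names (by, value) are assumptions inferred from the request type fields rather than taken from this diff:

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Hypothetical query against a locally running apiserver; adjust host, port and params as needed.
	resp, err := http.Get("http://localhost:8888/api/v1/account?by=index&value=2")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // JSON-encoded types.Account on success
}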
+ +func NewGetAccountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountLogic { + return &GetAccountLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAccountLogic) GetAccount(req *types.ReqGetAccount) (resp *types.Account, err error) { + accountIndex := int64(0) + switch req.By { + case queryByIndex: + accountIndex, err = strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for account index") + } + case queryByName: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByName(req.Value) + case queryByPk: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByPk(req.Value) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be index|name|pk") + } + + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + account, err := l.svcCtx.StateFetcher.GetLatestAccount(accountIndex) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + maxAssetId, err := l.svcCtx.AssetModel.GetMaxId() + if err != nil { + return nil, types2.AppErrInternal + } + + resp = &types.Account{ + Index: account.AccountIndex, + Status: uint32(account.Status), + Name: account.AccountName, + Pk: account.PublicKey, + Nonce: account.Nonce, + Assets: make([]*types.AccountAsset, 0), + } + for _, asset := range account.AssetInfo { + if asset.AssetId > maxAssetId || asset.Balance == nil || asset.Balance.Cmp(big.NewInt(0)) == 0 { + continue //it is used for offer related, or empty balance + } + assetName, _ := l.svcCtx.MemCache.GetAssetNameById(asset.AssetId) + resp.Assets = append(resp.Assets, &types.AccountAsset{ + Id: uint32(asset.AssetId), + Name: assetName, + Balance: asset.Balance.String(), + LpAmount: asset.LpAmount.String(), + }) + } + + sort.Slice(resp.Assets, func(i, j int) bool { + return resp.Assets[i].Id < resp.Assets[j].Id + }) + + return resp, nil +} diff --git a/service/apiserver/internal/logic/account/getaccountslogic.go b/service/apiserver/internal/logic/account/getaccountslogic.go new file mode 100644 index 000000000..e6d7f9a87 --- /dev/null +++ b/service/apiserver/internal/logic/account/getaccountslogic.go @@ -0,0 +1,56 @@ +package account + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetAccountsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetAccountsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountsLogic { + return &GetAccountsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAccountsLogic) GetAccounts(req *types.ReqGetRange) (resp *types.Accounts, err error) { + total, err := l.svcCtx.MemCache.GetAccountTotalCountWiltFallback(func() (interface{}, error) { + return l.svcCtx.AccountModel.GetAccountsTotalCount() + }) + if err != nil { + return nil, types2.AppErrInternal + } + + resp = &types.Accounts{ + Accounts: make([]*types.SimpleAccount, 0), + Total: uint32(total), + } + + if total == 0 || total <= int64(req.Offset) { + return resp, nil + } + + accounts, err := l.svcCtx.AccountModel.GetAccountsList(int(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + 
for _, a := range accounts { + resp.Accounts = append(resp.Accounts, &types.SimpleAccount{ + Index: a.AccountIndex, + Name: a.AccountName, + Pk: a.PublicKey, + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/asset/getassetslogic.go b/service/apiserver/internal/logic/asset/getassetslogic.go new file mode 100644 index 000000000..56fc818ef --- /dev/null +++ b/service/apiserver/internal/logic/asset/getassetslogic.go @@ -0,0 +1,60 @@ +package asset + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetAssetsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetAssetsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAssetsLogic { + return &GetAssetsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAssetsLogic) GetAssets(req *types.ReqGetRange) (resp *types.Assets, err error) { + total, err := l.svcCtx.MemCache.GetAssetTotalCountWithFallback(func() (interface{}, error) { + return l.svcCtx.AssetModel.GetAssetsTotalCount() + }) + if err != nil { + return nil, types2.AppErrInternal + } + + resp = &types.Assets{ + Assets: make([]*types.Asset, 0), + Total: uint32(total), + } + if total == 0 || total <= int64(req.Offset) { + return resp, nil + } + + assets, err := l.svcCtx.AssetModel.GetAssetsList(int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + + resp.Assets = make([]*types.Asset, 0) + for _, asset := range assets { + resp.Assets = append(resp.Assets, &types.Asset{ + Id: asset.AssetId, + Name: asset.AssetName, + Decimals: asset.Decimals, + Symbol: asset.AssetSymbol, + Address: asset.L1Address, + IsGasAsset: asset.IsGasAsset, + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/block/getblocklogic.go b/service/apiserver/internal/logic/block/getblocklogic.go new file mode 100644 index 000000000..0df90d252 --- /dev/null +++ b/service/apiserver/internal/logic/block/getblocklogic.go @@ -0,0 +1,83 @@ +package block + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + blockdao "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryByHeight = "height" + queryByCommitment = "commitment" +) + +type GetBlockLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetBlockLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBlockLogic { + return &GetBlockLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetBlockLogic) GetBlock(req *types.ReqGetBlock) (resp *types.Block, err error) { + var block *blockdao.Block + switch req.By { + case queryByHeight: + blockHeight, err := strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for block height") + } + if blockHeight < 0 { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for block height") + } + block, _ = l.svcCtx.MemCache.GetBlockByHeightWithFallback(blockHeight, func() (interface{}, error) { + return 
l.svcCtx.BlockModel.GetBlockByHeight(blockHeight) + }) + case queryByCommitment: + block, err = l.svcCtx.MemCache.GetBlockByCommitmentWithFallback(req.Value, func() (interface{}, error) { + return l.svcCtx.BlockModel.GetBlockByCommitment(req.Value) + }) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be height|commitment") + } + + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.DbErrNotFound + } + return nil, types2.AppErrInternal + } + + resp = &types.Block{ + Commitment: block.BlockCommitment, + Height: block.BlockHeight, + StateRoot: block.StateRoot, + PriorityOperations: block.PriorityOperations, + PendingOnChainOperationsHash: block.PendingOnChainOperationsHash, + PendingOnChainOperationsPubData: block.PendingOnChainOperationsPubData, + CommittedTxHash: block.CommittedTxHash, + CommittedAt: block.CommittedAt, + VerifiedTxHash: block.VerifiedTxHash, + VerifiedAt: block.VerifiedAt, + Status: block.BlockStatus, + } + for _, t := range block.Txs { + tx := utils.DbtxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) + resp.Txs = append(resp.Txs, tx) + } + return resp, nil +} diff --git a/service/api/app/internal/logic/block/getblockslogic.go b/service/apiserver/internal/logic/block/getblockslogic.go similarity index 52% rename from service/api/app/internal/logic/block/getblockslogic.go rename to service/apiserver/internal/logic/block/getblockslogic.go index 621b77ba8..61bb3d1da 100644 --- a/service/api/app/internal/logic/block/getblockslogic.go +++ b/service/apiserver/internal/logic/block/getblockslogic.go @@ -5,18 +5,16 @@ import ( "github.com/zeromicro/go-zero/core/logx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/api/app/internal/logic/utils" - "github.com/bnb-chain/zkbas/service/api/app/internal/repo/block" - "github.com/bnb-chain/zkbas/service/api/app/internal/svc" - "github.com/bnb-chain/zkbas/service/api/app/internal/types" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" ) type GetBlocksLogic struct { logx.Logger ctx context.Context svcCtx *svc.ServiceContext - block block.Block } func NewGetBlocksLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBlocksLogic { @@ -24,31 +22,33 @@ func NewGetBlocksLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBloc Logger: logx.WithContext(ctx), ctx: ctx, svcCtx: svcCtx, - block: block.New(svcCtx), } } -func (l *GetBlocksLogic) GetBlocks(req *types.ReqGetBlocks) (*types.RespGetBlocks, error) { - blocks, err := l.block.GetBlocksList(l.ctx, int64(req.Limit), int64(req.Offset)) +func (l *GetBlocksLogic) GetBlocks(req *types.ReqGetRange) (*types.Blocks, error) { + total, err := l.svcCtx.MemCache.GetBlockTotalCountWithFallback(func() (interface{}, error) { + return l.svcCtx.BlockModel.GetCurrentHeight() + }) if err != nil { - logx.Errorf("[GetBlocksList] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.AppErrNotFound - } - return nil, errorcode.AppErrInternal - } - total, err := l.block.GetBlocksTotalCount(l.ctx) - if err != nil { - return nil, errorcode.AppErrInternal + return nil, types2.AppErrInternal } - resp := &types.RespGetBlocks{ - Total: uint32(total), + + resp := &types.Blocks{ Blocks: make([]*types.Block, 0), + Total: uint32(total), + } + if total == 0 || total <= 
int64(req.Offset) { + return resp, nil + } + + blocks, err := l.svcCtx.BlockModel.GetBlocksList(int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal } for _, b := range blocks { block := &types.Block{ - BlockCommitment: b.BlockCommitment, - BlockHeight: b.BlockHeight, + Commitment: b.BlockCommitment, + Height: b.BlockHeight, StateRoot: b.StateRoot, PriorityOperations: b.PriorityOperations, PendingOnChainOperationsHash: b.PendingOnChainOperationsHash, @@ -57,10 +57,11 @@ func (l *GetBlocksLogic) GetBlocks(req *types.ReqGetBlocks) (*types.RespGetBlock CommittedAt: b.CommittedAt, VerifiedTxHash: b.VerifiedTxHash, VerifiedAt: b.VerifiedAt, - BlockStatus: b.BlockStatus, + Status: b.BlockStatus, } for _, t := range b.Txs { - tx := utils.GormTx2Tx(t) + tx := utils.DbtxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) block.Txs = append(block.Txs, tx) } resp.Blocks = append(resp.Blocks, block) diff --git a/service/apiserver/internal/logic/block/getcurrentheightlogic.go b/service/apiserver/internal/logic/block/getcurrentheightlogic.go new file mode 100644 index 000000000..d45eb0fc9 --- /dev/null +++ b/service/apiserver/internal/logic/block/getcurrentheightlogic.go @@ -0,0 +1,38 @@ +package block + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetCurrentHeightLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetCurrentHeightLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrentHeightLogic { + return &GetCurrentHeightLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetCurrentHeightLogic) GetCurrentHeight() (resp *types.CurrentHeight, err error) { + resp = &types.CurrentHeight{} + height, err := l.svcCtx.BlockModel.GetCurrentHeight() + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + resp.Height = height + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getcurrencypricelogic.go b/service/apiserver/internal/logic/info/getcurrencypricelogic.go new file mode 100644 index 000000000..b4922f7cd --- /dev/null +++ b/service/apiserver/internal/logic/info/getcurrencypricelogic.go @@ -0,0 +1,67 @@ +package info + +import ( + "context" + "strconv" + "strings" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryBySymbol = "symbol" +) + +type GetCurrencyPriceLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetCurrencyPriceLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrencyPriceLogic { + return &GetCurrencyPriceLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetCurrencyPriceLogic) GetCurrencyPrice(req *types.ReqGetCurrencyPrice) (resp *types.CurrencyPrice, err error) { + symbol := "" + switch req.By { + case queryBySymbol: + if !utils.ValidateSymbol(req.Value) { + logx.Errorf("invalid Symbol: %s", req.Value) + return nil, types2.AppErrInvalidParam.RefineError("invalid symbol") + } + symbol = 
strings.ToUpper(req.Value) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be symbol") + } + + asset, err := l.svcCtx.MemCache.GetAssetBySymbolWithFallback(symbol, func() (interface{}, error) { + return l.svcCtx.AssetModel.GetAssetBySymbol(symbol) + }) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + price, err := l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, symbol) + if err != nil { + return nil, types2.AppErrInternal + } + resp = &types.CurrencyPrice{ + Pair: asset.AssetSymbol + "/" + "USDT", + Price: strconv.FormatFloat(price, 'E', -1, 64), + AssetId: uint32(asset.ID), + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getcurrencypriceslogic.go b/service/apiserver/internal/logic/info/getcurrencypriceslogic.go new file mode 100644 index 000000000..949d6726b --- /dev/null +++ b/service/apiserver/internal/logic/info/getcurrencypriceslogic.go @@ -0,0 +1,70 @@ +package info + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetCurrencyPricesLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetCurrencyPricesLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetCurrencyPricesLogic { + return &GetCurrencyPricesLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetCurrencyPricesLogic) GetCurrencyPrices(req *types.ReqGetRange) (resp *types.CurrencyPrices, err error) { + total, err := l.svcCtx.MemCache.GetAssetTotalCountWithFallback(func() (interface{}, error) { + return l.svcCtx.AssetModel.GetAssetsTotalCount() + }) + if err != nil { + return nil, types2.AppErrInternal + } + + resp = &types.CurrencyPrices{ + CurrencyPrices: make([]*types.CurrencyPrice, 0), + Total: uint32(total), + } + if total == 0 || total <= int64(req.Offset) { + return resp, nil + } + + assets, err := l.svcCtx.AssetModel.GetAssetsList(int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + + for _, asset := range assets { + price := 0.0 + if asset.AssetSymbol == "LEG" { + price = 1.0 + } else if asset.AssetSymbol == "REY" { + price = 0.5 + } else { + price, err = l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, asset.AssetSymbol) + if err != nil { + logx.Errorf("fail to get price for symbol: %s, err: %s", asset.AssetSymbol, err.Error()) + return nil, types2.AppErrInternal + } + } + + resp.CurrencyPrices = append(resp.CurrencyPrices, &types.CurrencyPrice{ + Pair: asset.AssetSymbol + "/" + "USDT", + AssetId: asset.AssetId, + Price: strconv.FormatFloat(price, 'E', -1, 64), + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getgasaccountlogic.go b/service/apiserver/internal/logic/info/getgasaccountlogic.go new file mode 100644 index 000000000..9e4be8d17 --- /dev/null +++ b/service/apiserver/internal/logic/info/getgasaccountlogic.go @@ -0,0 +1,55 @@ +package info + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetGasAccountLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func 
NewGetGasAccountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasAccountLogic { + return &GetGasAccountLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetGasAccountLogic) GetGasAccount() (resp *types.GasAccount, err error) { + accountIndexConfig, err := l.svcCtx.MemCache.GetSysConfigWithFallback(types2.GasAccountIndex, func() (interface{}, error) { + return l.svcCtx.SysConfigModel.GetSysConfigByName(types2.GasAccountIndex) + }) + if err != nil { + return nil, types2.AppErrInternal + } + + accountIndex, err := strconv.ParseInt(accountIndexConfig.Value, 10, 64) + if err != nil { + logx.Errorf("invalid account index: %s", accountIndexConfig.Value) + return nil, types2.AppErrInternal + } + + account, err := l.svcCtx.MemCache.GetAccountWithFallback(accountIndex, func() (interface{}, error) { + return l.svcCtx.AccountModel.GetAccountByIndex(accountIndex) + }) + if err != nil { + return nil, types2.AppErrInternal + } + + resp = &types.GasAccount{ + Status: int64(account.Status), + Index: account.AccountIndex, + Name: account.AccountName, + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getgasfeeassetslogic.go b/service/apiserver/internal/logic/info/getgasfeeassetslogic.go new file mode 100644 index 000000000..35859e971 --- /dev/null +++ b/service/apiserver/internal/logic/info/getgasfeeassetslogic.go @@ -0,0 +1,46 @@ +package info + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetGasFeeAssetsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetGasFeeAssetsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasFeeAssetsLogic { + return &GetGasFeeAssetsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetGasFeeAssetsLogic) GetGasFeeAssets() (resp *types.GasFeeAssets, err error) { + resp = &types.GasFeeAssets{Assets: make([]types.Asset, 0)} + + assets, err := l.svcCtx.AssetModel.GetGasAssets() + if err != nil { + return nil, types2.AppErrInternal + } + + for _, asset := range assets { + resp.Assets = append(resp.Assets, types.Asset{ + Id: asset.AssetId, + Name: asset.AssetName, + Decimals: asset.Decimals, + Symbol: asset.AssetSymbol, + Address: asset.L1Address, + IsGasAsset: asset.IsGasAsset, + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getgasfeelogic.go b/service/apiserver/internal/logic/info/getgasfeelogic.go new file mode 100644 index 000000000..96e41d97b --- /dev/null +++ b/service/apiserver/internal/logic/info/getgasfeelogic.go @@ -0,0 +1,86 @@ +package info + +import ( + "context" + asset2 "github.com/bnb-chain/zkbas/dao/asset" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetGasFeeLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetGasFeeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetGasFeeLogic { + return &GetGasFeeLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetGasFeeLogic) GetGasFee(req *types.ReqGetGasFee) 
(*types.GasFee, error) {
+	resp := &types.GasFee{}
+
+	asset, err := l.svcCtx.MemCache.GetAssetByIdWithFallback(int64(req.AssetId), func() (interface{}, error) {
+		return l.svcCtx.AssetModel.GetAssetById(int64(req.AssetId))
+	})
+	if err != nil {
+		if err == types2.DbErrNotFound {
+			return nil, types2.AppErrNotFound
+		}
+		return nil, types2.AppErrInternal
+	}
+
+	if asset.IsGasAsset != asset2.IsGasAsset {
+		logx.Errorf("not gas asset id: %d", asset.AssetId)
+		return nil, types2.AppErrInvalidGasAsset
+	}
+	sysGasFee, err := l.svcCtx.MemCache.GetSysConfigWithFallback(types2.SysGasFee, func() (interface{}, error) {
+		return l.svcCtx.SysConfigModel.GetSysConfigByName(types2.SysGasFee)
+	})
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+
+	sysGasFeeBigInt, isValid := new(big.Int).SetString(sysGasFee.Value, 10)
+	if !isValid {
+		logx.Errorf("invalid sys gas fee value: %s", sysGasFee.Value)
+		return nil, types2.AppErrInternal
+	}
+	// if asset id == BNB, just return
+	if asset.AssetId == types2.BNBAssetId {
+		resp.GasFee = sysGasFeeBigInt.String()
+		return resp, nil
+	}
+	// if not, try to compute the gas amount based on USD
+	assetPrice, err := l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, asset.AssetSymbol)
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+	bnbPrice, err := l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, "BNB")
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+	bnbDecimals, _ := new(big.Int).SetString(types2.BNBDecimalsStr, 10)
+	assetDecimals := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(asset.Decimals)), nil)
+	// bnbPrice * bnbAmount * assetDecimals / (10^18 * assetPrice)
+	left := ffmath.FloatMul(ffmath.FloatMul(big.NewFloat(bnbPrice), ffmath.IntToFloat(sysGasFeeBigInt)), ffmath.IntToFloat(assetDecimals))
+	right := ffmath.FloatMul(ffmath.IntToFloat(bnbDecimals), big.NewFloat(assetPrice))
+	gasFee, err := common.CleanPackedFee(ffmath.FloatToInt(ffmath.FloatDiv(left, right)))
+	if err != nil {
+		logx.Errorf("unable to clean packed fee: %s", err.Error())
+		return nil, types2.AppErrInternal
+	}
+	resp.GasFee = gasFee.String()
+	return resp, nil
+}
diff --git a/service/apiserver/internal/logic/info/getlayer2basicinfologic.go b/service/apiserver/internal/logic/info/getlayer2basicinfologic.go
new file mode 100644
index 000000000..90621c8df
--- /dev/null
+++ b/service/apiserver/internal/logic/info/getlayer2basicinfologic.go
@@ -0,0 +1,102 @@
+package info
+
+import (
+	"context"
+	"time"
+
+	"github.com/zeromicro/go-zero/core/logx"
+
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/svc"
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/types"
+	types2 "github.com/bnb-chain/zkbas/types"
+)
+
+type GetLayer2BasicInfoLogic struct {
+	logx.Logger
+	ctx    context.Context
+	svcCtx *svc.ServiceContext
+}
+
+func NewGetLayer2BasicInfoLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLayer2BasicInfoLogic {
+	return &GetLayer2BasicInfoLogic{
+		Logger: logx.WithContext(ctx),
+		ctx:    ctx,
+		svcCtx: svcCtx,
+	}
+}
+
+var (
+	contractNames = []string{
+		"ZkbasContract",
+		"ZnsPriceOracle",
+		"AssetGovernanceContract",
+	}
+)
+
+func (l *GetLayer2BasicInfoLogic) GetLayer2BasicInfo() (*types.Layer2BasicInfo, error) {
+	resp := &types.Layer2BasicInfo{
+		ContractAddresses: make([]types.ContractAddress, 0),
+	}
+	var err error
+	resp.BlockCommitted, err = l.svcCtx.BlockModel.GetCommittedBlocksCount()
+	if err != nil {
+		if err != types2.DbErrNotFound {
+			return nil, types2.AppErrInternal
+		}
+	}
+	resp.BlockVerified, err =
l.svcCtx.BlockModel.GetVerifiedBlocksCount() + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + resp.TotalTransactionCount, err = l.svcCtx.MemCache.GetTxTotalCountWithFallback(func() (interface{}, error) { + return l.svcCtx.TxModel.GetTxsTotalCount() + }) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + + now := time.Now() + today := now.Round(24 * time.Hour).Add(-8 * time.Hour) + + resp.YesterdayTransactionCount, err = l.svcCtx.TxModel.GetTxsTotalCountBetween(today.Add(-24*time.Hour), today) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + resp.TodayTransactionCount, err = l.svcCtx.TxModel.GetTxsTotalCountBetween(today, now) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + resp.YesterdayActiveUserCount, err = l.svcCtx.TxModel.GetDistinctAccountsCountBetween(today.Add(-24*time.Hour), today) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + resp.TodayActiveUserCount, err = l.svcCtx.TxModel.GetDistinctAccountsCountBetween(today, now) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + for _, contractName := range contractNames { + contract, err := l.svcCtx.MemCache.GetSysConfigWithFallback(contractName, func() (interface{}, error) { + return l.svcCtx.SysConfigModel.GetSysConfigByName(contractName) + }) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + resp.ContractAddresses = append(resp.ContractAddresses, + types.ContractAddress{Name: contractName, Address: contract.Value}) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/info/getwithdrawgasfeelogic.go b/service/apiserver/internal/logic/info/getwithdrawgasfeelogic.go new file mode 100644 index 000000000..29ea41fb7 --- /dev/null +++ b/service/apiserver/internal/logic/info/getwithdrawgasfeelogic.go @@ -0,0 +1,86 @@ +package info + +import ( + "context" + asset2 "github.com/bnb-chain/zkbas/dao/asset" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetWithdrawGasFeeLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetWithdrawGasFeeLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetWithdrawGasFeeLogic { + return &GetWithdrawGasFeeLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetWithdrawGasFeeLogic) GetWithdrawGasFee(req *types.ReqGetWithdrawGasFee) (*types.GasFee, error) { + resp := &types.GasFee{} + + asset, err := l.svcCtx.MemCache.GetAssetByIdWithFallback(int64(req.AssetId), func() (interface{}, error) { + return l.svcCtx.AssetModel.GetAssetById(int64(req.AssetId)) + }) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + if asset.IsGasAsset != asset2.IsGasAsset { + logx.Errorf("not gas asset id: %d", asset.AssetId) + return nil, types2.AppErrInvalidGasAsset + } + sysGasFee, err := l.svcCtx.MemCache.GetSysConfigWithFallback(types2.SysGasFee, func() (interface{}, error) { + return 
l.svcCtx.SysConfigModel.GetSysConfigByName(types2.SysGasFee)
+	})
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+
+	sysGasFeeBigInt, isValid := new(big.Int).SetString(sysGasFee.Value, 10)
+	if !isValid {
+		logx.Errorf("invalid sys gas fee value: %s", sysGasFee.Value)
+		return nil, types2.AppErrInternal
+	}
+	// if asset id == BNB, just return
+	if asset.AssetId == types2.BNBAssetId {
+		resp.GasFee = sysGasFeeBigInt.String()
+		return resp, nil
+	}
+	// if not, try to compute the gas amount based on USD
+	assetPrice, err := l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, asset.AssetSymbol)
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+	bnbPrice, err := l.svcCtx.PriceFetcher.GetCurrencyPrice(l.ctx, "BNB")
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+	bnbDecimals, _ := new(big.Int).SetString(types2.BNBDecimalsStr, 10)
+	assetDecimals := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(asset.Decimals)), nil)
+	// bnbPrice * bnbAmount * assetDecimals / (10^18 * assetPrice)
+	left := ffmath.FloatMul(ffmath.FloatMul(big.NewFloat(bnbPrice), ffmath.IntToFloat(sysGasFeeBigInt)), ffmath.IntToFloat(assetDecimals))
+	right := ffmath.FloatMul(ffmath.IntToFloat(bnbDecimals), big.NewFloat(assetPrice))
+	gasFee, err := common.CleanPackedFee(ffmath.FloatToInt(ffmath.FloatDiv(left, right)))
+	if err != nil {
+		logx.Errorf("unable to clean packed fee: %s", err.Error())
+		return nil, types2.AppErrInternal
+	}
+	resp.GasFee = gasFee.String()
+	return resp, nil
+}
diff --git a/service/apiserver/internal/logic/info/searchlogic.go b/service/apiserver/internal/logic/info/searchlogic.go
new file mode 100644
index 000000000..2f0677659
--- /dev/null
+++ b/service/apiserver/internal/logic/info/searchlogic.go
@@ -0,0 +1,65 @@
+package info
+
+import (
+	"context"
+	"strconv"
+	"strings"
+
+	"github.com/zeromicro/go-zero/core/logx"
+
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/svc"
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/types"
+	types2 "github.com/bnb-chain/zkbas/types"
+)
+
+type SearchLogic struct {
+	logx.Logger
+	ctx    context.Context
+	svcCtx *svc.ServiceContext
+}
+
+func NewSearchLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SearchLogic {
+	return &SearchLogic{
+		Logger: logx.WithContext(ctx),
+		ctx:    ctx,
+		svcCtx: svcCtx,
+	}
+}
+
+func (l *SearchLogic) Search(req *types.ReqSearch) (*types.Search, error) {
+	resp := &types.Search{}
+	blockHeight, err := strconv.ParseInt(req.Keyword, 10, 64)
+	if err == nil {
+		if _, err = l.svcCtx.BlockModel.GetBlockByHeight(blockHeight); err != nil {
+			if err == types2.DbErrNotFound {
+				return nil, types2.AppErrNotFound
+			}
+			return nil, types2.AppErrInternal
+		}
+		resp.DataType = types2.TypeBlockHeight
+		return resp, nil
+	}
+
+	if strings.Contains(req.Keyword, ".") {
+		if _, err = l.svcCtx.MemCache.GetAccountIndexByName(req.Keyword); err != nil {
+			if err == types2.DbErrNotFound {
+				return nil, types2.AppErrNotFound
+			}
+			return nil, types2.AppErrInternal
+		}
+		resp.DataType = types2.TypeAccountName
+		return resp, nil
+	}
+
+	if _, err = l.svcCtx.MemCache.GetAccountIndexByPk(req.Keyword); err == nil {
+		resp.DataType = types2.TypeAccountPk
+		return resp, nil
+	}
+
+	if _, err = l.svcCtx.TxModel.GetTxByHash(req.Keyword); err == nil {
+		resp.DataType = types2.TypeTxType
+		return resp, nil
+	}
+
+	return resp, types2.AppErrNotFound
+}
diff --git a/service/apiserver/internal/logic/nft/getaccountnftslogic.go b/service/apiserver/internal/logic/nft/getaccountnftslogic.go
new file mode 100644
index 
000000000..32a9366e7 --- /dev/null +++ b/service/apiserver/internal/logic/nft/getaccountnftslogic.go @@ -0,0 +1,91 @@ +package nft + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryByAccountIndex = "account_index" + queryByAccountName = "account_name" + queryByAccountPk = "account_pk" +) + +type GetAccountNftsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetAccountNftsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountNftsLogic { + return &GetAccountNftsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAccountNftsLogic) GetAccountNfts(req *types.ReqGetAccountNfts) (resp *types.Nfts, err error) { + resp = &types.Nfts{ + Nfts: make([]*types.Nft, 0), + } + + accountIndex := int64(0) + switch req.By { + case queryByAccountIndex: + accountIndex, err = strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for account_index") + } + case queryByAccountName: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByName(req.Value) + case queryByAccountPk: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByPk(req.Value) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be account_index|account_name|account_pk") + } + + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + + total, err := l.svcCtx.NftModel.GetAccountNftTotalCount(accountIndex) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + + resp.Total = total + if total == 0 || total <= int64(req.Offset) { + return resp, nil + } + + nftList, err := l.svcCtx.NftModel.GetNftListByAccountIndex(accountIndex, int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + + for _, nftItem := range nftList { + resp.Nfts = append(resp.Nfts, &types.Nft{ + Index: nftItem.NftIndex, + CreatorAccountIndex: nftItem.CreatorAccountIndex, + OwnerAccountIndex: nftItem.OwnerAccountIndex, + ContentHash: nftItem.NftContentHash, + L1Address: nftItem.NftL1Address, + L1TokenId: nftItem.NftL1TokenId, + CreatorTreasuryRate: nftItem.CreatorTreasuryRate, + CollectionId: nftItem.CollectionId, + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/nft/getmaxofferidlogic.go b/service/apiserver/internal/logic/nft/getmaxofferidlogic.go new file mode 100644 index 000000000..5c1f3ae43 --- /dev/null +++ b/service/apiserver/internal/logic/nft/getmaxofferidlogic.go @@ -0,0 +1,64 @@ +package nft + +import ( + "context" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/core/executor" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetMaxOfferIdLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetMaxOfferIdLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetMaxOfferIdLogic { + return &GetMaxOfferIdLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetMaxOfferIdLogic) GetMaxOfferId(req *types.ReqGetMaxOfferId) (resp *types.MaxOfferId, err 
error) { + account, err := l.svcCtx.StateFetcher.GetLatestAccount(int64(req.AccountIndex)) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + maxOfferId := int64(0) + var maxOfferIdAsset *types2.AccountAsset + for _, asset := range account.AssetInfo { + if asset.OfferCanceledOrFinalized != nil && asset.OfferCanceledOrFinalized.Cmp(big.NewInt(0)) > 0 { + if maxOfferIdAsset == nil || asset.AssetId > maxOfferIdAsset.AssetId { + maxOfferIdAsset = asset + } + } + } + + if maxOfferIdAsset != nil { + offerCancelOrFinalized := int64(0) + bitLen := maxOfferIdAsset.OfferCanceledOrFinalized.BitLen() + for i := bitLen; i >= 0; i-- { + if maxOfferIdAsset.OfferCanceledOrFinalized.Bit(i) == 1 { + offerCancelOrFinalized = int64(i) + break + } + } + maxOfferId = maxOfferIdAsset.AssetId * executor.OfferPerAsset + maxOfferId = maxOfferId + offerCancelOrFinalized + } + + return &types.MaxOfferId{ + OfferId: uint64(maxOfferId), + }, nil +} diff --git a/service/apiserver/internal/logic/pair/getlpvaluelogic.go b/service/apiserver/internal/logic/pair/getlpvaluelogic.go new file mode 100644 index 000000000..19f72c022 --- /dev/null +++ b/service/apiserver/internal/logic/pair/getlpvaluelogic.go @@ -0,0 +1,60 @@ +package pair + +import ( + "context" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetLpValueLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetLpValueLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLpValueLogic { + return &GetLpValueLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetLpValueLogic) GetLPValue(req *types.ReqGetLpValue) (resp *types.LpValue, err error) { + amount, isTure := new(big.Int).SetString(req.LpAmount, 10) + if !isTure { + logx.Errorf("fail to convert string: %s to int", req.LpAmount) + return nil, types2.AppErrInvalidParam.RefineError("invalid LpAmount") + } + + liquidity, err := l.svcCtx.StateFetcher.GetLatestLiquidity(int64(req.PairIndex)) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + assetAAmount, assetBAmount := big.NewInt(0), big.NewInt(0) + if liquidity.LpAmount.Cmp(big.NewInt(0)) > 0 { + assetAAmount, assetBAmount, err = chain.ComputeRemoveLiquidityAmount(liquidity, amount) + if err != nil { + logx.Errorf("fail to compute liquidity amount, err: %s", err.Error()) + return nil, types2.AppErrInternal + } + } + + resp = &types.LpValue{ + AssetAId: uint32(liquidity.AssetAId), + AssetAAmount: assetAAmount.String(), + AssetBId: uint32(liquidity.AssetBId), + AssetBAmount: assetBAmount.String(), + } + + return resp, nil +} diff --git a/service/apiserver/internal/logic/pair/getpairlogic.go b/service/apiserver/internal/logic/pair/getpairlogic.go new file mode 100644 index 000000000..dd5d36b5c --- /dev/null +++ b/service/apiserver/internal/logic/pair/getpairlogic.go @@ -0,0 +1,44 @@ +package pair + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetPairLogic struct { + logx.Logger + ctx 
context.Context + svcCtx *svc.ServiceContext +} + +func NewGetPairLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetPairLogic { + return &GetPairLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetPairLogic) GetPair(req *types.ReqGetPair) (resp *types.Pair, err error) { + pair, err := l.svcCtx.StateFetcher.GetLatestLiquidity(int64(req.Index)) + if err != nil { + logx.Errorf("fail to get pair info: %d, err: %s", req.Index, err.Error()) + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + resp = &types.Pair{ + AssetAId: uint32(pair.AssetAId), + AssetAAmount: pair.AssetA.String(), + AssetBId: uint32(pair.AssetBId), + AssetBAmount: pair.AssetB.String(), + TotalLpAmount: pair.LpAmount.String(), + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/pair/getpairslogic.go b/service/apiserver/internal/logic/pair/getpairslogic.go new file mode 100644 index 000000000..54a7ff472 --- /dev/null +++ b/service/apiserver/internal/logic/pair/getpairslogic.go @@ -0,0 +1,61 @@ +package pair + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetPairsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetPairsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetPairsLogic { + return &GetPairsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetPairsLogic) GetPairs() (resp *types.Pairs, err error) { + resp = &types.Pairs{Pairs: make([]*types.Pair, 0)} + + liquidityAssets, err := l.svcCtx.LiquidityModel.GetAllLiquidityAssets() + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + + for _, liquidity := range liquidityAssets { + assetA, err := l.svcCtx.AssetModel.GetAssetById(liquidity.AssetAId) + if err != nil { + return nil, types2.AppErrInternal + } + assetB, err := l.svcCtx.AssetModel.GetAssetById(liquidity.AssetBId) + if err != nil { + return nil, types2.AppErrInternal + } + resp.Pairs = append(resp.Pairs, &types.Pair{ + Index: uint32(liquidity.PairIndex), + AssetAId: uint32(liquidity.AssetAId), + AssetAName: assetA.AssetName, + AssetAAmount: liquidity.AssetA, + AssetBId: uint32(liquidity.AssetBId), + AssetBName: assetB.AssetName, + AssetBAmount: liquidity.AssetB, + FeeRate: liquidity.FeeRate, + TreasuryRate: liquidity.TreasuryRate, + TotalLpAmount: liquidity.LpAmount, + }) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/pair/getswapamountlogic.go b/service/apiserver/internal/logic/pair/getswapamountlogic.go new file mode 100644 index 000000000..7e3a2ac7b --- /dev/null +++ b/service/apiserver/internal/logic/pair/getswapamountlogic.go @@ -0,0 +1,73 @@ +package pair + +import ( + "context" + "math/big" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetSwapAmountLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetSwapAmountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetSwapAmountLogic { + return &GetSwapAmountLogic{ + Logger: 
logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetSwapAmountLogic) GetSwapAmount(req *types.ReqGetSwapAmount) (*types.SwapAmount, error) { + deltaAmount, isTure := new(big.Int).SetString(req.AssetAmount, 10) + if !isTure { + logx.Errorf("fail to convert string: %s to int", req.AssetAmount) + return nil, types2.AppErrInvalidParam.RefineError("invalid AssetAmount") + } + + liquidity, err := l.svcCtx.StateFetcher.GetLatestLiquidity(int64(req.PairIndex)) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + + if liquidity.AssetA == nil || liquidity.AssetB == nil { + logx.Errorf("invalid liquidity: %v", liquidity) + return nil, types2.AppErrInternal + } + + if int64(req.AssetId) != liquidity.AssetAId && int64(req.AssetId) != liquidity.AssetBId { + logx.Errorf("invalid liquidity asset ids: %v", liquidity) + return nil, types2.AppErrInvalidParam.RefineError("invalid AssetId") + } + + if liquidity.AssetA.Cmp(big.NewInt(0)) == 0 || liquidity.AssetB.Cmp(big.NewInt(0)) == 0 { + logx.Errorf("invalid liquidity asset amount: %v", liquidity) + return nil, types2.AppErrInvalidParam.RefineError("invalid PairIndex, empty liquidity or invalid pair") + } + + var assetAmount *big.Int + var toAssetId int64 + assetAmount, toAssetId, err = chain.ComputeDelta(liquidity.AssetA, liquidity.AssetB, liquidity.AssetAId, liquidity.AssetBId, + int64(req.AssetId), req.IsFrom, deltaAmount, liquidity.FeeRate) + if err != nil { + logx.Errorf("fail to compute delta, err: %s", err.Error()) + return nil, types2.AppErrInternal + } + assetName, _ := l.svcCtx.MemCache.GetAssetNameById(toAssetId) + return &types.SwapAmount{ + AssetId: uint32(toAssetId), + AssetName: assetName, + AssetAmount: assetAmount.String(), + }, nil +} diff --git a/service/apiserver/internal/logic/root/getstatuslogic.go b/service/apiserver/internal/logic/root/getstatuslogic.go new file mode 100644 index 000000000..ca6fbdbc2 --- /dev/null +++ b/service/apiserver/internal/logic/root/getstatuslogic.go @@ -0,0 +1,31 @@ +package root + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +type GetStatusLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetStatusLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetStatusLogic { + return &GetStatusLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetStatusLogic) GetStatus() (resp *types.Status, err error) { + return &types.Status{ + Status: 200, + NetworkId: 1, + }, nil +} diff --git a/service/apiserver/internal/logic/transaction/getaccountmempooltxslogic.go b/service/apiserver/internal/logic/transaction/getaccountmempooltxslogic.go new file mode 100644 index 000000000..5ea729b0a --- /dev/null +++ b/service/apiserver/internal/logic/transaction/getaccountmempooltxslogic.go @@ -0,0 +1,71 @@ +package transaction + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetAccountMempoolTxsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetAccountMempoolTxsLogic(ctx 
context.Context, svcCtx *svc.ServiceContext) *GetAccountMempoolTxsLogic { + return &GetAccountMempoolTxsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAccountMempoolTxsLogic) GetAccountMempoolTxs(req *types.ReqGetAccountMempoolTxs) (resp *types.MempoolTxs, err error) { + resp = &types.MempoolTxs{ + MempoolTxs: make([]*types.Tx, 0), + } + + accountIndex := int64(0) + switch req.By { + case queryByAccountIndex: + accountIndex, err = strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for account_index") + } + case queryByAccountName: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByName(req.Value) + case queryByAccountPk: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByPk(req.Value) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be account_index|account_name|account_pk") + } + + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + + mempoolTxs, err := l.svcCtx.MempoolModel.GetPendingMempoolTxsByAccountIndex(accountIndex) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + + resp.Total = uint32(len(mempoolTxs)) + for _, t := range mempoolTxs { + tx := utils.DbMempooltxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) + tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId) + resp.MempoolTxs = append(resp.MempoolTxs, tx) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/transaction/getaccounttxslogic.go b/service/apiserver/internal/logic/transaction/getaccounttxslogic.go new file mode 100644 index 000000000..25d0d9688 --- /dev/null +++ b/service/apiserver/internal/logic/transaction/getaccounttxslogic.go @@ -0,0 +1,86 @@ +package transaction + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryByAccountIndex = "account_index" + queryByAccountName = "account_name" + queryByAccountPk = "account_pk" +) + +type GetAccountTxsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetAccountTxsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetAccountTxsLogic { + return &GetAccountTxsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetAccountTxsLogic) GetAccountTxs(req *types.ReqGetAccountTxs) (resp *types.Txs, err error) { + resp = &types.Txs{ + Txs: make([]*types.Tx, 0), + } + + accountIndex := int64(0) + switch req.By { + case queryByAccountIndex: + accountIndex, err = strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for account_index") + } + case queryByAccountName: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByName(req.Value) + case queryByAccountPk: + accountIndex, err = l.svcCtx.MemCache.GetAccountIndexByPk(req.Value) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be account_index|account_name|account_pk") + } + + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + + total, err := 
l.svcCtx.TxModel.GetTxsCountByAccountIndex(accountIndex) + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + + resp.Total = uint32(total) + if total == 0 || total <= int64(req.Offset) { + return resp, nil + } + + txs, err := l.svcCtx.TxModel.GetTxsListByAccountIndex(accountIndex, int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + + for _, t := range txs { + tx := utils.DbtxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) + tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId) + resp.Txs = append(resp.Txs, tx) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/transaction/getblocktxslogic.go b/service/apiserver/internal/logic/transaction/getblocktxslogic.go new file mode 100644 index 000000000..8cdbb8b44 --- /dev/null +++ b/service/apiserver/internal/logic/transaction/getblocktxslogic.go @@ -0,0 +1,77 @@ +package transaction + +import ( + "context" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + blockdao "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +const ( + queryByBlockHeight = "block_height" + queryByBlockCommitment = "block_commitment" +) + +type GetBlockTxsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetBlockTxsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetBlockTxsLogic { + return &GetBlockTxsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetBlockTxsLogic) GetBlockTxs(req *types.ReqGetBlockTxs) (resp *types.Txs, err error) { + resp = &types.Txs{ + Txs: make([]*types.Tx, 0), + } + + blockHeight := int64(0) + var block *blockdao.Block + switch req.By { + case queryByBlockHeight: + blockHeight, err = strconv.ParseInt(req.Value, 10, 64) + if err != nil { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for block height") + } + if blockHeight < 0 { + return nil, types2.AppErrInvalidParam.RefineError("invalid value for block height") + } + block, err = l.svcCtx.MemCache.GetBlockByHeightWithFallback(blockHeight, func() (interface{}, error) { + return l.svcCtx.BlockModel.GetBlockByHeight(blockHeight) + }) + case queryByBlockCommitment: + block, err = l.svcCtx.MemCache.GetBlockByCommitmentWithFallback(req.Value, func() (interface{}, error) { + return l.svcCtx.BlockModel.GetBlockByCommitment(req.Value) + }) + default: + return nil, types2.AppErrInvalidParam.RefineError("param by should be height|commitment") + } + + if err != nil { + if err == types2.DbErrNotFound { + return resp, nil + } + return nil, types2.AppErrInternal + } + + resp.Total = uint32(len(block.Txs)) + for _, t := range block.Txs { + tx := utils.DbtxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) + tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId) + resp.Txs = append(resp.Txs, tx) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/transaction/getmempooltxslogic.go b/service/apiserver/internal/logic/transaction/getmempooltxslogic.go new file mode 100644 index 000000000..471d3f3db --- /dev/null +++ b/service/apiserver/internal/logic/transaction/getmempooltxslogic.go @@ -0,0 +1,54 @@ +package transaction + +import ( + "context" + + 
"github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetMempoolTxsLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetMempoolTxsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetMempoolTxsLogic { + return &GetMempoolTxsLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} +func (l *GetMempoolTxsLogic) GetMempoolTxs(req *types.ReqGetRange) (*types.MempoolTxs, error) { + total, err := l.svcCtx.MempoolModel.GetMempoolTxsTotalCount() + if err != nil { + if err != types2.DbErrNotFound { + return nil, types2.AppErrInternal + } + } + + resp := &types.MempoolTxs{ + MempoolTxs: make([]*types.Tx, 0), + Total: uint32(total), + } + if total == 0 { + return resp, nil + } + + mempoolTxs, err := l.svcCtx.MempoolModel.GetMempoolTxsList(int64(req.Limit), int64(req.Offset)) + if err != nil { + return nil, types2.AppErrInternal + } + for _, t := range mempoolTxs { + tx := utils.DbMempooltxTx(t) + tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex) + tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId) + resp.MempoolTxs = append(resp.MempoolTxs, tx) + } + return resp, nil +} diff --git a/service/apiserver/internal/logic/transaction/getnextnoncelogic.go b/service/apiserver/internal/logic/transaction/getnextnoncelogic.go new file mode 100644 index 000000000..b1f05f612 --- /dev/null +++ b/service/apiserver/internal/logic/transaction/getnextnoncelogic.go @@ -0,0 +1,41 @@ +package transaction + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/core" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetNextNonceLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewGetNextNonceLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetNextNonceLogic { + return &GetNextNonceLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (l *GetNextNonceLogic) GetNextNonce(req *types.ReqGetNextNonce) (*types.NextNonce, error) { + bc := core.NewBlockChainForDryRun(l.svcCtx.AccountModel, l.svcCtx.LiquidityModel, l.svcCtx.NftModel, l.svcCtx.MempoolModel, + l.svcCtx.RedisCache) + nonce, err := bc.StateDB().GetPendingNonce(int64(req.AccountIndex)) + if err != nil { + if err == types2.DbErrNotFound { + return nil, types2.AppErrNotFound + } + return nil, types2.AppErrInternal + } + return &types.NextNonce{ + Nonce: uint64(nonce), + }, nil +} diff --git a/service/apiserver/internal/logic/transaction/gettxlogic.go b/service/apiserver/internal/logic/transaction/gettxlogic.go new file mode 100644 index 000000000..77d298300 --- /dev/null +++ b/service/apiserver/internal/logic/transaction/gettxlogic.go @@ -0,0 +1,62 @@ +package transaction + +import ( + "context" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type GetTxLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + 
+func NewGetTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxLogic {
+	return &GetTxLogic{
+		Logger: logx.WithContext(ctx),
+		ctx:    ctx,
+		svcCtx: svcCtx,
+	}
+}
+
+func (l *GetTxLogic) GetTx(req *types.ReqGetTx) (resp *types.EnrichedTx, err error) {
+	resp = &types.EnrichedTx{}
+	tx, err := l.svcCtx.MemCache.GetTxByHashWithFallback(req.Hash, func() (interface{}, error) {
+		return l.svcCtx.TxModel.GetTxByHash(req.Hash)
+	})
+	if err == nil {
+		resp.Tx = *utils.DbtxTx(tx)
+		resp.Tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex)
+		resp.Tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId)
+		block, err := l.svcCtx.MemCache.GetBlockByHeightWithFallback(tx.BlockHeight, func() (interface{}, error) {
+			return l.svcCtx.BlockModel.GetBlockByHeight(tx.BlockHeight)
+		})
+		if err == nil {
+			resp.CommittedAt = block.CommittedAt
+			resp.ExecutedAt = block.CreatedAt.Unix()
+			resp.VerifiedAt = block.VerifiedAt
+		}
+	} else {
+		if err != types2.DbErrNotFound {
+			return nil, types2.AppErrInternal
+		}
+		mempoolTx, err := l.svcCtx.MempoolModel.GetMempoolTxByTxHash(req.Hash)
+		if err != nil {
+			if err == types2.DbErrNotFound {
+				return nil, types2.AppErrNotFound
+			}
+			return nil, types2.AppErrInternal
+		}
+		resp.Tx = *utils.DbMempooltxTx(mempoolTx)
+		resp.Tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(mempoolTx.AccountIndex)
+		resp.Tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(mempoolTx.AssetId)
+	}
+
+	return resp, nil
+}
diff --git a/service/apiserver/internal/logic/transaction/gettxslogic.go b/service/apiserver/internal/logic/transaction/gettxslogic.go
new file mode 100644
index 000000000..cdb07fadf
--- /dev/null
+++ b/service/apiserver/internal/logic/transaction/gettxslogic.go
@@ -0,0 +1,56 @@
+package transaction
+
+import (
+	"context"
+
+	"github.com/zeromicro/go-zero/core/logx"
+
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/logic/utils"
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/svc"
+	"github.com/bnb-chain/zkbas/service/apiserver/internal/types"
+	types2 "github.com/bnb-chain/zkbas/types"
+)
+
+type GetTxsLogic struct {
+	logx.Logger
+	ctx    context.Context
+	svcCtx *svc.ServiceContext
+}
+
+func NewGetTxsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetTxsLogic {
+	return &GetTxsLogic{
+		Logger: logx.WithContext(ctx),
+		ctx:    ctx,
+		svcCtx: svcCtx,
+	}
+}
+
+func (l *GetTxsLogic) GetTxs(req *types.ReqGetRange) (resp *types.Txs, err error) {
+	total, err := l.svcCtx.MemCache.GetTxTotalCountWithFallback(func() (interface{}, error) {
+		return l.svcCtx.TxModel.GetTxsTotalCount()
+	})
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+
+	resp = &types.Txs{
+		Total: uint32(total),
+		Txs:   make([]*types.Tx, 0),
+	}
+	if total == 0 || total <= int64(req.Offset) {
+		return resp, nil
+	}
+
+	txs, err := l.svcCtx.TxModel.GetTxsList(int64(req.Limit), int64(req.Offset))
+	if err != nil {
+		return nil, types2.AppErrInternal
+	}
+	for _, t := range txs {
+		tx := utils.DbtxTx(t)
+		tx.AccountName, _ = l.svcCtx.MemCache.GetAccountNameByIndex(tx.AccountIndex)
+		tx.AssetName, _ = l.svcCtx.MemCache.GetAssetNameById(tx.AssetId)
+		resp.Txs = append(resp.Txs, tx)
+	}
+
+	return resp, nil
+}
diff --git a/service/apiserver/internal/logic/transaction/sendtxlogic.go b/service/apiserver/internal/logic/transaction/sendtxlogic.go
new file mode 100644
index 000000000..c004c7b0e
--- /dev/null
+++ b/service/apiserver/internal/logic/transaction/sendtxlogic.go
@@ -0,0 +1,101 @@
+package transaction
+
+import (
+	"context"
+
"github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/core" + "github.com/bnb-chain/zkbas/core/executor" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +type SendTxLogic struct { + logx.Logger + ctx context.Context + svcCtx *svc.ServiceContext +} + +func NewSendTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTxLogic { + return &SendTxLogic{ + Logger: logx.WithContext(ctx), + ctx: ctx, + svcCtx: svcCtx, + } +} + +func (s *SendTxLogic) SendTx(req *types.ReqSendTx) (resp *types.TxHash, err error) { + resp = &types.TxHash{} + executor, err := s.getExecutor(int(req.TxType), req.TxInfo) + if err != nil { + return resp, types2.AppErrInvalidTx + } + if err := executor.Prepare(); err != nil { + return resp, err + } + if err := executor.VerifyInputs(); err != nil { + return resp, types2.AppErrInvalidTxField.RefineError(err.Error()) + } + + mempoolTx, err := executor.GenerateMempoolTx() + if err != nil { + return resp, types2.AppErrInternal + } + if err := s.svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { + logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) + failTx := &tx.FailTx{ + TxHash: mempoolTx.TxHash, + TxType: mempoolTx.TxType, + TxStatus: tx.StatusFail, + AssetAId: types2.NilAssetId, + AssetBId: types2.NilAssetId, + TxAmount: types2.NilAssetAmountStr, + TxInfo: req.TxInfo, + ExtraInfo: err.Error(), + Memo: "", + } + _ = s.svcCtx.FailTxModel.CreateFailTx(failTx) + return resp, types2.AppErrInternal + } + + resp.TxHash = mempoolTx.TxHash + return resp, nil +} + +func (s *SendTxLogic) getExecutor(txType int, txInfo string) (executor.TxExecutor, error) { + bc := core.NewBlockChainForDryRun(s.svcCtx.AccountModel, s.svcCtx.LiquidityModel, s.svcCtx.NftModel, s.svcCtx.MempoolModel, + s.svcCtx.RedisCache) + t := &tx.Tx{TxType: int64(txType), TxInfo: txInfo} + + switch txType { + case types2.TxTypeTransfer: + return executor.NewTransferExecutor(bc, t) + case types2.TxTypeSwap: + return executor.NewSwapExecutor(bc, t) + case types2.TxTypeAddLiquidity: + return executor.NewAddLiquidityExecutor(bc, t) + case types2.TxTypeRemoveLiquidity: + return executor.NewRemoveLiquidityExecutor(bc, t) + case types2.TxTypeWithdraw: + return executor.NewWithdrawExecutor(bc, t) + case types2.TxTypeTransferNft: + return executor.NewTransferNftExecutor(bc, t) + case types2.TxTypeAtomicMatch: + return executor.NewAtomicMatchExecutor(bc, t) + case types2.TxTypeCancelOffer: + return executor.NewCancelOfferExecutor(bc, t) + case types2.TxTypeWithdrawNft: + return executor.NewWithdrawNftExecutor(bc, t) + case types2.TxTypeCreateCollection: + return executor.NewCreateCollectionExecutor(bc, t) + case types2.TxTypeMintNft: + return executor.NewMintNftExecutor(bc, t) + default: + logx.Errorf("invalid tx type: %v", txType) + return nil, types2.AppErrInvalidTxType + } +} diff --git a/service/apiserver/internal/logic/utils/converter.go b/service/apiserver/internal/logic/utils/converter.go new file mode 100644 index 000000000..21084372f --- /dev/null +++ b/service/apiserver/internal/logic/utils/converter.go @@ -0,0 +1,55 @@ +package utils + +import ( + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func DbtxTx(tx *tx.Tx) 
*types.Tx { + return &types.Tx{ + Hash: tx.TxHash, + Type: tx.TxType, + GasFee: tx.GasFee, + GasFeeAssetId: tx.GasFeeAssetId, + Status: tx.TxStatus, + Index: tx.TxIndex, + BlockHeight: tx.BlockHeight, + StateRoot: tx.StateRoot, + NftIndex: tx.NftIndex, + PairIndex: tx.PairIndex, + CollectionId: tx.CollectionId, + AssetId: tx.AssetId, + Amount: tx.TxAmount, + NativeAddress: tx.NativeAddress, + Info: tx.TxInfo, + ExtraInfo: tx.ExtraInfo, + Memo: tx.Memo, + AccountIndex: tx.AccountIndex, + Nonce: tx.Nonce, + ExpiredAt: tx.ExpiredAt, + CreatedAt: tx.CreatedAt.Unix(), + } +} + +func DbMempooltxTx(tx *mempool.MempoolTx) *types.Tx { + return &types.Tx{ + Hash: tx.TxHash, + Type: tx.TxType, + GasFee: tx.GasFee, + GasFeeAssetId: tx.GasFeeAssetId, + Status: int64(tx.Status), + BlockHeight: tx.L2BlockHeight, + NftIndex: tx.NftIndex, + PairIndex: tx.PairIndex, + AssetId: tx.AssetId, + Amount: tx.TxAmount, + NativeAddress: tx.NativeAddress, + Info: tx.TxInfo, + ExtraInfo: tx.ExtraInfo, + Memo: tx.Memo, + AccountIndex: tx.AccountIndex, + Nonce: tx.Nonce, + ExpiredAt: tx.ExpiredAt, + } +} diff --git a/service/apiserver/internal/logic/utils/validator.go b/service/apiserver/internal/logic/utils/validator.go new file mode 100644 index 000000000..60d425bd9 --- /dev/null +++ b/service/apiserver/internal/logic/utils/validator.go @@ -0,0 +1,9 @@ +package utils + +const ( + minSymbolLength = 3 +) + +func ValidateSymbol(symbol string) bool { + return len(symbol) >= minSymbolLength +} diff --git a/service/apiserver/internal/svc/servicecontext.go b/service/apiserver/internal/svc/servicecontext.go new file mode 100644 index 000000000..a8fbed891 --- /dev/null +++ b/service/apiserver/internal/svc/servicecontext.go @@ -0,0 +1,81 @@ +package svc + +import ( + "time" + + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/dbcache" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/service/apiserver/internal/cache" + "github.com/bnb-chain/zkbas/service/apiserver/internal/config" + "github.com/bnb-chain/zkbas/service/apiserver/internal/fetcher/price" + "github.com/bnb-chain/zkbas/service/apiserver/internal/fetcher/state" +) + +type ServiceContext struct { + Config config.Config + RedisCache dbcache.Cache + MemCache *cache.MemCache + + MempoolModel mempool.MempoolModel + AccountModel account.AccountModel + AccountHistoryModel account.AccountHistoryModel + TxModel tx.TxModel + TxDetailModel tx.TxDetailModel + FailTxModel tx.FailTxModel + LiquidityModel liquidity.LiquidityModel + LiquidityHistoryModel liquidity.LiquidityHistoryModel + BlockModel block.BlockModel + NftModel nft.L2NftModel + AssetModel asset.AssetModel + SysConfigModel sysconfig.SysConfigModel + + PriceFetcher price.Fetcher + StateFetcher state.Fetcher +} + +func NewServiceContext(c config.Config) *ServiceContext { + gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) + if err != nil { + logx.Must(err) + } + redisCache := dbcache.NewRedisCache(c.CacheRedis[0].Host, c.CacheRedis[0].Pass, 15*time.Minute) + + mempoolModel := mempool.NewMempoolModel(gormPointer) + accountModel := account.NewAccountModel(gormPointer) + liquidityModel := 
liquidity.NewLiquidityModel(gormPointer) + nftModel := nft.NewL2NftModel(gormPointer) + assetModel := asset.NewAssetModel(gormPointer) + memCache := cache.NewMemCache(accountModel, assetModel, c.MemCache.AccountExpiration, c.MemCache.BlockExpiration, + c.MemCache.TxExpiration, c.MemCache.AssetExpiration, c.MemCache.PriceExpiration) + return &ServiceContext{ + Config: c, + RedisCache: redisCache, + MemCache: memCache, + MempoolModel: mempoolModel, + AccountModel: accountModel, + AccountHistoryModel: account.NewAccountHistoryModel(gormPointer), + TxModel: tx.NewTxModel(gormPointer), + TxDetailModel: tx.NewTxDetailModel(gormPointer), + FailTxModel: tx.NewFailTxModel(gormPointer), + LiquidityModel: liquidityModel, + LiquidityHistoryModel: liquidity.NewLiquidityHistoryModel(gormPointer), + BlockModel: block.NewBlockModel(gormPointer), + NftModel: nftModel, + AssetModel: assetModel, + SysConfigModel: sysconfig.NewSysConfigModel(gormPointer), + + PriceFetcher: price.NewFetcher(memCache, c.CoinMarketCap.Url, c.CoinMarketCap.Token), + StateFetcher: state.NewFetcher(redisCache, accountModel, liquidityModel, nftModel), + } +} diff --git a/service/apiserver/server.api b/service/apiserver/server.api new file mode 100644 index 000000000..de0e07ff4 --- /dev/null +++ b/service/apiserver/server.api @@ -0,0 +1,521 @@ +syntax = "v1" + +info( + date: "2022-5-1" + desc: "Public APIs for zkbas" + version: "1.0" +) + +/* ========================= root =========================*/ + +type ( + Status { + Status uint32 `json:"status"` + NetworkId uint32 `json:"network_id"` + } +) + +@server( + group: root +) + +service server-api { + @doc "Get status of zkbas" + @handler GetStatus + get / returns (Status) +} + +type ReqGetRange { + Offset uint32 `form:"offset,range=[0:100000]"` + Limit uint32 `form:"limit,range=[1:100]"` +} + +/* ========================= Account =========================*/ + +type ( + AccountAsset { + Id uint32 `json:"id"` + Name string `json:"name"` + Balance string `json:"balance"` + LpAmount string `json:"lp_amount"` + } + + Account { + Status uint32 `json:"status"` + Index int64 `json:"index"` + Name string `json:"name"` + Pk string `json:"pk"` + Nonce int64 `json:"nonce"` + Assets []*AccountAsset `json:"assets"` + } + + SimpleAccount { + Index int64 `json:"index"` + Name string `json:"name"` + Pk string `json:"pk"` + } + + Accounts { + Total uint32 `json:"total"` + Accounts []*SimpleAccount `json:"accounts"` + } +) + +type ( + ReqGetAccount { + By string `form:"by,options=index|name|pk"` + Value string `form:"value"` + } +) + +@server( + group: account +) + +service server-api { + @doc "Get accounts" + @handler GetAccounts + get /api/v1/accounts (ReqGetRange) returns (Accounts) + + @doc "Get account by account's name, index or pk" + @handler GetAccount + get /api/v1/account (ReqGetAccount) returns (Account) +} + +/* ========================= Asset =========================*/ + +type ( + Asset { + Id uint32 `json:"id"` + Name string `json:"name"` + Decimals uint32 `json:"decimals"` + Symbol string `json:"symbol"` + Address string `json:"address"` + IsGasAsset uint32 `json:"is_gas_asset"` + } + + Assets { + Total uint32 `json:"total"` + Assets []*Asset `json:"assets"` + } +) + +@server( + group: asset +) + +service server-api { + @doc "Get assets" + @handler GetAssets + get /api/v1/assets (ReqGetRange) returns (Assets) +} + +/* ========================= Block =========================*/ + +type ( + Block { + Commitment string `json:"commitment"` + Height int64 `json:"height"` + StateRoot string 
`json:"state_root"` + PriorityOperations int64 `json:"priority_operations"` + PendingOnChainOperationsHash string `json:"pending_on_chain_operations_hash"` + PendingOnChainOperationsPubData string `json:"pending_on_chain_operations_pub_data"` + CommittedTxHash string `json:"committed_tx_hash"` + CommittedAt int64 `json:"committed_at"` + VerifiedTxHash string `json:"verified_tx_hash"` + VerifiedAt int64 `json:"verified_at"` + Txs []*Tx `json:"txs"` + Status int64 `json:"status"` + } + + Blocks { + Total uint32 `json:"total"` + Blocks []*Block `json:"blocks"` + } + + CurrentHeight { + Height int64 `json:"height"` + } +) + +type ( + ReqGetBlock { + By string `form:"by,options=commitment|height"` + Value string `form:"value"` + } +) + +@server( + group: block +) + +service server-api { + @doc "Get blocks" + @handler GetBlocks + get /api/v1/blocks (ReqGetRange) returns (Blocks) + + @doc "Get block by its height or commitment" + @handler GetBlock + get /api/v1/block (ReqGetBlock) returns (Block) + + @handler GetCurrentHeight + get /api/v1/currentHeight returns (CurrentHeight) +} + +/* ========================= Info =========================*/ + +type ( + ContractAddress { + Name string `json:"name"` + Address string `json:"address"` + } + + Layer2BasicInfo { + BlockCommitted int64 `json:"block_committed"` + BlockVerified int64 `json:"block_verified"` + TotalTransactionCount int64 `json:"total_transaction_count"` + YesterdayTransactionCount int64 `json:"yesterday_transaction_count"` + TodayTransactionCount int64 `json:"today_transaction_count"` + YesterdayActiveUserCount int64 `json:"yesterday_active_user_count"` + TodayActiveUserCount int64 `json:"today_active_user_count"` + ContractAddresses []ContractAddress `json:"contract_addresses"` + } + + CurrencyPrice { + Pair string `json:"pair"` + AssetId uint32 `json:"asset_id"` + Price string `json:"price"` + } + + CurrencyPrices { + Total uint32 `json:"total"` + CurrencyPrices []*CurrencyPrice `json:"currency_prices"` + } + + GasFee { + GasFee string `json:"gas_fee"` + } + + GasAccount { + Status int64 `json:"status"` + Index int64 `json:"index"` + Name string `json:"name"` + } + + GasFeeAssets { + Assets []Asset `json:"assets"` + } + + Search { + DataType int32 `json:"data_type"` + } +) + +type ( + ReqGetCurrencyPrice { + By string `form:"by,options=symbol, default=symbol"` + Value string `form:"value"` + } + + ReqGetGasFee { + AssetId uint32 `form:"asset_id"` + } + + ReqGetWithdrawGasFee { + AssetId uint32 `form:"asset_id"` + } + + ReqSearch { + Keyword string `form:"keyword"` + } +) + +@server( + group: info +) + +service server-api { + @doc "Get zkbas general info, including contract address, and count of transactions and active users" + @handler GetLayer2BasicInfo + get /api/v1/layer2BasicInfo returns (Layer2BasicInfo) + + @doc "Get asset price by its symbol" + @handler GetCurrencyPrice + get /api/v1/currencyPrice (ReqGetCurrencyPrice) returns (CurrencyPrice) + + @doc "Get assets' prices" + @handler GetCurrencyPrices + get /api/v1/currencyPrices (ReqGetRange) returns (CurrencyPrices) + + @doc "Get gas fee amount for using a specific asset as gas asset" + @handler GetGasFee + get /api/v1/gasFee (ReqGetGasFee) returns (GasFee) + + @doc "Get withdraw gas fee amount for using a specific asset as gas asset" + @handler GetWithdrawGasFee + get /api/v1/withdrawGasFee (ReqGetWithdrawGasFee) returns (GasFee) + + @doc "Get supported gas fee assets" + @handler GetGasFeeAssets + get /api/v1/gasFeeAssets returns (GasFeeAssets) + + @doc "Get gas account, who 
will charge gas fees for transactions" + @handler GetGasAccount + get /api/v1/gasAccount returns (GasAccount) + + @doc "Search with a specific keyword" + @handler Search + get /api/v1/search (ReqSearch) returns (Search) +} + +/* =========================== Pair ==========================*/ + +type ( + SwapAmount { + AssetId uint32 `json:"asset_id"` + AssetName string `json:"asset_name"` + AssetAmount string `json:"asset_amount"` + } + + Pair { + Index uint32 `json:"index"` + AssetAId uint32 `json:"asset_a_id"` + AssetAName string `json:"asset_a_name"` + AssetAAmount string `json:"asset_a_amount"` + AssetBId uint32 `json:"asset_b_id"` + AssetBName string `json:"asset_b_name"` + AssetBAmount string `json:"asset_b_amount"` + FeeRate int64 `json:"fee_rate"` + TreasuryRate int64 `json:"treasury_rate"` + TotalLpAmount string `json:"total_lp_amount"` + } + Pairs { + Pairs []*Pair `json:"pairs"` + } + + LpValue { + AssetAId uint32 `json:"asset_a_id"` + AssetAName string `json:"asset_a_name"` + AssetAAmount string `json:"asset_a_amount"` + AssetBId uint32 `json:"asset_b_id"` + AssetBName string `json:"asset_b_name"` + AssetBAmount string `json:"asset_b_amount"` + } +) + +type ( + ReqGetSwapAmount { + PairIndex uint32 `form:"pair_index"` + AssetId uint32 `form:"asset_id"` + AssetAmount string `form:"asset_amount"` + IsFrom bool `form:"is_from"` + } + + ReqGetLpValue { + PairIndex uint32 `form:"pair_index"` + LpAmount string `form:"lp_amount"` + } + + ReqGetPair { + Index uint32 `form:"index"` + } +) + +@server( + group: pair +) + +service server-api { + @doc "Get swap amount for a specific liquidity pair and in asset amount" + @handler GetSwapAmount + get /api/v1/swapAmount (ReqGetSwapAmount) returns (SwapAmount) + + @doc "Get liquidity pairs" + @handler GetPairs + get /api/v1/pairs returns (Pairs) + + @doc "Get liquidity pool amount for a specific liquidity pair" + @handler GetLpValue + get /api/v1/lpValue (ReqGetLpValue) returns (LpValue) + + @doc "Get liquidity pool info by its index" + @handler GetPair + get /api/v1/pair (ReqGetPair) returns (Pair) +} + +/* ======================= Transaction =======================*/ + +type ( + Tx { + Hash string `json:"hash"` + Type int64 `json:"type,range=[1:64]"` + Amount string `json:"amount"` + Info string `json:"info"` + Status int64 `json:"status"` + Index int64 `json:"index"` + GasFeeAssetId int64 `json:"gas_fee_asset_id"` + GasFee string `json:"gas_fee"` + NftIndex int64 `json:"nft_index"` + CollectionId int64 `json:"collection_id"` + PairIndex int64 `json:"pair_index"` + AssetId int64 `json:"asset_id"` + AssetName string `json:"asset_name"` + NativeAddress string `json:"native_address"` + ExtraInfo string `json:"extra_info"` + Memo string `json:"memo"` + AccountIndex int64 `json:"account_index"` + AccountName string `json:"account_name"` + Nonce int64 `json:"nonce"` + ExpiredAt int64 `json:"expire_at"` + BlockHeight int64 `json:"block_height"` + CreatedAt int64 `json:"created_at"` + StateRoot string `json:"state_root"` + } + + Txs { + Total uint32 `json:"total"` + Txs []*Tx `json:"txs"` + } + + MempoolTxs { + Total uint32 `json:"total"` + MempoolTxs []*Tx `json:"mempool_txs"` + } + + TxHash { + TxHash string `json:"tx_hash"` + } + + NextNonce { + Nonce uint64 `json:"nonce"` + } + + EnrichedTx { + Tx + CommittedAt int64 `json:"committed_at"` + VerifiedAt int64 `json:"verified_at"` + ExecutedAt int64 `json:"executed_at"` + } +) + +type ( + ReqGetBlockTxs { + By string `form:"by,options=block_height|block_commitment"` + Value string `form:"value"` + } + 
+ ReqGetAccountTxs { + By string `form:"by,options=account_index|account_name|account_pk"` + Value string `form:"value"` + Offset uint16 `form:"offset,range=[0:100000]"` + Limit uint16 `form:"limit,range=[1:100]"` + } + + ReqGetTx { + Hash string `form:"hash"` + } + + ReqSendTx { + TxType uint32 `form:"tx_type"` + TxInfo string `form:"tx_info"` + } + + ReqGetAccountMempoolTxs { + By string `form:"by,options=account_index|account_name|account_pk"` + Value string `form:"value"` + } + + ReqGetNextNonce { + AccountIndex uint32 `form:"account_index"` + } +) + +@server( + group: transaction +) + +service server-api { + @doc "Get transactions" + @handler GetTxs + get /api/v1/txs (ReqGetRange) returns (Txs) + + @doc "Get transactions in a block" + @handler GetBlockTxs + get /api/v1/blockTxs (ReqGetBlockTxs) returns (Txs) + + @doc "Get transactions of a specific account" + @handler GetAccountTxs + get /api/v1/accountTxs (ReqGetAccountTxs) returns (Txs) + + @doc "Get transaction by hash" + @handler GetTx + get /api/v1/tx (ReqGetTx) returns (EnrichedTx) + + @doc "Get mempool transactions" + @handler GetMempoolTxs + get /api/v1/mempoolTxs (ReqGetRange) returns (MempoolTxs) + + @doc "Get mempool transactions of a specific account" + @handler GetAccountMempoolTxs + get /api/v1/accountMempoolTxs (ReqGetAccountMempoolTxs) returns (MempoolTxs) + + @doc "Get next nonce" + @handler GetNextNonce + get /api/v1/nextNonce (ReqGetNextNonce) returns (NextNonce) + + @doc "Send raw transaction" + @handler SendTx + post /api/v1/sendTx (ReqSendTx) returns (TxHash) +} + +/* ========================= Nft =========================*/ + +type ( + MaxOfferId { + OfferId uint64 `json:"offer_id"` + } + + Nft { + Index int64 `json:"index"` + CreatorAccountIndex int64 `json:"creator_account_index"` + OwnerAccountIndex int64 `json:"owner_account_index"` + ContentHash string `json:"content_hash"` + L1Address string `json:"l1_address"` + L1TokenId string `json:"l1_token_id"` + CreatorTreasuryRate int64 `json:"creator_treasury_rate"` + CollectionId int64 `json:"collection_id"` + } + Nfts { + Total int64 `json:"total"` + Nfts []*Nft `json:"nfts"` + } +) + +type ( + ReqGetMaxOfferId { + AccountIndex uint32 `form:"account_index"` + } +) + +type ( + ReqGetAccountNfts { + By string `form:"by,options=account_index|account_name|account_pk"` + Value string `form:"value"` + Offset uint16 `form:"offset,range=[0:100000]"` + Limit uint16 `form:"limit,range=[1:100]"` + } +) + +@server( + group: nft +) + +service server-api { + @doc "Get max nft offer id for a specific account" + @handler GetMaxOfferId + get /api/v1/maxOfferId (ReqGetMaxOfferId) returns (MaxOfferId) + + @doc "Get nfts of a specific account" + @handler GetAccountNfts + get /api/v1/accountNfts (ReqGetAccountNfts) returns (Nfts) +} \ No newline at end of file diff --git a/service/apiserver/server.go b/service/apiserver/server.go new file mode 100644 index 000000000..af141ad90 --- /dev/null +++ b/service/apiserver/server.go @@ -0,0 +1,27 @@ +package apiserver + +import ( + "fmt" + + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/rest" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/config" + "github.com/bnb-chain/zkbas/service/apiserver/internal/handler" + "github.com/bnb-chain/zkbas/service/apiserver/internal/svc" +) + +func Run(configFile string) error { + var c config.Config + conf.MustLoad(configFile, &c) + + server := rest.MustNewServer(c.RestConf, rest.WithCors()) + defer server.Stop() + + ctx := svc.NewServiceContext(c) + 
handler.RegisterHandlers(server, ctx) + + fmt.Printf("Starting server at %s:%d...\n", c.Host, c.Port) + server.Start() + return nil +} diff --git a/service/apiserver/test/getaccount_test.go b/service/apiserver/test/getaccount_test.go new file mode 100644 index 000000000..2f6d324cc --- /dev/null +++ b/service/apiserver/test/getaccount_test.go @@ -0,0 +1,75 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAccount() { + type args struct { + by string + value string + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found by index", args{"index", "9999999999"}, 400}, + {"not found by name", args{"name", "not exist name"}, 400}, + {"not found by pk", args{"pk", "not exist pk"}, 400}, + {"invalid by", args{"invalidby", ""}, 400}, + } + + statusCode, accounts := GetAccounts(s, 0, 100) + if statusCode == http.StatusOK && len(accounts.Accounts) > 0 { + tests = append(tests, []testcase{ + {"found by index", args{"index", strconv.Itoa(int(accounts.Accounts[0].Index))}, 200}, + {"found by name", args{"name", accounts.Accounts[0].Name}, 200}, + {"found by pk", args{"pk", accounts.Accounts[0].Pk}, 200}, + }...) + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAccount(s, tt.args.by, tt.args.value) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Pk) + assert.NotNil(t, result.Name) + assert.True(t, result.Nonce >= 0) + assert.NotNil(t, result.Assets) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAccount(s *ApiServerSuite, by, value string) (int, *types.Account) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/account?by=%s&value=%s", s.url, by, value)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Account{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getaccountmempooltxs_test.go b/service/apiserver/test/getaccountmempooltxs_test.go new file mode 100644 index 000000000..b779b80aa --- /dev/null +++ b/service/apiserver/test/getaccountmempooltxs_test.go @@ -0,0 +1,80 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAccountMempoolTxs() { + type args struct { + by string + value string + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found by index", args{"account_index", "9999999"}, 200}, + {"not found by name", args{"account_name", "notexists.legend"}, 200}, + {"not found by pk", args{"account_pk", "notexists"}, 200}, + {"invalidby", args{"invalidby", ""}, 400}, + } + + statusCode, txs := GetMempoolTxs(s, 0, 100) + if statusCode == http.StatusOK && len(txs.MempoolTxs) > 0 { + _, account := GetAccount(s, "name", txs.MempoolTxs[len(txs.MempoolTxs)-1].AccountName) + tests = append(tests, []testcase{ + {"found by index", args{"account_index", strconv.Itoa(int(account.Index))}, 200}, + {"found by name", args{"account_name", account.Name}, 200}, 
+ {"found by pk", args{"account_pk", account.Pk}, 200}, + }...) + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAccountMempoolTxs(s, tt.args.by, tt.args.value) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if result.Total > 0 { + assert.True(t, len(result.MempoolTxs) > 0) + assert.NotNil(t, result.MempoolTxs[0].BlockHeight) + assert.NotNil(t, result.MempoolTxs[0].Hash) + assert.NotNil(t, result.MempoolTxs[0].Type) + assert.NotNil(t, result.MempoolTxs[0].StateRoot) + assert.NotNil(t, result.MempoolTxs[0].Info) + assert.NotNil(t, result.MempoolTxs[0].Status) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAccountMempoolTxs(s *ApiServerSuite, by, value string) (int, *types.MempoolTxs) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/accountMempoolTxs?by=%s&value=%s", s.url, by, value)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.MempoolTxs{} + _ = json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getaccountnfts_test.go b/service/apiserver/test/getaccountnfts_test.go new file mode 100644 index 000000000..0c8fe0c7f --- /dev/null +++ b/service/apiserver/test/getaccountnfts_test.go @@ -0,0 +1,81 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAccountNftList() { + type args struct { + by string + value string + offset int + limit int + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found by index", args{"account_index", "9999999999", 0, 10}, 200}, + {"not found by name", args{"account_name", "notexistname", 0, 10}, 200}, + {"not found by pk", args{"account_pk", "notexistpk", 0, 10}, 200}, + {"invalid by", args{"invalidby", "", 0, 10}, 400}, + } + + statusCode, accounts := GetAccounts(s, 2, 100) + if statusCode == http.StatusOK && len(accounts.Accounts) > 0 { + tests = append(tests, []testcase{ + {"found by index", args{"account_index", strconv.Itoa(int(accounts.Accounts[0].Index)), 0, 10}, 200}, + {"found by name", args{"account_name", accounts.Accounts[0].Name, 0, 10}, 200}, + {"found by pk", args{"account_pk", accounts.Accounts[0].Pk, 0, 10}, 200}, + }...) 
+ } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAccountNfts(s, tt.args.by, tt.args.value, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if tt.args.offset < int(result.Total) { + assert.True(t, len(result.Nfts) > 0) + assert.NotNil(t, result.Nfts[0].Index) + assert.NotNil(t, result.Nfts[0].ContentHash) + assert.NotNil(t, result.Nfts[0].OwnerAccountIndex) + assert.NotNil(t, result.Nfts[0].CollectionId) + assert.NotNil(t, result.Nfts[0].CreatorTreasuryRate) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAccountNfts(s *ApiServerSuite, by, value string, offset, limit int) (int, *types.Nfts) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/accountNfts?by=%s&value=%s&offset=%d&limit=%d", s.url, by, value, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Nfts{} + //nolint:errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getaccounts_test.go b/service/apiserver/test/getaccounts_test.go new file mode 100644 index 000000000..0c5c70ede --- /dev/null +++ b/service/apiserver/test/getaccounts_test.go @@ -0,0 +1,64 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAccounts() { + + type args struct { + offset int + limit int + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{0, 10}, 200}, + {"not found", args{math.MaxInt, 10}, 400}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAccounts(s, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if tt.args.offset < int(result.Total) { + assert.True(t, len(result.Accounts) > 0) + assert.NotNil(t, result.Accounts[0].Name) + assert.NotNil(t, result.Accounts[0].Index) + assert.NotNil(t, result.Accounts[0].Pk) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAccounts(s *ApiServerSuite, offset, limit int) (int, *types.Accounts) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/accounts?offset=%d&limit=%d", s.url, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Accounts{} + //nolint:errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getaccounttxs_test.go b/service/apiserver/test/getaccounttxs_test.go new file mode 100644 index 000000000..2904ad177 --- /dev/null +++ b/service/apiserver/test/getaccounttxs_test.go @@ -0,0 +1,83 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAccountTxs() { + type args struct { + by string + value string + offset int + limit int + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found by index", 
args{"account_index", "99999999", 0, 10}, 200}, + {"not found by name", args{"account_name", "fakeaccount.legend", 0, 10}, 200}, + {"not found by pk", args{"account_pk", "fake8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998", 0, 10}, 200}, + {"invalid by", args{"invalidby", "fake8470d33c59a5cbf5e10df426eb97c2773ab890c3364f4162ba782a56ca998", 0, 10}, 400}, + } + + statusCode, txs := GetTxs(s, 0, 100) + if statusCode == http.StatusOK && len(txs.Txs) > 0 { + _, account := GetAccount(s, "name", txs.Txs[len(txs.Txs)-1].AccountName) + tests = append(tests, []testcase{ + {"found by index", args{"account_index", strconv.Itoa(int(account.Index)), 0, 10}, 200}, + {"found by name", args{"account_name", account.Name, 0, 10}, 200}, + {"found by pk", args{"account_pk", account.Pk, 0, 10}, 200}, + }...) + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAccountTxs(s, tt.args.by, tt.args.value, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if tt.args.offset < int(result.Total) { + assert.True(t, len(result.Txs) > 0) + assert.NotNil(t, result.Txs[0].BlockHeight) + assert.NotNil(t, result.Txs[0].Hash) + assert.NotNil(t, result.Txs[0].Type) + assert.NotNil(t, result.Txs[0].StateRoot) + assert.NotNil(t, result.Txs[0].Info) + assert.NotNil(t, result.Txs[0].Status) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAccountTxs(s *ApiServerSuite, by, value string, offset, limit int) (int, *types.Txs) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/accountTxs?by=%s&value=%s&offset=%d&limit=%d", s.url, by, value, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Txs{} + //nolint:errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getassets_test.go b/service/apiserver/test/getassets_test.go new file mode 100644 index 000000000..b9e717cb8 --- /dev/null +++ b/service/apiserver/test/getassets_test.go @@ -0,0 +1,61 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetAssets() { + + type args struct { + offset int + limit int + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{0, 10}, 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetAssets(s, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.True(t, len(result.Assets) > 0) + assert.NotNil(t, result.Assets[0].Name) + assert.NotNil(t, result.Assets[0].Symbol) + assert.NotNil(t, result.Assets[0].Address) + assert.NotNil(t, result.Assets[0].IsGasAsset) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetAssets(s *ApiServerSuite, offset, limit int) (int, *types.Assets) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/assets?offset=%d&limit=%d", s.url, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Assets{} + //nolint: errcheck + json.Unmarshal(body, &result) + 
return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getblock_test.go b/service/apiserver/test/getblock_test.go new file mode 100644 index 000000000..d9987ed59 --- /dev/null +++ b/service/apiserver/test/getblock_test.go @@ -0,0 +1,62 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetBlock() { + + type args struct { + by string + value string + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found by height", args{"height", "1"}, 200}, + {"found by commitment", args{"commitment", "0000000000000000000000000000000000000000000000000000000000000000"}, 200}, + {"invalidby", args{"invalidby", ""}, 400}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetBlock(s, tt.args.by, tt.args.value) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Height) + assert.NotNil(t, result.Commitment) + assert.NotNil(t, result.Status) + assert.NotNil(t, result.StateRoot) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetBlock(s *ApiServerSuite, by, value string) (int, *types.Block) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/block?by=%s&value=%s", s.url, by, value)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Block{} + //nolint:errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getblocks_test.go b/service/apiserver/test/getblocks_test.go new file mode 100644 index 000000000..ee90c5f71 --- /dev/null +++ b/service/apiserver/test/getblocks_test.go @@ -0,0 +1,66 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetBlocks() { + + type args struct { + offset int + limit int + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{0, 10}, 200}, + {"not found", args{math.MaxInt, 10}, 400}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetBlocks(s, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if tt.args.offset < int(result.Total) { + assert.True(t, len(result.Blocks) > 0) + assert.NotNil(t, result.Blocks[0].Height) + assert.NotNil(t, result.Blocks[0].Commitment) + assert.NotNil(t, result.Blocks[0].Status) + assert.NotNil(t, result.Blocks[0].StateRoot) + //assert.NotNil(t, result.Blocks[0].Txs) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetBlocks(s *ApiServerSuite, offset, limit int) (int, *types.Blocks) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/blocks?offset=%d&limit=%d", s.url, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Blocks{} + //nolint:errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getblocktxs_test.go 
b/service/apiserver/test/getblocktxs_test.go new file mode 100644 index 000000000..2018ed1f4 --- /dev/null +++ b/service/apiserver/test/getblocktxs_test.go @@ -0,0 +1,77 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetBlockTxs() { + type args struct { + by string + value string + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found by block_height", args{"block_height", "99999999"}, 200}, + {"not found by block_commitment", args{"block_commitment", "fsfsfsfsf100"}, 200}, + {"invalidby", args{"invalidby", ""}, 400}, + } + + statusCode, blocks := GetBlocks(s, 0, 100) + if statusCode == http.StatusOK && len(blocks.Blocks) > 0 { + tests = append(tests, []testcase{ + {"found by block_height", args{"block_height", strconv.Itoa(int(blocks.Blocks[len(blocks.Blocks)-1].Height))}, 200}, + {"found by block_commitment", args{"block_commitment", blocks.Blocks[len(blocks.Blocks)-1].Commitment}, 200}, + }...) + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetBlockTxs(s, tt.args.by, tt.args.value) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if len(result.Txs) > 0 { + assert.NotNil(t, result.Txs[0].BlockHeight) + assert.NotNil(t, result.Txs[0].Hash) + assert.NotNil(t, result.Txs[0].Type) + assert.NotNil(t, result.Txs[0].StateRoot) + assert.NotNil(t, result.Txs[0].Info) + assert.NotNil(t, result.Txs[0].Status) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetBlockTxs(s *ApiServerSuite, by, value string) (int, *types.Txs) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/blockTxs?by=%s&value=%s", s.url, by, value)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Txs{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getcurrentheight_test.go b/service/apiserver/test/getcurrentheight_test.go new file mode 100644 index 000000000..375815ae0 --- /dev/null +++ b/service/apiserver/test/getcurrentheight_test.go @@ -0,0 +1,52 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetCurrentHeight() { + tests := []struct { + name string + httpCode int + }{ + {"found", 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetCurrentHeight(s) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Height) + assert.True(t, result.Height > 0) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetCurrentHeight(s *ApiServerSuite) (int, *types.CurrentHeight) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/currentHeight", s.url)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.CurrentHeight{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff 
--git a/service/apiserver/test/getgasaccount_test.go b/service/apiserver/test/getgasaccount_test.go new file mode 100644 index 000000000..bb0ea90a4 --- /dev/null +++ b/service/apiserver/test/getgasaccount_test.go @@ -0,0 +1,53 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetGasAccount() { + tests := []struct { + name string + httpCode int + }{ + {"found", 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetGasAccount(s) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Index) + assert.NotNil(t, result.Name) + assert.NotNil(t, result.Status) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetGasAccount(s *ApiServerSuite) (int, *types.GasAccount) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/gasAccount", s.url)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.GasAccount{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getgasfee_test.go b/service/apiserver/test/getgasfee_test.go new file mode 100644 index 000000000..513952935 --- /dev/null +++ b/service/apiserver/test/getgasfee_test.go @@ -0,0 +1,62 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetGasFee() { + type testcase struct { + name string + args int //asset id + httpCode int + } + + tests := []testcase{ + {"not found", math.MaxInt, 400}, + } + + statusCode, assets := GetGasFeeAssets(s) + if statusCode == http.StatusOK && len(assets.Assets) > 0 { + tests = append(tests, []testcase{ + {"found by index", int(assets.Assets[0].Id), 200}, + }...) 
+ } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetGasFee(s, tt.args) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.GasFee) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetGasFee(s *ApiServerSuite, assetId int) (int, *types.GasFee) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/gasFee?asset_id=%d", s.url, assetId)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.GasFee{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getgasfeeassets_test.go b/service/apiserver/test/getgasfeeassets_test.go new file mode 100644 index 000000000..2d48b29cf --- /dev/null +++ b/service/apiserver/test/getgasfeeassets_test.go @@ -0,0 +1,56 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetGasFeeAssets() { + tests := []struct { + name string + httpCode int + }{ + {"found", 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetGasFeeAssets(s) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Assets) + assert.NotNil(t, result.Assets[0].Id) + assert.NotNil(t, result.Assets[0].Symbol) + assert.NotNil(t, result.Assets[0].Name) + assert.NotNil(t, result.Assets[0].Address) + assert.NotNil(t, result.Assets[0].IsGasAsset) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetGasFeeAssets(s *ApiServerSuite) (int, *types.GasFeeAssets) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/gasFeeAssets", s.url)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.GasFeeAssets{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getlayer2basicinfo_test.go b/service/apiserver/test/getlayer2basicinfo_test.go new file mode 100644 index 000000000..e21f20411 --- /dev/null +++ b/service/apiserver/test/getlayer2basicinfo_test.go @@ -0,0 +1,58 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetLayer2BasicInfo() { + tests := []struct { + name string + httpCode int + }{ + {"found", 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetLayer2BasicInfo(s) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.TotalTransactionCount) + assert.NotNil(t, result.BlockCommitted) + assert.NotNil(t, result.BlockVerified) + assert.NotNil(t, result.ContractAddresses[0]) + assert.NotNil(t, result.ContractAddresses[1]) + assert.NotNil(t, result.YesterdayTransactionCount) + assert.NotNil(t, result.TodayActiveUserCount) + assert.NotNil(t, result.YesterdayActiveUserCount) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetLayer2BasicInfo(s *ApiServerSuite) (int, 
*types.Layer2BasicInfo) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/layer2BasicInfo", s.url)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Layer2BasicInfo{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getlpvalue_test.go b/service/apiserver/test/getlpvalue_test.go new file mode 100644 index 000000000..273bdb89f --- /dev/null +++ b/service/apiserver/test/getlpvalue_test.go @@ -0,0 +1,77 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetLpValue() { + type args struct { + pairIndex int + lpAmount string + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found", args{math.MaxInt, "2"}, 400}, + } + + statusCode, pairs := GetPairs(s, 0, 100) + if statusCode == http.StatusOK && len(pairs.Pairs) > 0 { + for _, pair := range pairs.Pairs { + if pair.TotalLpAmount != "" && pair.TotalLpAmount != "0" { + tests = append(tests, []testcase{ + {"found by index", args{int(pair.Index), "9000"}, 200}, + }...) + break + } + } + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetLpValue(s, tt.args.pairIndex, tt.args.lpAmount) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.AssetAId) + assert.NotNil(t, result.AssetAName) + assert.NotNil(t, result.AssetAAmount) + assert.NotNil(t, result.AssetBId) + assert.NotNil(t, result.AssetBName) + assert.NotNil(t, result.AssetBAmount) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetLpValue(s *ApiServerSuite, pairIndex int, lpAmount string) (int, *types.LpValue) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/lpValue?pair_index=%d&lp_amount=%s", s.url, pairIndex, lpAmount)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.LpValue{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getmaxofferid_test.go b/service/apiserver/test/getmaxofferid_test.go new file mode 100644 index 000000000..826c86ceb --- /dev/null +++ b/service/apiserver/test/getmaxofferid_test.go @@ -0,0 +1,58 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetMaxOfferId() { + type testcase struct { + name string + args int //accountIndex + httpCode int + } + + tests := []testcase{ + {"not found", math.MaxInt, 400}, + } + + statusCode, accounts := GetAccounts(s, 0, 100) + if statusCode == http.StatusOK && len(accounts.Accounts) > 0 { + tests = append(tests, []testcase{ + {"found", int(accounts.Accounts[0].Index), 200}, + }...) 
+ }
+
+ for _, tt := range tests {
+ s.T().Run(tt.name, func(t *testing.T) {
+ httpCode, _ := GetMaxOfferId(s, tt.args)
+ assert.Equal(t, tt.httpCode, httpCode)
+ })
+ }
+
+}
+
+func GetMaxOfferId(s *ApiServerSuite, accountIndex int) (int, *types.MaxOfferId) {
+ resp, err := http.Get(fmt.Sprintf("%s/api/v1/maxOfferId?account_index=%d", s.url, accountIndex))
+ assert.NoError(s.T(), err)
+ defer resp.Body.Close()
+
+ body, err := io.ReadAll(resp.Body)
+ assert.NoError(s.T(), err)
+
+ if resp.StatusCode != http.StatusOK {
+ return resp.StatusCode, nil
+ }
+ result := types.MaxOfferId{}
+ //nolint: errcheck
+ json.Unmarshal(body, &result)
+ return resp.StatusCode, &result
+} diff --git a/service/apiserver/test/getmempooltxs_test.go b/service/apiserver/test/getmempooltxs_test.go new file mode 100644 index 000000000..8f65c1c0a --- /dev/null +++ b/service/apiserver/test/getmempooltxs_test.go @@ -0,0 +1,65 @@ +package test
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/bnb-chain/zkbas/service/apiserver/internal/types"
+)
+
+func (s *ApiServerSuite) TestGetMempoolTxs() {
+
+ type args struct {
+ offset int
+ limit int
+ }
+ tests := []struct {
+ name string
+ args args
+ httpCode int
+ }{
+ {"found", args{0, 10}, 200},
+ }
+
+ for _, tt := range tests {
+ s.T().Run(tt.name, func(t *testing.T) {
+ httpCode, result := GetMempoolTxs(s, tt.args.offset, tt.args.limit)
+ assert.Equal(t, tt.httpCode, httpCode)
+ if httpCode == http.StatusOK {
+ if tt.args.offset < int(result.Total) {
+ assert.True(t, len(result.MempoolTxs) > 0)
+ assert.NotNil(t, result.MempoolTxs[0].BlockHeight)
+ assert.NotNil(t, result.MempoolTxs[0].Hash)
+ assert.NotNil(t, result.MempoolTxs[0].Type)
+ assert.NotNil(t, result.MempoolTxs[0].StateRoot)
+ assert.NotNil(t, result.MempoolTxs[0].Info)
+ assert.NotNil(t, result.MempoolTxs[0].Status)
+ }
+ fmt.Printf("result: %+v \n", result)
+ }
+ })
+ }
+
+}
+
+func GetMempoolTxs(s *ApiServerSuite, offset, limit int) (int, *types.MempoolTxs) {
+ resp, err := http.Get(fmt.Sprintf("%s/api/v1/mempoolTxs?offset=%d&limit=%d", s.url, offset, limit))
+ assert.NoError(s.T(), err)
+ defer resp.Body.Close()
+
+ body, err := io.ReadAll(resp.Body)
+ assert.NoError(s.T(), err)
+
+ if resp.StatusCode != http.StatusOK {
+ return resp.StatusCode, nil
+ }
+ result := types.MempoolTxs{}
+ //nolint: errcheck
+ json.Unmarshal(body, &result)
+ return resp.StatusCode, &result
+} diff --git a/service/apiserver/test/getnextnonce_test.go b/service/apiserver/test/getnextnonce_test.go new file mode 100644 index 000000000..bba21d7fb --- /dev/null +++ b/service/apiserver/test/getnextnonce_test.go @@ -0,0 +1,58 @@ +package test
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "math"
+ "net/http"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/bnb-chain/zkbas/service/apiserver/internal/types"
+)
+
+func (s *ApiServerSuite) TestGetNextNonce() {
+ type testcase struct {
+ name string
+ args int //accountIndex
+ httpCode int
+ }
+
+ tests := []testcase{
+ {"not found", math.MaxInt, 400},
+ }
+
+ statusCode, accounts := GetAccounts(s, 0, 100)
+ if statusCode == http.StatusOK && len(accounts.Accounts) > 0 {
+ tests = append(tests, []testcase{
+ {"found", int(accounts.Accounts[0].Index), 200},
+ }...)
+ } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, _ := GetNextNonce(s, tt.args) + assert.Equal(t, tt.httpCode, httpCode) + }) + } + +} + +func GetNextNonce(s *ApiServerSuite, accountIndex int) (int, *types.NextNonce) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/nextNonce?account_index=%d", s.url, accountIndex)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.NextNonce{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getpair_test.go b/service/apiserver/test/getpair_test.go new file mode 100644 index 000000000..31c9ec130 --- /dev/null +++ b/service/apiserver/test/getpair_test.go @@ -0,0 +1,66 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetPair() { + type testcase struct { + name string + args int //pair index + httpCode int + } + + tests := []testcase{ + {"not found", math.MaxInt, 400}, + } + + statusCode, pairs := GetPairs(s, 0, 100) + if statusCode == http.StatusOK && len(pairs.Pairs) > 0 { + tests = append(tests, []testcase{ + {"found by index", int(pairs.Pairs[0].Index), 200}, + }...) + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetPair(s, tt.args) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.AssetAId) + assert.NotNil(t, result.AssetBId) + assert.NotNil(t, result.AssetAAmount) + assert.NotNil(t, result.AssetBAmount) + assert.NotNil(t, result.TotalLpAmount) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetPair(s *ApiServerSuite, pairIndex int) (int, *types.Pair) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/pair?index=%d", s.url, pairIndex)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Pair{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getpairs_test.go b/service/apiserver/test/getpairs_test.go new file mode 100644 index 000000000..94599b1c3 --- /dev/null +++ b/service/apiserver/test/getpairs_test.go @@ -0,0 +1,68 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetPairs() { + + type args struct { + offset int + limit int + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{0, 10}, 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetPairs(s, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Pairs) + if len(result.Pairs) > 0 { + assert.NotNil(t, result.Pairs[0].Index) + assert.NotNil(t, result.Pairs[0].AssetAName) + assert.NotNil(t, result.Pairs[0].AssetBName) + assert.NotNil(t, result.Pairs[0].AssetAId) + assert.NotNil(t, result.Pairs[0].AssetBId) + assert.NotNil(t, 
result.Pairs[0].AssetAAmount) + assert.NotNil(t, result.Pairs[0].AssetBAmount) + assert.NotNil(t, result.Pairs[0].FeeRate) + assert.NotNil(t, result.Pairs[0].TreasuryRate) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetPairs(s *ApiServerSuite, offset, limit int) (int, *types.Pairs) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/pairs?offset=%d&limit=%d", s.url, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Pairs{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getstatus_test.go b/service/apiserver/test/getstatus_test.go new file mode 100644 index 000000000..81bef2bb8 --- /dev/null +++ b/service/apiserver/test/getstatus_test.go @@ -0,0 +1,56 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetStatus() { + + type args struct { + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{}, 200}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetStatus(s) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.NetworkId) + assert.NotNil(t, result.Status) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetStatus(s *ApiServerSuite) (int, *types.Status) { + resp, err := http.Get(fmt.Sprintf("%s/", s.url)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Status{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getswapamount_test.go b/service/apiserver/test/getswapamount_test.go new file mode 100644 index 000000000..bd011c4b8 --- /dev/null +++ b/service/apiserver/test/getswapamount_test.go @@ -0,0 +1,78 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetSwapAmount() { + type args struct { + pairIndex uint32 + assetId uint32 + assetAmount string + isFrom bool + } + + type testcase struct { + name string + args args + httpCode int + } + + tests := []testcase{ + {"not found", args{math.MaxUint32, math.MaxUint32, "1", true}, 400}, + } + + statusCode, pairs := GetPairs(s, 0, 100) + if statusCode == http.StatusOK && len(pairs.Pairs) > 0 { + for _, pair := range pairs.Pairs { + if pair.TotalLpAmount != "" && pair.TotalLpAmount != "0" { + tests = append(tests, []testcase{ + {"found by index with from is true", args{pair.Index, pair.AssetAId, "9000", true}, 200}, + {"found by index with from is true", args{pair.Index, pair.AssetBId, "9000", true}, 200}, + {"found by index with from is false", args{pair.Index, pair.AssetAId, "9000", false}, 200}, + {"found by index with from is false", args{pair.Index, pair.AssetBId, "9000", false}, 200}, + }...) 
+ break + } + } + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetSwapAmount(s, tt.args.pairIndex, tt.args.assetId, tt.args.assetAmount, tt.args.isFrom) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.AssetId) + assert.NotNil(t, result.AssetAmount) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetSwapAmount(s *ApiServerSuite, pairIndex, assetId uint32, assetAmount string, isFrom bool) (int, *types.SwapAmount) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/swapAmount?pair_index=%d&asset_id=%d&asset_amount=%s&is_from=%v", s.url, pairIndex, assetId, assetAmount, isFrom)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.SwapAmount{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/gettxbyhash_test.go b/service/apiserver/test/gettxbyhash_test.go new file mode 100644 index 000000000..2e0909911 --- /dev/null +++ b/service/apiserver/test/gettxbyhash_test.go @@ -0,0 +1,67 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetTx() { + type testcase struct { + name string + args string //tx hash + httpCode int + } + + tests := []testcase{ + {"not found", "notexistshash", 400}, + } + + statusCode, txs := GetTxs(s, 0, 100) + + if statusCode == http.StatusOK && len(txs.Txs) > 0 { + tests = append(tests, []testcase{ + {"found", txs.Txs[0].Hash, 200}, + }...) 
+ } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetTx(s, tt.args) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.Tx.BlockHeight) + assert.NotNil(t, result.Tx.Hash) + assert.NotNil(t, result.Tx.Type) + assert.NotNil(t, result.Tx.StateRoot) + assert.NotNil(t, result.Tx.Info) + assert.NotNil(t, result.Tx.Status) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetTx(s *ApiServerSuite, hash string) (int, *types.EnrichedTx) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/tx?hash=%s", s.url, hash)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.EnrichedTx{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/gettxs_test.go b/service/apiserver/test/gettxs_test.go new file mode 100644 index 000000000..40dd9c331 --- /dev/null +++ b/service/apiserver/test/gettxs_test.go @@ -0,0 +1,67 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetTxs() { + + type args struct { + offset int + limit int + } + tests := []struct { + name string + args args + httpCode int + }{ + {"found", args{0, 10}, 200}, + {"not found", args{math.MaxInt, 10}, 400}, + } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetTxs(s, tt.args.offset, tt.args.limit) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + if tt.args.offset < int(result.Total) { + assert.True(t, len(result.Txs) > 0) + assert.NotNil(t, result.Txs[0].BlockHeight) + assert.NotNil(t, result.Txs[0].Hash) + assert.NotNil(t, result.Txs[0].Type) + assert.NotNil(t, result.Txs[0].StateRoot) + assert.NotNil(t, result.Txs[0].Info) + assert.NotNil(t, result.Txs[0].Status) + } + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetTxs(s *ApiServerSuite, offset, limit int) (int, *types.Txs) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/txs?offset=%d&limit=%d", s.url, offset, limit)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.Txs{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/getwithdrawgasfee_test.go b/service/apiserver/test/getwithdrawgasfee_test.go new file mode 100644 index 000000000..e7392d555 --- /dev/null +++ b/service/apiserver/test/getwithdrawgasfee_test.go @@ -0,0 +1,62 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" +) + +func (s *ApiServerSuite) TestGetWithdrawGasFee() { + type testcase struct { + name string + args int //asset id + httpCode int + } + + tests := []testcase{ + {"not found", math.MaxInt, 400}, + } + + statusCode, assets := GetGasFeeAssets(s) + if statusCode == http.StatusOK && len(assets.Assets) > 0 { + tests = append(tests, []testcase{ + {"found by index", int(assets.Assets[0].Id), 200}, + }...) 
+ } + + for _, tt := range tests { + s.T().Run(tt.name, func(t *testing.T) { + httpCode, result := GetWithdrawGasFee(s, tt.args) + assert.Equal(t, tt.httpCode, httpCode) + if httpCode == http.StatusOK { + assert.NotNil(t, result.GasFee) + fmt.Printf("result: %+v \n", result) + } + }) + } + +} + +func GetWithdrawGasFee(s *ApiServerSuite, assetId int) (int, *types.GasFee) { + resp, err := http.Get(fmt.Sprintf("%s/api/v1/withdrawGasFee?asset_id=%d", s.url, assetId)) + assert.NoError(s.T(), err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + assert.NoError(s.T(), err) + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, nil + } + result := types.GasFee{} + //nolint: errcheck + json.Unmarshal(body, &result) + return resp.StatusCode, &result +} diff --git a/service/apiserver/test/search_test.go b/service/apiserver/test/search_test.go new file mode 100644 index 000000000..7ed47e241 --- /dev/null +++ b/service/apiserver/test/search_test.go @@ -0,0 +1,77 @@ +package test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/bnb-chain/zkbas/service/apiserver/internal/types" + types2 "github.com/bnb-chain/zkbas/types" +) + +func (s *ApiServerSuite) TestSearch() { + type testcase struct { + name string + args string //keyword + httpCode int + dataType int32 + } + + tests := []testcase{ + {"not found by account name", "notexist.legend", 400, 0}, + {"not found by account pk", "notexistnotexist", 400, 0}, + {"not found by block height", "9999999", 400, 0}, + {"not found by tx hash", "notexistnotexist", 400, 0}, + } + + statusCode, accounts := GetAccounts(s, 0, 100) + if statusCode == http.StatusOK && len(accounts.Accounts) > 0 { + tests = append(tests, []testcase{ + {"found by account name", accounts.Accounts[0].Name, 200, types2.TypeAccountName}, + {"found by account pk", accounts.Accounts[0].Pk, 200, types2.TypeAccountPk}, + }...) + } + + statusCode, txs := GetTxs(s, 0, 100) + if statusCode == http.StatusOK && len(txs.Txs) > 0 { + tests = append(tests, []testcase{ + {"found by block height", strconv.Itoa(int(txs.Txs[0].BlockHeight)), 200, types2.TypeBlockHeight}, + {"found by tx hash", txs.Txs[0].Hash, 200, types2.TypeTxType}, + }...) 
+ }
+
+ for _, tt := range tests {
+ s.T().Run(tt.name, func(t *testing.T) {
+ httpCode, result := Search(s, tt.args)
+ assert.Equal(t, tt.httpCode, httpCode)
+ if httpCode == http.StatusOK {
+ assert.NotNil(t, result.DataType)
+ assert.Equal(t, tt.dataType, result.DataType)
+ fmt.Printf("result: %+v \n", result)
+ }
+ })
+ }
+
+}
+
+func Search(s *ApiServerSuite, keyword string) (int, *types.Search) {
+ resp, err := http.Get(s.url + "/api/v1/search?keyword=" + keyword)
+ assert.NoError(s.T(), err)
+ defer resp.Body.Close()
+
+ body, err := io.ReadAll(resp.Body)
+ assert.NoError(s.T(), err)
+
+ if resp.StatusCode != http.StatusOK {
+ return resp.StatusCode, nil
+ }
+ result := types.Search{}
+ //nolint: errcheck
+ json.Unmarshal(body, &result)
+ return resp.StatusCode, &result
+} diff --git a/service/apiserver/test/suite_test.go b/service/apiserver/test/suite_test.go new file mode 100644 index 000000000..1f8c812b7 --- /dev/null +++ b/service/apiserver/test/suite_test.go @@ -0,0 +1,87 @@ +package test
+
+import (
+ "fmt"
+ "os/exec"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/suite"
+ "github.com/zeromicro/go-zero/core/logx"
+ "github.com/zeromicro/go-zero/core/service"
+ "github.com/zeromicro/go-zero/core/stores/cache"
+ "github.com/zeromicro/go-zero/core/stores/redis"
+ "github.com/zeromicro/go-zero/rest"
+
+ "github.com/bnb-chain/zkbas/service/apiserver/internal/config"
+ "github.com/bnb-chain/zkbas/service/apiserver/internal/handler"
+ "github.com/bnb-chain/zkbas/service/apiserver/internal/svc"
+)
+
+type ApiServerSuite struct {
+ suite.Suite
+ server *rest.Server
+ url string
+}
+
+func TestAppSuite(t *testing.T) {
+ suite.Run(t, new(ApiServerSuite))
+}
+
+func testDBSetup() {
+ testDBShutdown()
+ time.Sleep(5 * time.Second)
+ cmd := exec.Command("docker", "run", "--name", "postgres-ut-apiserver", "-p", "5433:5432",
+ "-e", "POSTGRES_PASSWORD=Zkbas@123", "-e", "POSTGRES_USER=postgres", "-e", "POSTGRES_DB=zkbas",
+ "-e", "PGDATA=/var/lib/postgresql/pgdata", "-d", "ghcr.io/bnb-chain/zkbas/zkbas-ut-postgres:0.0.2")
+ if err := cmd.Run(); err != nil {
+ panic(err)
+ }
+ time.Sleep(10 * time.Second)
+}
+
+func testDBShutdown() {
+ cmd := exec.Command("docker", "kill", "postgres-ut-apiserver")
+ //nolint:errcheck
+ cmd.Run()
+ time.Sleep(time.Second)
+ cmd = exec.Command("docker", "rm", "postgres-ut-apiserver")
+ //nolint:errcheck
+ cmd.Run()
+}
+
+func (s *ApiServerSuite) SetupSuite() {
+ testDBSetup()
+ c := config.Config{
+ RestConf: rest.RestConf{
+ Host: "127.0.0.1",
+ Port: 9888,
+ ServiceConf: service.ServiceConf{
+ Name: "api-server",
+ },
+ },
+ LogConf: logx.LogConf{},
+ }
+ c.Postgres = struct{ DataSource string }{DataSource: "host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5433 sslmode=disable"}
+ c.CacheRedis = cache.CacheConf{}
+ c.CacheRedis = append(c.CacheRedis, cache.NodeConf{
+ RedisConf: redis.RedisConf{Host: "127.0.0.1"},
+ })
+ logx.DisableStat()
+
+ ctx := svc.NewServiceContext(c)
+
+ s.url = fmt.Sprintf("http://127.0.0.1:%d", c.Port)
+ s.server = rest.MustNewServer(c.RestConf, rest.WithCors())
+
+ handler.RegisterHandlers(s.server, ctx)
+ logx.Infof("Starting server at %s", s.url)
+ go s.server.Start()
+ time.Sleep(1 * time.Second)
+}
+
+func (s *ApiServerSuite) TearDownSuite() {
+ logx.Infof("Shutting down server at %s", s.url)
+ s.server.Stop()
+ testDBShutdown()
+} diff --git a/service/committer/committer.go b/service/committer/committer.go new file mode 100644 index 000000000..5f9680935 --- /dev/null +++ b/service/committer/committer.go @@ -0,0
+1,22 @@ +package committer + +import ( + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/service/committer/committer" +) + +func Run(configFile string) error { + var config committer.Config + conf.MustLoad(configFile, &config) + + committer, err := committer.NewCommitter(&config) + if err != nil { + logx.Error("new committer failed:", err) + return err + } + + committer.Run() + return nil +} diff --git a/service/committer/committer/committer.go b/service/committer/committer/committer.go new file mode 100644 index 000000000..d44b9b61d --- /dev/null +++ b/service/committer/committer/committer.go @@ -0,0 +1,241 @@ +package committer + +import ( + "errors" + "fmt" + "time" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/core" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/tx" +) + +const ( + MaxCommitterInterval = 60 * 1 +) + +type Config struct { + core.ChainConfig + + BlockConfig struct { + OptionalBlockSizes []int + } +} + +type Committer struct { + config *Config + maxTxsPerBlock int + optionalBlockSizes []int + + bc *core.BlockChain + + executedMemPoolTxs []*mempool.MempoolTx +} + +func NewCommitter(config *Config) (*Committer, error) { + if len(config.BlockConfig.OptionalBlockSizes) == 0 { + return nil, errors.New("nil optional block sizes") + } + + bc, err := core.NewBlockChain(&config.ChainConfig, "committer") + if err != nil { + return nil, fmt.Errorf("new blockchain error: %v", err) + } + + committer := &Committer{ + config: config, + maxTxsPerBlock: config.BlockConfig.OptionalBlockSizes[len(config.BlockConfig.OptionalBlockSizes)-1], + optionalBlockSizes: config.BlockConfig.OptionalBlockSizes, + + bc: bc, + + executedMemPoolTxs: make([]*mempool.MempoolTx, 0), + } + return committer, nil +} + +func (c *Committer) Run() { + curBlock, err := c.restoreExecutedTxs() + if err != nil { + panic("restore executed tx failed: " + err.Error()) + } + + for { + if curBlock.BlockStatus > block.StatusProposing { + curBlock, err = c.bc.ProposeNewBlock() + if err != nil { + panic("propose new block failed: " + err.Error()) + } + } + + // Read pending transactions from mempool_tx table. + pendingTxs, err := c.bc.MempoolModel.GetMempoolTxsByStatus(mempool.PendingTxStatus) + if err != nil { + logx.Error("get pending transactions from mempool failed:", err) + return + } + for len(pendingTxs) == 0 { + if c.shouldCommit(curBlock) { + break + } + + time.Sleep(100 * time.Millisecond) + pendingTxs, err = c.bc.MempoolModel.GetMempoolTxsByStatus(mempool.PendingTxStatus) + if err != nil { + logx.Error("get pending transactions from mempool failed:", err) + return + } + } + + pendingUpdateMempoolTxs := make([]*mempool.MempoolTx, 0, len(pendingTxs)) + pendingDeleteMempoolTxs := make([]*mempool.MempoolTx, 0, len(pendingTxs)) + for _, mempoolTx := range pendingTxs { + if c.shouldCommit(curBlock) { + break + } + + tx := convertMempoolTxToTx(mempoolTx) + err = c.bc.ApplyTransaction(tx) + if err != nil { + logx.Errorf("apply mempool tx ID: %d failed, err %v ", mempoolTx.ID, err) + mempoolTx.Status = mempool.FailTxStatus + pendingDeleteMempoolTxs = append(pendingDeleteMempoolTxs, mempoolTx) + continue + } + mempoolTx.Status = mempool.ExecutedTxStatus + pendingUpdateMempoolTxs = append(pendingUpdateMempoolTxs, mempoolTx) + + // Write the proposed block into database when the first transaction executed. 
+ if len(c.bc.Statedb.Txs) == 1 { + err = c.createNewBlock(curBlock) + if err != nil { + panic("create new block failed" + err.Error()) + } + } + } + + err = c.bc.StateDB().SyncStateCacheToRedis() + if err != nil { + panic("sync redis cache failed: " + err.Error()) + } + + err = c.bc.MempoolModel.UpdateMempoolTxs(pendingUpdateMempoolTxs, pendingDeleteMempoolTxs) + if err != nil { + panic("update mempool failed: " + err.Error()) + } + c.executedMemPoolTxs = append(c.executedMemPoolTxs, pendingUpdateMempoolTxs...) + + if c.shouldCommit(curBlock) { + curBlock, err = c.commitNewBlock(curBlock) + if err != nil { + panic("commit new block failed: " + err.Error()) + } + } + } +} + +func (c *Committer) restoreExecutedTxs() (*block.Block, error) { + bc := c.bc + curHeight, err := bc.BlockModel.GetCurrentHeight() + if err != nil { + return nil, err + } + curBlock, err := bc.BlockModel.GetBlockByHeight(curHeight) + if err != nil { + return nil, err + } + + executedTxs, err := c.bc.MempoolModel.GetMempoolTxsByStatus(mempool.ExecutedTxStatus) + if err != nil { + return nil, err + } + + if curBlock.BlockStatus > block.StatusProposing { + if len(executedTxs) != 0 { + return nil, errors.New("no proposing block but exist executed txs") + } + return curBlock, nil + } + + for _, mempoolTx := range executedTxs { + tx := convertMempoolTxToTx(mempoolTx) + err = c.bc.ApplyTransaction(tx) + if err != nil { + return nil, err + } + } + + c.executedMemPoolTxs = append(c.executedMemPoolTxs, executedTxs...) + return curBlock, nil +} + +func (c *Committer) createNewBlock(curBlock *block.Block) error { + return c.bc.BlockModel.CreateNewBlock(curBlock) +} + +func (c *Committer) shouldCommit(curBlock *block.Block) bool { + var now = time.Now() + if (len(c.bc.Statedb.Txs) > 0 && now.Unix()-curBlock.CreatedAt.Unix() >= MaxCommitterInterval) || + len(c.bc.Statedb.Txs) >= c.maxTxsPerBlock { + return true + } + + return false +} + +func (c *Committer) commitNewBlock(curBlock *block.Block) (*block.Block, error) { + for _, tx := range c.executedMemPoolTxs { + tx.Status = mempool.SuccessTxStatus + } + + blockSize := c.computeCurrentBlockSize() + blockStates, err := c.bc.CommitNewBlock(blockSize, curBlock.CreatedAt.UnixMilli()) + if err != nil { + return nil, err + } + + // Update database in a transaction. 
+ err = c.bc.BlockModel.CreateCompressedBlock(c.executedMemPoolTxs, blockStates) + if err != nil { + return nil, err + } + + c.executedMemPoolTxs = make([]*mempool.MempoolTx, 0) + return blockStates.Block, nil +} + +func (c *Committer) computeCurrentBlockSize() int { + var blockSize int + for i := 0; i < len(c.optionalBlockSizes); i++ { + if len(c.bc.Statedb.Txs) <= c.optionalBlockSizes[i] { + blockSize = c.optionalBlockSizes[i] + break + } + } + return blockSize +} + +func convertMempoolTxToTx(mempoolTx *mempool.MempoolTx) *tx.Tx { + tx := &tx.Tx{ + TxHash: mempoolTx.TxHash, + TxType: mempoolTx.TxType, + GasFee: mempoolTx.GasFee, + GasFeeAssetId: mempoolTx.GasFeeAssetId, + TxStatus: tx.StatusPending, + NftIndex: mempoolTx.NftIndex, + PairIndex: mempoolTx.PairIndex, + AssetId: mempoolTx.AssetId, + TxAmount: mempoolTx.TxAmount, + NativeAddress: mempoolTx.NativeAddress, + TxInfo: mempoolTx.TxInfo, + ExtraInfo: mempoolTx.ExtraInfo, + Memo: mempoolTx.Memo, + AccountIndex: mempoolTx.AccountIndex, + Nonce: mempoolTx.Nonce, + ExpiredAt: mempoolTx.ExpiredAt, + } + return tx +} diff --git a/service/committer/etc/config.yaml.example b/service/committer/etc/config.yaml.example new file mode 100644 index 000000000..d16682f7d --- /dev/null +++ b/service/committer/etc/config.yaml.example @@ -0,0 +1,14 @@ +Name: committer + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: redis:6379 + Type: node + +BlockConfig: + OptionalBlockSizes: [1, 10] + +TreeDB: + Driver: memorydb diff --git a/service/cronjob/committer/Dockerfile b/service/cronjob/committer/Dockerfile deleted file mode 100644 index 02f158bd7..000000000 --- a/service/cronjob/committer/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 -# ENV GOPROXY https://goproxy.cn,direct - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/committer /app/committer -COPY configyaml /app/etc - -CMD ["./committer", "-f", "etc/committer.yaml"] \ No newline at end of file diff --git a/service/cronjob/committer/committer.go b/service/cronjob/committer/committer.go deleted file mode 100644 index 75b33dc4b..000000000 --- a/service/cronjob/committer/committer.go +++ /dev/null @@ -1,128 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "time" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/robfig/cron/v3" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/bnb-chain/zkbas/service/cronjob/committer/internal/config" - "github.com/bnb-chain/zkbas/service/cronjob/committer/internal/logic" - "github.com/bnb-chain/zkbas/service/cronjob/committer/internal/svc" -) - -var configFile = flag.String("f", - "./etc/committer.yaml", "the config file") - -func main() { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logic.TxsAmountPerBlock = c.KeyPath.KeyTxCounts - ctx := svc.NewServiceContext(c) - logx.DisableStat() - var ( - accountTree bsmt.SparseMerkleTree - accountStateTrees []bsmt.SparseMerkleTree - liquidityTree bsmt.SparseMerkleTree - nftTree bsmt.SparseMerkleTree - ) - // get latest account - h, err := 
ctx.BlockModel.GetCurrentBlockHeight() - if err != nil { - panic(err) - } - latestVerifiedBlockNr, err := ctx.BlockModel.GetLatestVerifiedBlockHeight() - if err != nil { - panic(err) - } - // init tree database - treeCtx := &treedb.Context{ - Name: "committer", - Driver: c.TreeDB.Driver, - LevelDBOption: &c.TreeDB.LevelDBOption, - RedisDBOption: &c.TreeDB.RedisDBOption, - } - err = treedb.SetupTreeDB(treeCtx) - if err != nil { - panic(err) - } - // init accountTree and accountStateTrees - accountTree, accountStateTrees, err = tree.InitAccountTree( - ctx.AccountModel, - ctx.AccountHistoryModel, - h, - treeCtx, - ) - if err != nil { - logx.Error("[committer] => InitMerkleTree error:", err) - return - } - // init nft tree - nftTree, err = tree.InitNftTree( - ctx.L2NftHistoryModel, - h, - treeCtx, - ) - if err != nil { - logx.Error("[committer] => InitMerkleTree error:", err) - return - } - - // init liquidity tree - liquidityTree, err = tree.InitLiquidityTree( - ctx.LiquidityHistoryModel, - h, - treeCtx, - ) - if err != nil { - logx.Error("[committer] => InitMerkleTree error:", err) - return - } - - /* - First read the account generalAsset liquidityAsset lockAsset information from the database, - and then read from the verifier table which layer2 height the information in the database belongs to - */ - var lastCommitTimeStamp = time.Now() - // new cron - cronJob := cron.New(cron.WithChain( - cron.SkipIfStillRunning(cron.DiscardLogger), - )) - _, err = cronJob.AddFunc("@every 10s", func() { - logx.Info("========================= start committer task =========================") - err := logic.CommitterTask( - ctx, - &lastCommitTimeStamp, - treeCtx, - accountTree, - liquidityTree, - nftTree, - &accountStateTrees, - uint64(latestVerifiedBlockNr), - ) - if err != nil { - logx.Info("[committer.CommitterTask main] unable to run:", err) - - accountTree.Reset() - for _, assetTree := range accountStateTrees { - assetTree.Reset() - } - nftTree.Reset() - liquidityTree.Reset() - } - }) - if err != nil { - panic(err) - } - cronJob.Start() - fmt.Printf("Starting committer cronjob ...") - select {} -} diff --git a/service/cronjob/committer/etc/config.yaml.example b/service/cronjob/committer/etc/config.yaml.example deleted file mode 100644 index 73a3c23d6..000000000 --- a/service/cronjob/committer/etc/config.yaml.example +++ /dev/null @@ -1,15 +0,0 @@ -Name: committer.cronjob - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - # Pass: myredis - Type: node - -KeyPath: - KeyTxCounts: [1, 10] - -TreeDB: - Driver: memorydb \ No newline at end of file diff --git a/service/cronjob/committer/internal/config/config.go b/service/cronjob/committer/internal/config/config.go deleted file mode 100644 index d7479783f..000000000 --- a/service/cronjob/committer/internal/config/config.go +++ /dev/null @@ -1,21 +0,0 @@ -package config - -import ( - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/zeromicro/go-zero/core/stores/cache" -) - -type Config struct { - Postgres struct { - DataSource string - } - KeyPath struct { - KeyTxCounts []int - } - CacheRedis cache.CacheConf - TreeDB struct { - Driver treedb.Driver - LevelDBOption treedb.LevelDBOption `json:",optional"` - RedisDBOption treedb.RedisDBOption `json:",optional"` - } -} diff --git a/service/cronjob/committer/internal/logic/committerTask.go b/service/cronjob/committer/internal/logic/committerTask.go deleted file mode 100644 index 7da7931e4..000000000 
--- a/service/cronjob/committer/internal/logic/committerTask.go +++ /dev/null @@ -1,767 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - "encoding/json" - "errors" - "math" - "math/big" - "strconv" - "time" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/bnb-chain/zkbas-crypto/ffmath" - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/std" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/bnb-chain/zkbas/service/cronjob/committer/internal/svc" -) - -func CommitterTask( - ctx *svc.ServiceContext, - lastCommitTimeStamp *time.Time, - treeCtx *treedb.Context, - accountTree bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - accountAssetTrees *[]bsmt.SparseMerkleTree, - finalityBlockNr uint64, -) error { - // Get Txs from Mempool - mempoolTxs, err := ctx.MempoolModel.GetMempoolTxsListForCommitter() - if err != nil { - if err == errorcode.DbErrNotFound { - return nil - } else { - logx.Errorf("[CommitterTask] unable to get tx in mempool: %s", err.Error()) - return err - } - } - var nTxs = len(mempoolTxs) - logx.Infof("[CommitterTask] Mempool txs number : %d", nTxs) - - // get current block height - currentBlockHeight, err := ctx.BlockModel.GetCurrentBlockHeight() - if err != nil && err != errorcode.DbErrNotFound { - logx.Error("[CommitterTask] err when get current block height") - return err - } - // get last block info - lastBlock, err := ctx.BlockModel.GetBlockByBlockHeight(currentBlockHeight) - if err != nil { - logx.Errorf("[CommitterTask] unable to get block by height: %s", err.Error()) - return err - } - // handle txs - // check how many blocks - MaxTxsAmountPerBlock := TxsAmountPerBlock[len(TxsAmountPerBlock)-1] - blocksSize := int(math.Ceil(float64(nTxs) / float64(MaxTxsAmountPerBlock))) - - // accountMap store the map from account index to accountInfo, decrease the duplicated query from Account Model - var ( - accountMap = make(map[int64]*FormatAccountInfo) - liquidityMap = make(map[int64]*Liquidity) - nftMap = make(map[int64]*L2Nft) - oldStateRoot = lastBlock.StateRoot - ) - for i := 0; i < blocksSize; i++ { - // Check time stamp - var now = time.Now() - if now.Unix()-lastCommitTimeStamp.Unix() < 
MaxCommitterInterval { - // if time is less than MaxCommitterInterval (15 minutes for now) - // and remaining txs number( equals to "nTxs - (i + 1) * MaxTxsAmountPerBlock") is less than MaxTxsAmountPerBlock - if nTxs-i*MaxTxsAmountPerBlock < MaxTxsAmountPerBlock { - logx.Infof("[CommitterTask] not enough transactions") - return errors.New("[CommitterTask] not enough transactions") - } - } - - var ( - pendingUpdateAccountIndexMap = make(map[int64]bool) - pendingUpdateLiquidityIndexMap = make(map[int64]bool) - - pendingUpdateNftIndexMap = make(map[int64]bool) - pendingNewNftIndexMap = make(map[int64]bool) - pendingNewNftWithdrawHistory []*nft.L2NftWithdrawHistory - - // block txs - txs []*Tx - // final account root - finalStateRoot string - // pub data - pubData []byte - // onchain tx info - priorityOperations int64 - pubDataOffset []uint32 - pendingOnChainOperationsPubData [][]byte - pendingOnChainOperationsHash []byte - pendingMempoolTxs []*MempoolTx - pendingDeleteMempoolTxs []*MempoolTx - ) - // write default string into pending onchain operations hash - pendingOnChainOperationsHash = common.FromHex(util.EmptyStringKeccak) - // handle each transaction - currentBlockHeight += 1 - - // compute block commitment - createdAt := time.Now().UnixMilli() - - OUTER: - for j := 0; j < MaxTxsAmountPerBlock; j++ { - // if not full block, just break - if i*MaxTxsAmountPerBlock+j >= nTxs { - break - } - var ( - pendingPriorityOperation int64 - newCollectionNonce = commonConstant.NilCollectionId - ) - // get mempool tx - mempoolTx := mempoolTxs[i*MaxTxsAmountPerBlock+j] - // handle tx pub data - pendingPriorityOperation, pendingOnChainOperationsPubData, pendingOnChainOperationsHash, pubData, pubDataOffset, err = - handleTxPubData( - mempoolTx, - pubData, - pendingOnChainOperationsPubData, - pendingOnChainOperationsHash, - pubDataOffset, - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to handle l1 tx: %s", err.Error()) - return err - } - // compute new priority operations - priorityOperations += pendingPriorityOperation - - // get related account info - if mempoolTx.AccountIndex != commonConstant.NilTxAccountIndex { - if accountMap[mempoolTx.AccountIndex] == nil { - accountInfo, err := ctx.AccountModel.GetAccountByAccountIndex(mempoolTx.AccountIndex) - if err != nil { - logx.Errorf("[CommitterTask] get account by account index: %s", err.Error()) - return err - } - accountMap[mempoolTx.AccountIndex], err = commonAsset.ToFormatAccountInfo(accountInfo) - if err != nil { - logx.Errorf("[CommitterTask] unable to format account info: %s", err.Error()) - return err - } - } - // handle registerZNS tx - pendingUpdateAccountIndexMap[mempoolTx.AccountIndex] = true - if accountMap[mempoolTx.AccountIndex].Status == account.AccountStatusPending { - if mempoolTx.TxType != TxTypeRegisterZns { - logx.Errorf("[CommitterTask] first transaction should be registerZNS") - return errors.New("[CommitterTask] first transaction should be registerZNS") - } - accountMap[mempoolTx.AccountIndex].Status = account.AccountStatusConfirmed - pendingUpdateAccountIndexMap[mempoolTx.AccountIndex] = true - // update account tree - if int64(len(*accountAssetTrees)) != mempoolTx.AccountIndex { - logx.Errorf("[CommitterTask] invalid account index") - return errors.New("[CommitterTask] invalid account index") - } - emptyAssetTree, err := tree.NewEmptyAccountAssetTree(treeCtx, mempoolTx.AccountIndex, finalityBlockNr) - if err != nil { - logx.Errorf("[CommitterTask] unable to new empty account state tree: %s", err.Error()) - return err 
- } - *accountAssetTrees = append(*accountAssetTrees, emptyAssetTree) - nAccountLeafHash, err := tree.ComputeAccountLeafHash( - accountMap[mempoolTx.AccountIndex].AccountNameHash, - accountMap[mempoolTx.AccountIndex].PublicKey, - accountMap[mempoolTx.AccountIndex].Nonce, - accountMap[mempoolTx.AccountIndex].CollectionNonce, - (*accountAssetTrees)[mempoolTx.AccountIndex].Root(), - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute account leaf: %s", err.Error()) - return err - } - err = accountTree.Set(uint64(mempoolTx.AccountIndex), nAccountLeafHash) - if err != nil { - logx.Errorf("[CommitterTask] unable to update account tree: %s", err.Error()) - return err - } - } - } - - // check if the tx is still valid - if mempoolTx.ExpiredAt != commonConstant.NilExpiredAt { - if mempoolTx.ExpiredAt < createdAt { - mempoolTx.Status = mempool.FailTxStatus - mempoolTx.L2BlockHeight = currentBlockHeight - pendingDeleteMempoolTxs = append(pendingDeleteMempoolTxs, mempoolTx) - - logx.Infof("[CommitterTask] tx %s is expired, mark tx as failed, expiredAt=%d, now=%d", - mempoolTx.TxHash, mempoolTx.ExpiredAt, createdAt) - continue - } - } - if mempoolTx.Nonce != commonConstant.NilNonce { - // check nonce, the latest nonce should be previous nonce + 1 - if mempoolTx.Nonce != accountMap[mempoolTx.AccountIndex].Nonce+1 { - mempoolTx.Status = mempool.FailTxStatus - mempoolTx.L2BlockHeight = currentBlockHeight - pendingDeleteMempoolTxs = append(pendingDeleteMempoolTxs, mempoolTx) - - logx.Errorf("[CommitterTask] tx %s has invalid nonce, mark tx as failed, txNonce=%d, accountNonce=%d", - mempoolTx.TxHash, mempoolTx.Nonce, accountMap[mempoolTx.AccountIndex].Nonce) - continue - } - } - // check mempool tx details are correct - var ( - txDetails []*tx.TxDetail - ) - for _, mempoolTxDetail := range mempoolTx.MempoolDetails { - if mempoolTxDetail.AccountIndex != commonConstant.NilTxAccountIndex { - pendingUpdateAccountIndexMap[mempoolTxDetail.AccountIndex] = true - if accountMap[mempoolTxDetail.AccountIndex] == nil { - accountInfo, err := ctx.AccountModel.GetAccountByAccountIndex(mempoolTxDetail.AccountIndex) - if err != nil { - logx.Errorf("[CommitterTask] get account by account index: %s", err.Error()) - return err - } - accountMap[mempoolTxDetail.AccountIndex], err = commonAsset.ToFormatAccountInfo(accountInfo) - if err != nil { - logx.Errorf("[CommitterTask] unable to format account info: %s", err.Error()) - return err - } - } - } - var ( - baseBalance string - ) - // check balance - switch mempoolTxDetail.AssetType { - case GeneralAssetType: - if accountMap[mempoolTxDetail.AccountIndex].AssetInfo == nil { - accountMap[mempoolTxDetail.AccountIndex].AssetInfo = make(map[int64]*commonAsset.AccountAsset) - } - if accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId] == nil { - accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId] = &commonAsset.AccountAsset{ - AssetId: mempoolTxDetail.AssetId, - Balance: ZeroBigInt, - LpAmount: ZeroBigInt, - OfferCanceledOrFinalized: ZeroBigInt, - } - } - // get latest account asset info - baseBalance = accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].String() - var ( - nBalance string - ) - if mempoolTx.TxType == TxTypeFullExit { - balanceDelta := &commonAsset.AccountAsset{ - AssetId: accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].AssetId, - Balance: ffmath.Neg(accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].Balance), - LpAmount: 
big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - // compute new balance - nBalance, err = commonAsset.ComputeNewBalance(GeneralAssetType, baseBalance, balanceDelta.String()) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new balance: %s", err.Error()) - return err - } - mempoolTxDetail.BalanceDelta = balanceDelta.String() - txInfo, err := commonTx.ParseFullExitTxInfo(mempoolTx.TxInfo) - if err != nil { - logx.Errorf("[CommitterTask] unable to parse full exit tx info: %s", err.Error()) - return err - } - txInfo.AssetAmount = accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].Balance - infoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[CommitterTask] unable to marshal tx: %s", err.Error()) - return err - } - mempoolTx.TxInfo = string(infoBytes) - } else { - // compute new balance - nBalance, err = commonAsset.ComputeNewBalance(GeneralAssetType, baseBalance, mempoolTxDetail.BalanceDelta) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new balance: %s", err.Error()) - return err - } - } - nAccountAsset, err := commonAsset.ParseAccountAsset(nBalance) - if err != nil { - logx.Errorf("[CommitterTask] unable to parse account asset: %s", err.Error()) - return err - } - // check balance is valid - if nAccountAsset.Balance.Cmp(util.ZeroBigInt) < 0 { - // mark this transaction as invalid transaction - mempoolTx.Status = mempool.FailTxStatus - mempoolTx.L2BlockHeight = currentBlockHeight - pendingDeleteMempoolTxs = append(pendingDeleteMempoolTxs, mempoolTx) - continue OUTER - } - accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId] = nAccountAsset - // update account state tree - nAssetLeaf, err := tree.ComputeAccountAssetLeafHash( - accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].Balance.String(), - accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].LpAmount.String(), - accountMap[mempoolTxDetail.AccountIndex].AssetInfo[mempoolTxDetail.AssetId].OfferCanceledOrFinalized.String(), - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new account asset leaf: %s", err.Error()) - return err - } - err = (*accountAssetTrees)[mempoolTxDetail.AccountIndex].Set(uint64(mempoolTxDetail.AssetId), nAssetLeaf) - if err != nil { - logx.Errorf("[CommitterTask] unable to update asset tree: %s", err.Error()) - return err - } - - accountMap[mempoolTxDetail.AccountIndex].AssetRoot = common.Bytes2Hex( - (*accountAssetTrees)[mempoolTxDetail.AccountIndex].Root()) - - break - case LiquidityAssetType: - pendingUpdateLiquidityIndexMap[mempoolTxDetail.AssetId] = true - if liquidityMap[mempoolTxDetail.AssetId] == nil { - liquidityMap[mempoolTxDetail.AssetId], err = ctx.LiquidityModel.GetLiquidityByPairIndex(mempoolTxDetail.AssetId) - if err != nil { - logx.Errorf("[CommitterTask] unable to get latest liquidity by pair index: %s", err.Error()) - return err - } - } - var ( - poolInfo *commonAsset.LiquidityInfo - ) - if mempoolTx.TxType == TxTypeCreatePair { - poolInfo = commonAsset.EmptyLiquidityInfo(mempoolTxDetail.AssetId) - } else { - poolInfo, err = commonAsset.ConstructLiquidityInfo( - liquidityMap[mempoolTxDetail.AssetId].PairIndex, - liquidityMap[mempoolTxDetail.AssetId].AssetAId, - liquidityMap[mempoolTxDetail.AssetId].AssetA, - liquidityMap[mempoolTxDetail.AssetId].AssetBId, - liquidityMap[mempoolTxDetail.AssetId].AssetB, - liquidityMap[mempoolTxDetail.AssetId].LpAmount, - liquidityMap[mempoolTxDetail.AssetId].KLast, - 
liquidityMap[mempoolTxDetail.AssetId].FeeRate, - liquidityMap[mempoolTxDetail.AssetId].TreasuryAccountIndex, - liquidityMap[mempoolTxDetail.AssetId].TreasuryRate, - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to construct pool info: %s", err.Error()) - return err - } - } - baseBalance = poolInfo.String() - // compute new balance - nBalance, err := commonAsset.ComputeNewBalance( - LiquidityAssetType, baseBalance, mempoolTxDetail.BalanceDelta) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new balance: %s", err.Error()) - return err - } - nPoolInfo, err := commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - logx.Errorf("[CommitterTask] unable to parse pair info: %s", err.Error()) - return err - } - // update liquidity info - liquidityMap[mempoolTxDetail.AssetId] = &Liquidity{ - Model: liquidityMap[mempoolTxDetail.AssetId].Model, - PairIndex: nPoolInfo.PairIndex, - AssetAId: liquidityMap[mempoolTxDetail.AssetId].AssetAId, - AssetA: nPoolInfo.AssetA.String(), - AssetBId: liquidityMap[mempoolTxDetail.AssetId].AssetBId, - AssetB: nPoolInfo.AssetB.String(), - LpAmount: nPoolInfo.LpAmount.String(), - KLast: nPoolInfo.KLast.String(), - FeeRate: nPoolInfo.FeeRate, - TreasuryAccountIndex: nPoolInfo.TreasuryAccountIndex, - TreasuryRate: nPoolInfo.TreasuryRate, - } - - // update account state tree - nLiquidityAssetLeaf, err := tree.ComputeLiquidityAssetLeafHash( - liquidityMap[mempoolTxDetail.AssetId].AssetAId, - liquidityMap[mempoolTxDetail.AssetId].AssetA, - liquidityMap[mempoolTxDetail.AssetId].AssetBId, - liquidityMap[mempoolTxDetail.AssetId].AssetB, - liquidityMap[mempoolTxDetail.AssetId].LpAmount, - liquidityMap[mempoolTxDetail.AssetId].KLast, - liquidityMap[mempoolTxDetail.AssetId].FeeRate, - liquidityMap[mempoolTxDetail.AssetId].TreasuryAccountIndex, - liquidityMap[mempoolTxDetail.AssetId].TreasuryRate, - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new account liquidity leaf: %s", err.Error()) - return err - } - err = liquidityTree.Set(uint64(mempoolTxDetail.AssetId), nLiquidityAssetLeaf) - if err != nil { - logx.Errorf("[CommitterTask] unable to update liquidity tree: %s", err.Error()) - return err - } - break - case NftAssetType: - // check if nft exists in the db - if nftMap[mempoolTxDetail.AssetId] == nil { - nftMap[mempoolTxDetail.AssetId], err = ctx.L2NftModel.GetNftAsset(mempoolTxDetail.AssetId) - if err != nil { - logx.Errorf("[CommitterTask] unable to get nft asset: %s", err.Error()) - return err - } - } - // check special type - if mempoolTx.TxType == commonTx.TxTypeDepositNft || mempoolTx.TxType == commonTx.TxTypeMintNft { - pendingNewNftIndexMap[mempoolTxDetail.AssetId] = true - baseBalance = commonAsset.EmptyNftInfo(nftMap[mempoolTxDetail.AssetId].NftIndex).String() - } else { - pendingNewNftIndexMap[mempoolTxDetail.AssetId] = true - pendingUpdateNftIndexMap[mempoolTxDetail.AssetId] = true - // before nft info - baseBalance = commonAsset.ConstructNftInfo( - nftMap[mempoolTxDetail.AssetId].NftIndex, - nftMap[mempoolTxDetail.AssetId].CreatorAccountIndex, - nftMap[mempoolTxDetail.AssetId].OwnerAccountIndex, - nftMap[mempoolTxDetail.AssetId].NftContentHash, - nftMap[mempoolTxDetail.AssetId].NftL1TokenId, - nftMap[mempoolTxDetail.AssetId].NftL1Address, - nftMap[mempoolTxDetail.AssetId].CreatorTreasuryRate, - nftMap[mempoolTxDetail.AssetId].CollectionId, - ).String() - } - if mempoolTx.TxType == commonTx.TxTypeWithdrawNft || mempoolTx.TxType == commonTx.TxTypeFullExitNft { - pendingNewNftWithdrawHistory = 
append(pendingNewNftWithdrawHistory, &nft.L2NftWithdrawHistory{ - NftIndex: nftMap[mempoolTxDetail.AssetId].NftIndex, - CreatorAccountIndex: nftMap[mempoolTxDetail.AssetId].CreatorAccountIndex, - OwnerAccountIndex: nftMap[mempoolTxDetail.AssetId].OwnerAccountIndex, - NftContentHash: nftMap[mempoolTxDetail.AssetId].NftContentHash, - NftL1Address: nftMap[mempoolTxDetail.AssetId].NftL1Address, - NftL1TokenId: nftMap[mempoolTxDetail.AssetId].NftL1TokenId, - CreatorTreasuryRate: nftMap[mempoolTxDetail.AssetId].CreatorTreasuryRate, - CollectionId: nftMap[mempoolTxDetail.AssetId].CollectionId, - }) - } - // delta nft info - nftInfo, err := commonAsset.ParseNftInfo(mempoolTxDetail.BalanceDelta) - if err != nil { - logx.Errorf("[CommitterTask] unable to parse nft info: %s", err.Error()) - return err - } - if pendingUpdateNftIndexMap[mempoolTxDetail.AssetId] { - // update nft info - nftMap[mempoolTxDetail.AssetId] = &L2Nft{ - Model: nftMap[mempoolTxDetail.AssetId].Model, - NftIndex: nftInfo.NftIndex, - CreatorAccountIndex: nftInfo.CreatorAccountIndex, - OwnerAccountIndex: nftInfo.OwnerAccountIndex, - NftContentHash: nftInfo.NftContentHash, - NftL1Address: nftInfo.NftL1Address, - NftL1TokenId: nftInfo.NftL1TokenId, - CreatorTreasuryRate: nftInfo.CreatorTreasuryRate, - CollectionId: nftInfo.CollectionId, - } - } - // get nft asset - nftAsset := nftMap[mempoolTxDetail.AssetId] - // update nft tree - nNftAssetLeaf, err := tree.ComputeNftAssetLeafHash( - nftAsset.CreatorAccountIndex, nftAsset.OwnerAccountIndex, - nftAsset.NftContentHash, - nftAsset.NftL1Address, nftAsset.NftL1TokenId, - nftAsset.CreatorTreasuryRate, - nftAsset.CollectionId, - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute new nft asset leaf: %s", err.Error()) - return err - } - err = nftTree.Set(uint64(mempoolTxDetail.AssetId), nNftAssetLeaf) - if err != nil { - logx.Errorf("[CommitterTask] unable to update nft tree: %s", err.Error()) - return err - } - break - case CollectionNonceAssetType: - baseBalance = strconv.FormatInt(accountMap[mempoolTxDetail.AccountIndex].CollectionNonce, 10) - newCollectionNonce, err = strconv.ParseInt(mempoolTxDetail.BalanceDelta, 10, 64) - if err != nil { - logx.Errorf("[CommitterTask] unable to parse int: %s", err.Error()) - return err - } - if newCollectionNonce != accountMap[mempoolTxDetail.AccountIndex].CollectionNonce+1 { - logx.Errorf("[CommitterTask] invalid collection nonce, newCollectionNonce=%d, accountCollectionNonce=%d", - newCollectionNonce, accountMap[mempoolTxDetail.AccountIndex].CollectionNonce) - return errors.New("[CommitterTask] invalid collection nonce") - } - break - default: - logx.Error("[CommitterTask] invalid tx type") - return errors.New("[CommitterTask] invalid tx type") - } - var ( - nonce, collectionNonce int64 - ) - if mempoolTxDetail.AccountIndex != commonConstant.NilTxAccountIndex { - nonce = accountMap[mempoolTxDetail.AccountIndex].Nonce - collectionNonce = accountMap[mempoolTxDetail.AccountIndex].CollectionNonce - } - txDetails = append(txDetails, &tx.TxDetail{ - AssetId: mempoolTxDetail.AssetId, - AssetType: mempoolTxDetail.AssetType, - AccountIndex: mempoolTxDetail.AccountIndex, - AccountName: mempoolTxDetail.AccountName, - Balance: baseBalance, - BalanceDelta: mempoolTxDetail.BalanceDelta, - Order: mempoolTxDetail.Order, - Nonce: nonce, - AccountOrder: mempoolTxDetail.AccountOrder, - CollectionNonce: collectionNonce, - }) - } - if mempoolTx.Nonce != commonConstant.NilNonce { - // update nonce - accountMap[mempoolTx.AccountIndex].Nonce = 
mempoolTx.Nonce - } - // check if we need to update nonce - if newCollectionNonce != commonConstant.NilCollectionId { - accountMap[mempoolTx.AccountIndex].CollectionNonce = newCollectionNonce - } - // update account tree - for accountIndex := range pendingUpdateAccountIndexMap { - nAccountLeafHash, err := tree.ComputeAccountLeafHash( - accountMap[accountIndex].AccountNameHash, - accountMap[accountIndex].PublicKey, - accountMap[accountIndex].Nonce, - accountMap[accountIndex].CollectionNonce, - (*accountAssetTrees)[accountIndex].Root(), - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to compute account leaf: %s", err.Error()) - return err - } - err = accountTree.Set(uint64(accountIndex), nAccountLeafHash) - if err != nil { - logx.Errorf("[CommitterTask] unable to update account tree: %s", err.Error()) - return err - } - } - - // add into mempool tx - pendingMempoolTxs = append(pendingMempoolTxs, mempoolTx) - // update mempool tx info - mempoolTx.L2BlockHeight = currentBlockHeight - mempoolTx.Status = mempool.SuccessTxStatus - // construct tx - // account root - hFunc := mimc.NewMiMC() - hFunc.Write(accountTree.Root()) - hFunc.Write(liquidityTree.Root()) - hFunc.Write(nftTree.Root()) - stateRoot := common.Bytes2Hex(hFunc.Sum(nil)) - finalStateRoot = stateRoot - oTx := ConvertMempoolTxToTx(mempoolTx, txDetails, stateRoot, currentBlockHeight) - oTx.TxIndex = int64(len(txs)) - txs = append(txs, oTx) - } - - // construct assets history - var ( - pendingUpdateAccounts []*Account - pendingNewAccountHistory []*AccountHistory - pendingUpdateLiquidity []*Liquidity - pendingNewLiquidityHistory []*LiquidityHistory - pendingUpdateNft []*L2Nft - pendingNewNftHistory []*L2NftHistory - ) - // handle account - for accountIndex, flag := range pendingUpdateAccountIndexMap { - if !flag { - continue - } - accountInfo, err := commonAsset.FromFormatAccountInfo(accountMap[accountIndex]) - if err != nil { - logx.Errorf("[CommitterTask] unable to convert from format account info: %s", err.Error()) - return err - } - pendingUpdateAccounts = append(pendingUpdateAccounts, accountInfo) - pendingNewAccountHistory = append(pendingNewAccountHistory, &AccountHistory{ - AccountIndex: accountInfo.AccountIndex, - Nonce: accountInfo.Nonce, - CollectionNonce: accountInfo.CollectionNonce, - AssetInfo: accountInfo.AssetInfo, - AssetRoot: accountInfo.AssetRoot, - L2BlockHeight: currentBlockHeight, - }) - } - for pairIndex, flag := range pendingUpdateLiquidityIndexMap { - if !flag { - continue - } - pendingUpdateLiquidity = append(pendingUpdateLiquidity, liquidityMap[pairIndex]) - pendingNewLiquidityHistory = append(pendingNewLiquidityHistory, &LiquidityHistory{ - PairIndex: liquidityMap[pairIndex].PairIndex, - AssetAId: liquidityMap[pairIndex].AssetAId, - AssetA: liquidityMap[pairIndex].AssetA, - AssetBId: liquidityMap[pairIndex].AssetBId, - AssetB: liquidityMap[pairIndex].AssetB, - LpAmount: liquidityMap[pairIndex].LpAmount, - KLast: liquidityMap[pairIndex].KLast, - FeeRate: liquidityMap[pairIndex].FeeRate, - TreasuryAccountIndex: liquidityMap[pairIndex].TreasuryAccountIndex, - TreasuryRate: liquidityMap[pairIndex].TreasuryRate, - L2BlockHeight: currentBlockHeight, - }) - } - for nftIndex, flag := range pendingNewNftIndexMap { - if !flag { - continue - } - pendingNewNftHistory = append(pendingNewNftHistory, &L2NftHistory{ - NftIndex: nftMap[nftIndex].NftIndex, - CreatorAccountIndex: nftMap[nftIndex].CreatorAccountIndex, - OwnerAccountIndex: nftMap[nftIndex].OwnerAccountIndex, - NftContentHash: 
nftMap[nftIndex].NftContentHash, - NftL1Address: nftMap[nftIndex].NftL1Address, - NftL1TokenId: nftMap[nftIndex].NftL1TokenId, - CreatorTreasuryRate: nftMap[nftIndex].CreatorTreasuryRate, - CollectionId: nftMap[nftIndex].CollectionId, - L2BlockHeight: currentBlockHeight, - }) - } - for nftIndex, flag := range pendingUpdateNftIndexMap { - if !flag { - continue - } - pendingUpdateNft = append(pendingUpdateNft, nftMap[nftIndex]) - } - var realTxsAmountPerBlock int - for i := 0; i < len(TxsAmountPerBlock); i++ { - if len(txs) <= TxsAmountPerBlock[i] { - realTxsAmountPerBlock = TxsAmountPerBlock[i] - logx.Infof("the block contains %d txs: %d real txs, %d empty txs", realTxsAmountPerBlock, len(txs), realTxsAmountPerBlock-len(txs)) - break - } - } - emptyPubdata := make([]byte, (realTxsAmountPerBlock-len(txs))*32*std.PubDataSizePerTx) - pubData = append(pubData, emptyPubdata...) - // create commitment - commitment := util.CreateBlockCommitment( - currentBlockHeight, - createdAt, - common.FromHex(oldStateRoot), - common.FromHex(finalStateRoot), - pubData, - int64(len(pubDataOffset)), - ) - // update old state root - oldStateRoot = finalStateRoot - // construct block - createAtTime := time.UnixMilli(createdAt) - var ( - oBlock *block.Block - oBlockForCommit *blockForCommit.BlockForCommit - ) - if len(txs) != 0 { - oBlock = &Block{ - Model: gorm.Model{ - CreatedAt: createAtTime, - }, - BlockSize: uint16(realTxsAmountPerBlock), - BlockCommitment: commitment, - BlockHeight: currentBlockHeight, - StateRoot: finalStateRoot, - PriorityOperations: priorityOperations, - PendingOnChainOperationsHash: common.Bytes2Hex(pendingOnChainOperationsHash), - Txs: txs, - BlockStatus: block.StatusPending, - } - if pendingOnChainOperationsPubData != nil { - onChainOperationsPubDataBytes, err := json.Marshal(pendingOnChainOperationsPubData) - if err != nil { - logx.Errorf("[CommitterTask] unable to marshal on chain operations pub data: %s", err.Error()) - return err - } - oBlock.PendingOnChainOperationsPubData = string(onChainOperationsPubDataBytes) - } - offsetBytes, err := json.Marshal(pubDataOffset) - if err != nil { - logx.Errorf("[CommitterTask] unable to marshal pub data: %s", err.Error()) - return err - } - oBlockForCommit = &BlockForCommit{ - BlockSize: uint16(realTxsAmountPerBlock), - BlockHeight: currentBlockHeight, - StateRoot: finalStateRoot, - PublicData: common.Bytes2Hex(pubData), - Timestamp: createdAt, - PublicDataOffsets: string(offsetBytes), - } - } - - err = tree.CommitTrees(uint64(finalityBlockNr), accountTree, accountAssetTrees, liquidityTree, nftTree) - if err != nil { - logx.Errorf("[CommitterTask] unable to commit trees after txs is executed", err) - return err - } - - // create block for committer - // create block, history, update mempool txs, create new l1 amount infos - err = ctx.BlockModel.CreateBlockForCommitter( - oBlock, - oBlockForCommit, - pendingMempoolTxs, - pendingDeleteMempoolTxs, - pendingUpdateAccounts, - pendingNewAccountHistory, - pendingUpdateLiquidity, - pendingNewLiquidityHistory, - pendingUpdateNft, - pendingNewNftHistory, - pendingNewNftWithdrawHistory, - ) - if err != nil { - logx.Errorf("[CommitterTask] unable to create block for committer: %s", err.Error()) - // rollback trees - err = tree.RollBackTrees(uint64(oBlock.BlockHeight)-1, accountTree, accountAssetTrees, liquidityTree, nftTree) - if err != nil { - logx.Errorf("[CommitterTask] unable to rollback trees", err) - } - return err - } - *lastCommitTimeStamp = time.Now() - } - return nil -} diff --git 
a/service/cronjob/committer/internal/logic/constants.go b/service/cronjob/committer/internal/logic/constants.go deleted file mode 100644 index b0a4814b6..000000000 --- a/service/cronjob/committer/internal/logic/constants.go +++ /dev/null @@ -1,92 +0,0 @@ -package logic - -import ( - "math/big" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/common/model/tx" -) - -type ( - // tx - Tx = tx.Tx - TxDetail = tx.TxDetail - // block - Block = block.Block - BlockForCommit = blockForCommit.BlockForCommit - // mempool - MempoolTx = mempool.MempoolTx - MempoolTxDetail = mempool.MempoolTxDetail - // assets - L2Nft = nft.L2Nft - // assets history - L2NftHistory = nft.L2NftHistory - // account history - Account = account.Account - AccountHistory = account.AccountHistory - - FormatAccountInfo = commonAsset.AccountInfo - FormatAccountHistoryInfo = commonAsset.FormatAccountHistoryInfo - - Liquidity = liquidity.Liquidity - LiquidityHistory = liquidity.LiquidityHistory - - SysconfigModel = sysconfig.SysconfigModel - MempoolModel = mempool.MempoolModel - BlockModel = block.BlockModel - AssetInfoModel = assetInfo.AssetInfoModel - AssetInfo = assetInfo.AssetInfo - - L2NftModel = nft.L2NftModel - L2NftHistoryModel = nft.L2NftHistoryModel - - PoolInfo = commonAsset.LiquidityInfo -) - -const ( - // tx status - TxStatusPending = tx.StatusPending - // asset type - GeneralAssetType = commonAsset.GeneralAssetType - LiquidityAssetType = commonAsset.LiquidityAssetType - NftAssetType = commonAsset.NftAssetType - CollectionNonceAssetType = commonAsset.CollectionNonceAssetType - - TxTypeRegisterZns = commonTx.TxTypeRegisterZns - TxTypeCreatePair = commonTx.TxTypeCreatePair - TxTypeUpdatePairRate = commonTx.TxTypeUpdatePairRate - TxTypeDeposit = commonTx.TxTypeDeposit - TxTypeTransfer = commonTx.TxTypeTransfer - TxTypeSwap = commonTx.TxTypeSwap - TxTypeAddLiquidity = commonTx.TxTypeAddLiquidity - TxTypeRemoveLiquidity = commonTx.TxTypeRemoveLiquidity - TxTypeMintNft = commonTx.TxTypeMintNft - TxTypeCreateCollection = commonTx.TxTypeCreateCollection - TxTypeTransferNft = commonTx.TxTypeTransferNft - TxTypeAtomicMatch = commonTx.TxTypeAtomicMatch - TxTypeCancelOffer = commonTx.TxTypeCancelOffer - TxTypeDepositNft = commonTx.TxTypeDepositNft - TxTypeWithdraw = commonTx.TxTypeWithdraw - TxTypeWithdrawNft = commonTx.TxTypeWithdrawNft - TxTypeFullExit = commonTx.TxTypeFullExit - TxTypeFullExitNft = commonTx.TxTypeFullExitNft -) - -const ( - // 15 minutes - MaxCommitterInterval = 60 * 1 -) - -var ( - ZeroBigInt = big.NewInt(0) - TxsAmountPerBlock []int -) diff --git a/service/cronjob/committer/internal/logic/utils.go b/service/cronjob/committer/internal/logic/utils.go deleted file mode 100644 index 646f2070b..000000000 --- a/service/cronjob/committer/internal/logic/utils.go +++ /dev/null @@ -1,216 +0,0 @@ -package logic - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/util" -) - -func ConvertMempoolTxToTx(mempoolTx *MempoolTx, 
txDetails []*tx.TxDetail, accountRoot string, currentBlockHeight int64) (tx *Tx) { - tx = &Tx{ - TxHash: mempoolTx.TxHash, - TxType: mempoolTx.TxType, - GasFee: mempoolTx.GasFee, - GasFeeAssetId: mempoolTx.GasFeeAssetId, - TxStatus: TxStatusPending, - BlockHeight: currentBlockHeight, - StateRoot: accountRoot, - NftIndex: mempoolTx.NftIndex, - PairIndex: mempoolTx.PairIndex, - AssetId: mempoolTx.AssetId, - TxAmount: mempoolTx.TxAmount, - NativeAddress: mempoolTx.NativeAddress, - TxInfo: mempoolTx.TxInfo, - TxDetails: txDetails, - ExtraInfo: mempoolTx.ExtraInfo, - Memo: mempoolTx.Memo, - AccountIndex: mempoolTx.AccountIndex, - Nonce: mempoolTx.Nonce, - ExpiredAt: mempoolTx.ExpiredAt, - } - return tx -} - -/** -handleTxPubData: handle different layer-1 txs -*/ -func handleTxPubData( - mempoolTx *MempoolTx, - oldPubData []byte, - oldPendingOnChainOperationsPubData [][]byte, - oldPendingOnChainOperationsHash []byte, - oldPubDataOffset []uint32, -) ( - priorityOperation int64, - newPendingOnChainOperationsPubData [][]byte, - newPendingOnChainOperationsHash []byte, - newPubData []byte, - newPubDataOffset []uint32, - err error, -) { - priorityOperation = 0 - newPendingOnChainOperationsHash = oldPendingOnChainOperationsHash - newPendingOnChainOperationsPubData = oldPendingOnChainOperationsPubData - newPubDataOffset = oldPubDataOffset - var pubData []byte - switch mempoolTx.TxType { - case TxTypeRegisterZns: - pubData, err = util.ConvertTxToRegisterZNSPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to registerZNS pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - break - case TxTypeCreatePair: - pubData, err = util.ConvertTxToCreatePairPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to create pair pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - break - case TxTypeUpdatePairRate: - pubData, err = util.ConvertTxToUpdatePairRatePubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to update pair rate pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - break - case TxTypeDeposit: - pubData, err = util.ConvertTxToDepositPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to deposit pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - break - case TxTypeDepositNft: - pubData, err = util.ConvertTxToDepositNftPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to deposit nft pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - break - case TxTypeTransfer: - pubData, err = util.ConvertTxToTransferPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to transfer pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeSwap: - pubData, err = util.ConvertTxToSwapPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to swap pub data") - return priorityOperation, nil, 
nil, nil, nil, err - } - break - case TxTypeAddLiquidity: - pubData, err = util.ConvertTxToAddLiquidityPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to add liquidity pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeRemoveLiquidity: - pubData, err = util.ConvertTxToRemoveLiquidityPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to remove liquidity pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeCreateCollection: - pubData, err = util.ConvertTxToCreateCollectionPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to create collection pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeMintNft: - pubData, err = util.ConvertTxToMintNftPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to mint nft pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeTransferNft: - pubData, err = util.ConvertTxToTransferNftPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to transfer nft pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeAtomicMatch: - pubData, err = util.ConvertTxToAtomicMatchPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to atomic match pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeCancelOffer: - pubData, err = util.ConvertTxToCancelOfferPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to cancel offer pub data") - return priorityOperation, nil, nil, nil, nil, err - } - break - case TxTypeWithdraw: - pubData, err = util.ConvertTxToWithdrawPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to withdraw pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - newPendingOnChainOperationsPubData = append(newPendingOnChainOperationsPubData, pubData) - newPendingOnChainOperationsHash = util.ConcatKeccakHash(oldPendingOnChainOperationsHash, pubData) - break - case TxTypeWithdrawNft: - pubData, err = util.ConvertTxToWithdrawNftPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to withdraw nft pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - newPendingOnChainOperationsPubData = append(newPendingOnChainOperationsPubData, pubData) - newPendingOnChainOperationsHash = util.ConcatKeccakHash(oldPendingOnChainOperationsHash, pubData) - break - case TxTypeFullExit: - pubData, err = util.ConvertTxToFullExitPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to full exit pub data") - return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - newPendingOnChainOperationsPubData = append(newPendingOnChainOperationsPubData, pubData) - newPendingOnChainOperationsHash = util.ConcatKeccakHash(oldPendingOnChainOperationsHash, pubData) - break - case TxTypeFullExitNft: - pubData, err = util.ConvertTxToFullExitNftPubData(mempoolTx) - if err != nil { - logx.Errorf("[handleTxPubData] unable to convert tx to full exit nft pub data") - 
return priorityOperation, nil, nil, nil, nil, err - } - newPubDataOffset = append(newPubDataOffset, uint32(len(oldPubData))) - priorityOperation++ - newPendingOnChainOperationsPubData = append(newPendingOnChainOperationsPubData, pubData) - newPendingOnChainOperationsHash = util.ConcatKeccakHash(oldPendingOnChainOperationsHash, pubData) - break - default: - logx.Errorf("[handleTxPubData] invalid tx type") - return priorityOperation, nil, nil, nil, nil, errors.New("[handleTxPubData] invalid tx type") - } - newPubData = append(oldPubData, pubData...) - return priorityOperation, newPendingOnChainOperationsPubData, newPendingOnChainOperationsHash, newPubData, newPubDataOffset, nil -} diff --git a/service/cronjob/committer/internal/svc/servicecontext.go b/service/cronjob/committer/internal/svc/servicecontext.go deleted file mode 100644 index a64bb266d..000000000 --- a/service/cronjob/committer/internal/svc/servicecontext.go +++ /dev/null @@ -1,117 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/service/cronjob/committer/internal/config" -) - -type ServiceContext struct { - Config config.Config - - AccountModel account.AccountModel - AccountHistoryModel account.AccountHistoryModel - - L2NftModel nft.L2NftModel - LiquidityModel liquidity.LiquidityModel - LiquidityHistoryModel liquidity.LiquidityHistoryModel - L2NftHistoryModel nft.L2NftHistoryModel - - TxDetailModel tx.TxDetailModel - TxModel tx.TxModel - BlockModel block.BlockModel - MempoolDetailModel mempool.MempoolTxDetailModel - MempoolModel mempool.MempoolModel - L2AssetInfoModel assetInfo.AssetInfoModel - - SysConfigModel sysconfig.SysconfigModel -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Errorf("gorm connect db error, err = %s", err.Error()) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - - return &ServiceContext{ - Config: c, - AccountModel: account.NewAccountModel(conn, c.CacheRedis, gormPointer), - AccountHistoryModel: account.NewAccountHistoryModel(conn, c.CacheRedis, gormPointer), - L2NftModel: nft.NewL2NftModel(conn, c.CacheRedis, gormPointer), - LiquidityModel: liquidity.NewLiquidityModel(conn, c.CacheRedis, gormPointer), - LiquidityHistoryModel: liquidity.NewLiquidityHistoryModel(conn, c.CacheRedis, gormPointer), - L2NftHistoryModel: nft.NewL2NftHistoryModel(conn, c.CacheRedis, gormPointer), - TxDetailModel: tx.NewTxDetailModel(conn, c.CacheRedis, gormPointer), - TxModel: tx.NewTxModel(conn, c.CacheRedis, gormPointer, redisConn), - BlockModel: block.NewBlockModel(conn, c.CacheRedis, gormPointer, redisConn), - MempoolDetailModel: 
mempool.NewMempoolDetailModel(conn, c.CacheRedis, gormPointer), - MempoolModel: mempool.NewMempoolModel(conn, c.CacheRedis, gormPointer), - L2AssetInfoModel: assetInfo.NewAssetInfoModel(conn, c.CacheRedis, gormPointer), - SysConfigModel: sysconfig.NewSysconfigModel(conn, c.CacheRedis, gormPointer), - } -} - -/* -func (s *ServiceContext) Run() { - mempoolTxs, err := s.MempoolModel.GetAllMempoolTxsList() - if err != nil { - errInfo := fmt.Sprintf("[CommitterTask] => [MempoolModel.GetAllMempoolTxsList] mempool query error:%s", err.Error()) - logx.Error(errInfo) - return - } - if len(mempoolTxs) == 0 { - logx.Info("[CommitterTask] No new mempool transactions") - return - } else { - s.CommitterTask(mempoolTxs) - } -} -func (s *ServiceContext) InitMerkleTree() (err error) { - accounts, err := s.AccountModel.GetAllAccounts() - if err != nil { - return err - } - generalAssets, err := s.AccountAssetModel.GetAllAccountAssets() - if err != nil { - return err - } - liquidityAssets, err := s.LiquidityAssetModel.GetAllLiquidityAssets() - if err != nil { - return err - } - lockAssets, err := s.LockAssetModel.GetAllLockedAssets() - if err != nil { - return err - } - s.GlobalState, err = smt.ConstructGlobalState(accounts, generalAssets, liquidityAssets, lockAssets) - if err != nil { - return err - } - return nil -} -func (s *ServiceContext) CommitterTask(mempoolTxs []*mempool.MempoolTx) { - // - logx.Info("CommitterTask") -} -*/ diff --git a/service/cronjob/monitor/Dockerfile b/service/cronjob/monitor/Dockerfile deleted file mode 100644 index 67a30dd80..000000000 --- a/service/cronjob/monitor/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 -# ENV GOPROXY https://goproxy.cn,direct - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/monitor /app/monitor -COPY configyaml /app/etc - -CMD ["./monitor", "-f", "etc/monitor.yaml"] \ No newline at end of file diff --git a/service/cronjob/monitor/etc/config.yaml.example b/service/cronjob/monitor/etc/config.yaml.example deleted file mode 100644 index c7ea3cefd..000000000 --- a/service/cronjob/monitor/etc/config.yaml.example +++ /dev/null @@ -1,20 +0,0 @@ -Name: monitor.cronjob - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Pass: myredis - Type: node - -ChainConfig: - # NetworkRPCSysConfigName: "BscTestNetworkRpc" - NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" - GovernanceContractAddrSysConfigName: "GovernanceContract" - # StartL1BlockHeight: 520000 - StartL1BlockHeight: 0 - PendingBlocksCount: 0 - MaxHandledBlocksCount: 100000 - diff --git a/service/cronjob/monitor/internal/config/config.go b/service/cronjob/monitor/internal/config/config.go deleted file mode 100644 index 7debccca3..000000000 --- a/service/cronjob/monitor/internal/config/config.go +++ /dev/null @@ -1,20 +0,0 @@ -package config - -import ( - "github.com/zeromicro/go-zero/core/stores/cache" -) - -type Config struct { - Postgres struct { - DataSource string - } - CacheRedis cache.CacheConf - ChainConfig struct { - NetworkRPCSysConfigName string - ZkbasContractAddrSysConfigName string - 
GovernanceContractAddrSysConfigName string - StartL1BlockHeight int64 - PendingBlocksCount uint64 - MaxHandledBlocksCount int64 - } -} diff --git a/service/cronjob/monitor/internal/logic/blockMonitor.go b/service/cronjob/monitor/internal/logic/blockMonitor.go deleted file mode 100644 index 46cae496f..000000000 --- a/service/cronjob/monitor/internal/logic/blockMonitor.go +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "math/big" - - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/l1BlockMonitor" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" -) - -/* - MonitorBlocks: monitor layer-1 block events -*/ -func MonitorBlocks(cli *ProviderClient, startHeight int64, pendingBlocksCount uint64, maxHandledBlocksCount int64, zkbasContract string, l1BlockMonitorModel L1BlockMonitorModel) (err error) { - latestHandledBlock, err := l1BlockMonitorModel.GetLatestL1BlockMonitorByBlock() - logx.Errorf("========== start MonitorBlocks ==========") - var handledHeight int64 - if err != nil { - if err == errorcode.DbErrNotFound { - handledHeight = startHeight - } else { - logx.Errorf("[l1BlockMonitorModel.GetLatestL1BlockMonitorByBlock]: %s", err.Error()) - return err - } - } else { - handledHeight = latestHandledBlock.L1BlockHeight - } - // get latest l1 block height(latest height - pendingBlocksCount) - latestHeight, err := cli.GetHeight() - if err != nil { - logx.Errorf("[blockMoniter.MonitorBlocks]<=>[cli.GetHeight] %s", err.Error()) - return err - } - safeHeight := latestHeight - pendingBlocksCount - safeHeight = uint64(util.MinInt64(int64(safeHeight), handledHeight+maxHandledBlocksCount)) - if safeHeight <= uint64(handledHeight) { - logx.Error("[l2BlockMonitor.MonitorBlocks] no new blocks need to be handled") - return nil - } - contractAddress := common.HexToAddress(zkbasContract) - logx.Infof("[MonitorBlocks] fromBlock: %d, toBlock: %d", big.NewInt(handledHeight+1), big.NewInt(int64(safeHeight))) - zkbasInstance, err := zkbas.LoadZkbasInstance(cli, zkbasContract) - if err != nil { - logx.Errorf("[MonitorBlocks] unable to load zkbas instance") - return err - } - priorityRequests, err := zkbasInstance.ZkbasFilterer. 
- FilterNewPriorityRequest(&bind.FilterOpts{Start: uint64(handledHeight + 1), End: &safeHeight}) - if err != nil { - logx.Errorf("[MonitorBlocks] unable to filter deposit or lock events: %s", err.Error()) - return err - } - priorityRequestCount, priorityRequestCountCheck := 0, 0 - for priorityRequests.Next() { - priorityRequestCount++ - } - query := ethereum.FilterQuery{ - FromBlock: big.NewInt(handledHeight + 1), - ToBlock: big.NewInt(int64(safeHeight)), - Addresses: []common.Address{contractAddress}, - } - logs, err := cli.FilterLogs(context.Background(), query) - if err != nil { - errInfo := fmt.Sprintf("[blockMoniter.MonitorBlocks]<=>[cli.FilterLogs] %s", err.Error()) - logx.Error(errInfo) - return err - } - var ( - l1EventInfos []*L1EventInfo - l2TxEventMonitors []*L2TxEventMonitor - l2BlockEventMonitors []*L2BlockEventMonitor - ) - for _, vlog := range logs { - l1EventInfo := &L1EventInfo{ - TxHash: vlog.TxHash.Hex(), - } - switch vlog.Topics[0].Hex() { - case zkbasLogNewPriorityRequestSigHash.Hex(): - priorityRequestCountCheck++ - var event zkbas.ZkbasNewPriorityRequest - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameNewPriorityRequest, vlog.Data); err != nil { - logx.Errorf("[blockMoniter.MonitorBlocks]<=>[ZkbasContractAbi.UnpackIntoInterface] err: %s", err.Error()) - return err - } - l1EventInfo.EventType = EventTypeNewPriorityRequest - l2TxEventMonitorInfo := &L2TxEventMonitor{ - L1TxHash: vlog.TxHash.Hex(), - L1BlockHeight: int64(vlog.BlockNumber), - SenderAddress: event.Sender.Hex(), - RequestId: int64(event.SerialId), - TxType: int64(event.TxType), - Pubdata: common.Bytes2Hex(event.PubData), - ExpirationBlock: event.ExpirationBlock.Int64(), - Status: l2TxEventMonitor.PendingStatus, - } - l2TxEventMonitors = append(l2TxEventMonitors, l2TxEventMonitorInfo) - case zkbasLogWithdrawalSigHash.Hex(): - case zkbasLogWithdrawalPendingSigHash.Hex(): - case zkbasLogBlockCommitSigHash.Hex(): - var event zkbas.ZkbasBlockCommit - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockCommit, vlog.Data); err != nil { - errInfo := fmt.Sprintf("[blockMoniter.MonitorBlocks]<=>[ZkbasContractAbi.UnpackIntoInterface] %s", err.Error()) - logx.Error(errInfo) - return err - } - l1EventInfo.EventType = EventTypeCommittedBlock - l2BlockEventMonitorInfo := &L2BlockEventMonitor{ - BlockEventType: EventTypeCommittedBlock, - L1BlockHeight: int64(vlog.BlockNumber), - L1TxHash: vlog.TxHash.Hex(), - L2BlockHeight: int64(event.BlockNumber), - Status: PendingStatusL2BlockEventMonitor, - } - l2BlockEventMonitors = append(l2BlockEventMonitors, l2BlockEventMonitorInfo) - case zkbasLogBlockVerificationSigHash.Hex(): - var event zkbas.ZkbasBlockVerification - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockVerification, vlog.Data); err != nil { - errInfo := fmt.Sprintf("[blockMoniter.MonitorBlocks]<=>[ZkbasContractAbi.UnpackIntoInterface] %s", err.Error()) - logx.Error(errInfo) - return err - } - l1EventInfo.EventType = EventTypeVerifiedBlock - l2BlockEventMonitorInfo := &L2BlockEventMonitor{ - BlockEventType: EventTypeVerifiedBlock, - L1BlockHeight: int64(vlog.BlockNumber), - L1TxHash: vlog.TxHash.Hex(), - L2BlockHeight: int64(event.BlockNumber), - Status: PendingStatusL2BlockEventMonitor, - } - l2BlockEventMonitors = append(l2BlockEventMonitors, l2BlockEventMonitorInfo) - case zkbasLogBlocksRevertSigHash.Hex(): - var event zkbas.ZkbasBlocksRevert - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlocksRevert, vlog.Data); err != nil { - errInfo := 
fmt.Sprintf("[blockMoniter.MonitorBlocks]<=>[ZkbasContractAbi.UnpackIntoInterface] %s", err.Error()) - logx.Error(errInfo) - return err - } - l1EventInfo.EventType = EventTypeRevertedBlock - l2BlockEventMonitorInfo := &L2BlockEventMonitor{ - BlockEventType: EventTypeRevertedBlock, - L1BlockHeight: int64(vlog.BlockNumber), - L1TxHash: vlog.TxHash.Hex(), - L2BlockHeight: int64(event.TotalBlocksCommitted), - Status: PendingStatusL2BlockEventMonitor, - } - l2BlockEventMonitors = append(l2BlockEventMonitors, l2BlockEventMonitorInfo) - default: - } - l1EventInfos = append(l1EventInfos, l1EventInfo) - } - if priorityRequestCount != priorityRequestCountCheck { - logx.Errorf("[MonitorBlocks] new priority requests events not match, try it again") - return errors.New("[MonitorBlocks] new priority requests events not match, try it again") - } - eventInfosBytes, err := json.Marshal(l1EventInfos) - if err != nil { - logx.Errorf("[blockMoniter.MonitorBlocks]<=>[json.Marshal] %s", err.Error()) - return err - } - l1BlockMonitorInfo := &l1BlockMonitor.L1BlockMonitor{ - L1BlockHeight: int64(safeHeight), - BlockInfo: string(eventInfosBytes), - MonitorType: l1BlockMonitor.MonitorTypeBlock, - } - if err = l1BlockMonitorModel.CreateMonitorsInfo(l1BlockMonitorInfo, l2TxEventMonitors, l2BlockEventMonitors); err != nil { - errInfo := fmt.Sprintf("[l1BlockMonitorModel.CreateMonitorsInfo] %s", err.Error()) - logx.Error(errInfo) - return err - } - logx.Info("[MonitorBlocks] create txs count:", len(l2TxEventMonitors)) - logx.Info("[MonitorBlocks] create blocks events count:", len(l2BlockEventMonitors)) - logx.Errorf("========== end MonitorBlocks ==========") - return nil -} diff --git a/service/cronjob/monitor/internal/logic/l2BlockMonitor.go b/service/cronjob/monitor/internal/logic/l2BlockMonitor.go deleted file mode 100644 index a5d42efef..000000000 --- a/service/cronjob/monitor/internal/logic/l2BlockMonitor.go +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - "sort" - - "github.com/bnb-chain/zkbas-eth-rpc/_rpc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/accountoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/l2eventoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/liquidityoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/mempooloperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/nftoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type l2BlockEventsMonitor struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - accountOperator accountoperator.Model - mempoolOperator mempooloperator.Model - liquidityOperator liquidityoperator.Model - nftOperator nftoperator.Model - l2eventOperator l2eventoperator.Model - commglobalmap commglobalmap.Model -} - -func Newl2BlockEventsMonitor(ctx context.Context, svcCtx *svc.ServiceContext) *l2BlockEventsMonitor { - return &l2BlockEventsMonitor{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - accountOperator: accountoperator.New(svcCtx), - mempoolOperator: mempooloperator.New(svcCtx), - liquidityOperator: liquidityoperator.New(svcCtx), - nftOperator: nftoperator.New(svcCtx), - l2eventOperator: l2eventoperator.New(svcCtx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func MonitorL2BlockEvents( - ctx context.Context, - svcCtx *svc.ServiceContext, - bscCli *_rpc.ProviderClient, - bscPendingBlocksCount uint64, - mempoolModel MempoolModel, - blockModel BlockModel, - l1TxSenderModel L1TxSenderModel, -) (err error) { - logx.Errorf("========== start MonitorL2BlockEvents ==========") - pendingSenders, err := l1TxSenderModel.GetL1TxSendersByTxStatus(L1TxSenderPendingStatus) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] unable to get l1 tx senders by tx status: %s", err.Error()) - return err - } - var ( - relatedBlocks = make(map[int64]*Block) - pendingUpdateSenders []*L1TxSender - pendingUpdateProofSenderStatus = make(map[int64]int) - ) - for _, pendingSender := range pendingSenders { - txHash := pendingSender.L1TxHash - // check if the status of tx is success - _, isPending, err := bscCli.GetTransactionByHash(txHash) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetTransactionByHash err: %s", err.Error()) - continue - } - if isPending { - continue - } - receipt, err := bscCli.GetTransactionReceipt(txHash) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetTransactionReceipt err: %s", err.Error()) - continue - } - // get latest l1 block height(latest height - pendingBlocksCount) - latestHeight, err := bscCli.GetHeight() - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetHeight err: %s", err.Error()) - return err - } - if latestHeight < receipt.BlockNumber.Uint64()+bscPendingBlocksCount { - continue - } - var ( - isValidSender bool - isQueriedBlockHash = make(map[string]int64) - ) - for _, vlog := range receipt.Logs { - if isQueriedBlockHash[vlog.BlockHash.Hex()] == 0 { - onChainBlockInfo, err := bscCli.GetBlockHeaderByHash(vlog.BlockHash.Hex()) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetBlockHeaderByHash err: %s", err.Error()) - return err - } - isQueriedBlockHash[vlog.BlockHash.Hex()] 
= int64(onChainBlockInfo.Time) - } - timeAt := isQueriedBlockHash[vlog.BlockHash.Hex()] - switch vlog.Topics[0].Hex() { - case zkbasLogBlockCommitSigHash.Hex(): - var event ZkbasBlockCommit - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockCommit, vlog.Data); err != nil { - logx.Errorf("[MonitorL2BlockEvents] UnpackIntoInterface err: %s", err.Error()) - return err - } - blockHeight := int64(event.BlockNumber) - if relatedBlocks[blockHeight] == nil { - relatedBlocks[blockHeight], err = blockModel.GetBlockByBlockHeightWithoutTx(blockHeight) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetBlockByBlockHeightWithoutTx err: %s", err.Error()) - return err - } - } - if blockHeight == pendingSender.L2BlockHeight { - isValidSender = true - } - relatedBlocks[blockHeight].CommittedTxHash = receipt.TxHash.Hex() - relatedBlocks[blockHeight].CommittedAt = timeAt - relatedBlocks[blockHeight].BlockStatus = block.StatusCommitted - case zkbasLogBlockVerificationSigHash.Hex(): - var event ZkbasBlockVerification - if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockVerification, vlog.Data); err != nil { - logx.Errorf("[MonitorL2BlockEvents] UnpackIntoInterface err: %s", err.Error()) - return err - } - blockHeight := int64(event.BlockNumber) - if relatedBlocks[blockHeight] == nil { - relatedBlocks[blockHeight], err = blockModel.GetBlockByBlockHeightWithoutTx(blockHeight) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetBlockByBlockHeightWithoutTx err: %s", err.Error()) - return err - } - } - if blockHeight == pendingSender.L2BlockHeight { - isValidSender = true - } - relatedBlocks[blockHeight].VerifiedTxHash = receipt.TxHash.Hex() - relatedBlocks[blockHeight].VerifiedAt = timeAt - relatedBlocks[blockHeight].BlockStatus = block.StatusVerifiedAndExecuted - pendingUpdateProofSenderStatus[blockHeight] = proofSender.Confirmed - case zkbasLogBlocksRevertSigHash.Hex(): - // TODO revert - default: - } - } - if isValidSender { - pendingSender.TxStatus = L1TxSenderHandledStatus - pendingUpdateSenders = append(pendingUpdateSenders, pendingSender) - } - } - var pendingUpdateBlocks []*Block - for _, pendingUpdateBlock := range relatedBlocks { - pendingUpdateBlocks = append(pendingUpdateBlocks, pendingUpdateBlock) - } - if len(pendingUpdateBlocks) != 0 { - sort.Slice(pendingUpdateBlocks, func(i, j int) bool { - return pendingUpdateBlocks[i].BlockHeight < pendingUpdateBlocks[j].BlockHeight - }) - } - // handle executed blocks - var pendingUpdateMempoolTxs []*MempoolTx - for _, pendingUpdateBlock := range pendingUpdateBlocks { - if pendingUpdateBlock.BlockStatus == BlockVerifiedStatus { - rowsAffected, pendingDeleteMempoolTxs, err := mempoolModel.GetMempoolTxsByBlockHeight(pendingUpdateBlock.BlockHeight) - if err != nil { - logx.Errorf("[MonitorL2BlockEvents] GetMempoolTxsByBlockHeight err: %s", err.Error()) - return err - } - if rowsAffected == 0 { - continue - } - pendingUpdateMempoolTxs = append(pendingUpdateMempoolTxs, pendingDeleteMempoolTxs...) 
- } - } - // update blocks, blockDetails, updateEvents, sender - // update assets, locked assets, liquidity - // delete mempool txs - if err = l1TxSenderModel.UpdateRelatedEventsAndResetRelatedAssetsAndTxs(pendingUpdateBlocks, - pendingUpdateSenders, pendingUpdateMempoolTxs, pendingUpdateProofSenderStatus); err != nil { - logx.Errorf("[MonitorL2BlockEvents] UpdateRelatedEventsAndResetRelatedAssetsAndTxs err: %s", err.Error()) - return err - } - // update account cache for globalrpc sendtx interface - m := Newl2BlockEventsMonitor(ctx, svcCtx) - for _, mempooltx := range pendingUpdateMempoolTxs { - if err := m.commglobalmap.SetLatestAccountInfoInToCache(ctx, mempooltx.AccountIndex); err != nil { - logx.Errorf("[CreateMempoolTxs] unable to CreateMempoolTxs, error: %s", err.Error()) - } - } - logx.Errorf("[MonitorL2BlockEvents] update blocks count: %d", len(pendingUpdateBlocks)) - logx.Errorf("[MonitorL2BlockEvents] update senders count: %d", len(pendingUpdateSenders)) - logx.Errorf("[MonitorL2BlockEvents] update mempool txs count: %d", len(pendingUpdateMempoolTxs)) - logx.Errorf("========== end MonitorL2BlockEvents ==========") - return nil -} diff --git a/service/cronjob/monitor/internal/logic/mempoolMonitor.go b/service/cronjob/monitor/internal/logic/mempoolMonitor.go deleted file mode 100644 index e7ddcb941..000000000 --- a/service/cronjob/monitor/internal/logic/mempoolMonitor.go +++ /dev/null @@ -1,827 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - "encoding/json" - "errors" - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/accountoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/l2eventoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/liquidityoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/mempooloperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/repo/nftoperator" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type mempoolMonitor struct { - logx.Logger - ctx context.Context - svcCtx *svc.ServiceContext - accountOperator accountoperator.Model - mempoolOperator mempooloperator.Model - liquidityOperator liquidityoperator.Model - nftOperator nftoperator.Model - l2eventOperator l2eventoperator.Model - commglobalmap commglobalmap.Model -} - -func NewMempoolMonitor(ctx context.Context, svcCtx *svc.ServiceContext) *mempoolMonitor { - return &mempoolMonitor{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - accountOperator: accountoperator.New(svcCtx), - mempoolOperator: mempooloperator.New(svcCtx), - liquidityOperator: liquidityoperator.New(svcCtx), - nftOperator: nftoperator.New(svcCtx), - l2eventOperator: l2eventoperator.New(svcCtx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func MonitorMempool(ctx context.Context, svcCtx *svc.ServiceContext) error { - logx.Errorf("========== start MonitorMempool ==========") - txs, err := svcCtx.L2TxEventMonitorModel.GetL2TxEventMonitorsByStatus(PendingStatus) - if err != nil { - if err == errorcode.DbErrNotFound { - logx.Info("[MonitorMempool] no l2 oTx event monitors") - return err - } else { - logx.Error("[MonitorMempool] unable to get l2 oTx event monitors") - return err - } - } - var ( - pendingNewAccounts []*account.Account - pendingNewMempoolTxs []*mempool.MempoolTx - pendingNewLiquidityInfos []*liquidity.Liquidity - pendingNewNfts []*nft.L2Nft - newAccountInfoMap = make(map[string]*account.Account) - newNftInfoMap = make(map[int64]*commonAsset.NftInfo) - newLiquidityInfoMap = make(map[int64]*liquidity.Liquidity) - relatedAccountIndex = make(map[int64]bool) - ) - // get last handled request id - currentRequestId, err := svcCtx.L2TxEventMonitorModel.GetLastHandledRequestId() - if err != nil { - logx.Errorf("[MonitorMempool] unable to get last handled request id: %s", err.Error()) - return err - } - for _, oTx := range txs { - // set oTx as handled - oTx.Status = l2TxEventMonitor.HandledStatus - // request id must be in order - if oTx.RequestId != currentRequestId+1 { - logx.Errorf("[MonitorMempool] invalid request id") - return errors.New("[MonitorMempool] invalid request id") - } - 
currentRequestId++ - txHash := ComputeL1TxTxHash(oTx.RequestId, oTx.L1TxHash) - // handle oTx based on oTx type - switch oTx.TxType { - case TxTypeRegisterZns: - // parse oTx info - txInfo, err := util.ParseRegisterZnsPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse registerZNS pub data: %s", err.Error()) - return err - } - // check if the account name has been registered - _, err = svcCtx.AccountModel.GetAccountByAccountName(txInfo.AccountName) - if err != errorcode.DbErrNotFound { - logx.Errorf("[MonitorMempool] account name has been registered") - return errors.New("[MonitorMempool] account name has been registered") - } - // set correct account index - //nextAccountIndex++ - //txInfo.AccountIndex = nextAccountIndex - // create new account and account history - accountInfo := &account.Account{ - AccountIndex: txInfo.AccountIndex, - AccountName: txInfo.AccountName, - PublicKey: txInfo.PubKey, - AccountNameHash: common.Bytes2Hex(txInfo.AccountNameHash), - L1Address: oTx.SenderAddress, - Nonce: commonConstant.NilNonce, - CollectionNonce: commonConstant.NilNonce, - AssetInfo: commonConstant.NilAssetInfo, - AssetRoot: common.Bytes2Hex(tree.NilAccountAssetRoot), - Status: account.AccountStatusPending, - } - pendingNewAccounts = append(pendingNewAccounts, accountInfo) - accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) - newAccountInfoMap[accountNameHash] = accountInfo - // create mempool oTx - // serialize oTx info - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFeeAssetId: commonConstant.NilAssetId, - GasFee: commonConstant.NilAssetAmountStr, - NftIndex: commonConstant.NilTxNftIndex, - PairIndex: commonConstant.NilPairIndex, - AssetId: commonConstant.NilAssetId, - TxAmount: commonConstant.NilAssetAmountStr, - NativeAddress: oTx.SenderAddress, - TxInfo: string(txInfoBytes), - AccountIndex: txInfo.AccountIndex, - Nonce: commonConstant.NilNonce, - ExpiredAt: commonConstant.NilExpiredAt, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - case TxTypeCreatePair: - // parse oTx info - txInfo, err := util.ParseCreatePairPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse registerZNS pub data: %s", err.Error()) - return err - } - // liquidity info - liquidityInfo := &liquidity.Liquidity{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetA: ZeroBigIntString, - AssetBId: txInfo.AssetBId, - AssetB: ZeroBigIntString, - LpAmount: ZeroBigIntString, - KLast: ZeroBigIntString, - TreasuryAccountIndex: txInfo.TreasuryAccountIndex, - FeeRate: txInfo.FeeRate, - TreasuryRate: txInfo.TreasuryRate, - } - newLiquidityInfoMap[txInfo.PairIndex] = liquidityInfo - pendingNewLiquidityInfos = append(pendingNewLiquidityInfos, liquidityInfo) - // tx detail - poolInfo := &commonAsset.LiquidityInfo{ - PairIndex: txInfo.PairIndex, - AssetAId: txInfo.AssetAId, - AssetA: big.NewInt(0), - AssetBId: txInfo.AssetBId, - AssetB: big.NewInt(0), - LpAmount: big.NewInt(0), - KLast: big.NewInt(0), - FeeRate: txInfo.FeeRate, - TreasuryAccountIndex: txInfo.TreasuryAccountIndex, - TreasuryRate: txInfo.TreasuryRate, - } - txDetail := &mempool.MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: 
commonAsset.LiquidityAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: poolInfo.String(), - Order: 0, - AccountOrder: commonConstant.NilAccountOrder, - } - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFeeAssetId: commonConstant.NilAssetId, - GasFee: commonConstant.NilAssetAmountStr, - NftIndex: commonConstant.NilTxNftIndex, - PairIndex: txInfo.PairIndex, - AssetId: commonConstant.NilAssetId, - TxAmount: commonConstant.NilAssetAmountStr, - NativeAddress: commonConstant.NilL1Address, - MempoolDetails: []*mempool.MempoolTxDetail{txDetail}, - TxInfo: string(txInfoBytes), - AccountIndex: commonConstant.NilTxAccountIndex, - Nonce: commonConstant.NilNonce, - ExpiredAt: commonConstant.NilExpiredAt, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - case TxTypeUpdatePairRate: - // create mempool oTx - txInfo, err := util.ParseUpdatePairRatePubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse update pair rate pub data: %s", err.Error()) - return err - } - var liquidityInfo *liquidity.Liquidity - if newLiquidityInfoMap[txInfo.PairIndex] != nil { - liquidityInfo = newLiquidityInfoMap[txInfo.PairIndex] - } else { - liquidityInfo, err = svcCtx.LiquidityModel.GetLiquidityByPairIndex(txInfo.PairIndex) - if err != nil { - logx.Errorf("[MonitorMempool] unable to get liquidity by pair index: %s", err.Error()) - return err - } - } - liquidityInfo.FeeRate = txInfo.FeeRate - liquidityInfo.TreasuryAccountIndex = txInfo.TreasuryAccountIndex - liquidityInfo.TreasuryRate = txInfo.TreasuryRate - // construct mempool tx - poolInfo, err := commonAsset.ConstructLiquidityInfo( - liquidityInfo.PairIndex, - liquidityInfo.AssetAId, - liquidityInfo.AssetA, - liquidityInfo.AssetBId, - liquidityInfo.AssetB, - liquidityInfo.LpAmount, - liquidityInfo.KLast, - liquidityInfo.FeeRate, - liquidityInfo.TreasuryAccountIndex, - liquidityInfo.TreasuryRate, - ) - if err != nil { - logx.Errorf("[MonitorMempool] unable to construct liquidity info: %s", err.Error()) - return err - } - txDetail := &mempool.MempoolTxDetail{ - AssetId: txInfo.PairIndex, - AssetType: commonAsset.LiquidityAssetType, - AccountIndex: commonConstant.NilTxAccountIndex, - AccountName: commonConstant.NilAccountName, - BalanceDelta: poolInfo.String(), - Order: 0, - AccountOrder: commonConstant.NilAccountOrder, - } - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFeeAssetId: commonConstant.NilAssetId, - GasFee: commonConstant.NilAssetAmountStr, - NftIndex: commonConstant.NilTxNftIndex, - PairIndex: liquidityInfo.PairIndex, - AssetId: commonConstant.NilAssetId, - TxAmount: commonConstant.NilAssetAmountStr, - NativeAddress: commonConstant.NilL1Address, - MempoolDetails: []*mempool.MempoolTxDetail{txDetail}, - TxInfo: string(txInfoBytes), - AccountIndex: commonConstant.NilTxAccountIndex, - Nonce: commonConstant.NilNonce, - ExpiredAt: commonConstant.NilExpiredAt, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - 
pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - case TxTypeDeposit: - var accountInfo *account.Account - // create mempool oTx - txInfo, err := util.ParseDepositPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse deposit pub data: %s", err.Error()) - return err - } - accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) - if newAccountInfoMap[accountNameHash] != nil { - accountInfo = newAccountInfoMap[accountNameHash] - } else { - accountInfo, err = getAccountInfoByAccountNameHash(accountNameHash, svcCtx.AccountModel) - if err != nil { - logx.Errorf("[getAccountInfoByAccountNameHash] unable to get account info: %s", err.Error()) - return err - } - } - txInfo.AccountIndex = accountInfo.AccountIndex - var ( - mempoolTxDetails []*mempool.MempoolTxDetail - ) - balanceDelta := &commonAsset.AccountAsset{ - AssetId: txInfo.AssetId, - Balance: txInfo.AssetAmount, - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: txInfo.AssetId, - AssetType: commonAsset.GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: balanceDelta.String(), - Order: 0, - AccountOrder: 0, - }) - // serialize oTx info - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFeeAssetId: commonConstant.NilAssetId, - GasFee: commonConstant.NilAssetAmountStr, - NftIndex: commonConstant.NilTxNftIndex, - PairIndex: commonConstant.NilPairIndex, - AssetId: txInfo.AssetId, - TxAmount: txInfo.AssetAmount.String(), - NativeAddress: oTx.SenderAddress, - MempoolDetails: mempoolTxDetails, - TxInfo: string(txInfoBytes), - AccountIndex: accountInfo.AccountIndex, - Nonce: commonConstant.NilNonce, - ExpiredAt: commonConstant.NilExpiredAt, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - if !relatedAccountIndex[accountInfo.AccountIndex] { - relatedAccountIndex[accountInfo.AccountIndex] = true - } - case TxTypeDepositNft: - // create mempool oTx - var accountInfo *account.Account - txInfo, err := util.ParseDepositNftPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse deposit nft pub data: %s", err.Error()) - return err - } - accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) - if newAccountInfoMap[accountNameHash] != nil { - accountInfo = newAccountInfoMap[accountNameHash] - } else { - accountInfo, err = getAccountInfoByAccountNameHash(accountNameHash, svcCtx.AccountModel) - if err != nil { - logx.Errorf("[MonitorMempool] unable to get account info: %s", err.Error()) - return err - } - } - // complete oTx info - txInfo.AccountIndex = accountInfo.AccountIndex - redisLock, nftIndex, err := globalmapHandler.GetLatestNftIndexForWrite(svcCtx.NftModel, svcCtx.RedisConnection) - if err != nil { - logx.Errorf("[MonitorMempool] unable to get latest nft index: %s", err.Error()) - return err - } - defer redisLock.Release() - var ( - nftInfo *commonAsset.NftInfo - ) - if txInfo.NftIndex == 0 && txInfo.CreatorAccountIndex == 0 && txInfo.CreatorTreasuryRate == 0 { - txInfo.NftIndex = nftIndex - } - nftInfo = commonAsset.ConstructNftInfo( - txInfo.NftIndex, - 
txInfo.CreatorAccountIndex, - accountInfo.AccountIndex, - common.Bytes2Hex(txInfo.NftContentHash), - txInfo.NftL1TokenId.String(), - txInfo.NftL1Address, - txInfo.CreatorTreasuryRate, - txInfo.CollectionId, - ) - newNftInfoMap[nftInfo.NftIndex] = nftInfo - var ( - mempoolTxDetails []*mempool.MempoolTxDetail - ) - if err != nil { - logx.Errorf("[MonitorMempool] unable to construct nft info: %s", err.Error()) - return err - } - // user info - accountOrder := int64(0) - order := int64(0) - emptyDeltaAsset := &commonAsset.AccountAsset{ - AssetId: 0, - Balance: big.NewInt(0), - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: 0, - AssetType: commonAsset.GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: emptyDeltaAsset.String(), - AccountOrder: accountOrder, - Order: order, - }) - order++ - // nft info - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: txInfo.NftIndex, - AssetType: commonAsset.NftAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: nftInfo.String(), - AccountOrder: commonConstant.NilAccountOrder, - Order: order, - }) - // serialize oTx info - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFee: commonConstant.NilAssetAmountStr, - GasFeeAssetId: commonConstant.NilAssetId, - NftIndex: nftIndex, - PairIndex: commonConstant.NilPairIndex, - AssetId: commonConstant.NilAssetId, - TxAmount: commonConstant.NilAssetAmountStr, - NativeAddress: oTx.SenderAddress, - MempoolDetails: mempoolTxDetails, - TxInfo: string(txInfoBytes), - AccountIndex: accountInfo.AccountIndex, - Nonce: commonConstant.NilNonce, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - if !relatedAccountIndex[accountInfo.AccountIndex] { - relatedAccountIndex[accountInfo.AccountIndex] = true - } - // put into new nfts - pendingNewNfts = append(pendingNewNfts, &nft.L2Nft{ - NftIndex: nftInfo.NftIndex, - CreatorAccountIndex: nftInfo.CreatorAccountIndex, - OwnerAccountIndex: nftInfo.OwnerAccountIndex, - NftContentHash: nftInfo.NftContentHash, - NftL1Address: nftInfo.NftL1Address, - NftL1TokenId: nftInfo.NftL1TokenId, - CreatorTreasuryRate: nftInfo.CreatorTreasuryRate, - CollectionId: nftInfo.CollectionId, - }) - case TxTypeFullExit: - // create mempool oTx - var ( - accountInfo *commonAsset.AccountInfo - ) - txInfo, err := util.ParseFullExitPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse deposit pub data: %s", err.Error()) - return err - } - accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) - if newAccountInfoMap[accountNameHash] != nil { - accountInfo, err = commonAsset.ToFormatAccountInfo(newAccountInfoMap[accountNameHash]) - if err != nil { - logx.Errorf("[MonitorMempool] unable convert to format account info: %s", err.Error()) - return err - } - for _, mempoolTx := range pendingNewMempoolTxs { - if mempoolTx.AccountIndex != accountInfo.AccountIndex { - continue - } - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AccountIndex != accountInfo.AccountIndex || txDetail.AssetId != txInfo.AssetId { - 
continue - } - if txDetail.AssetType == GeneralAssetType { - if accountInfo.AssetInfo[txDetail.AssetId] == nil { - accountInfo.AssetInfo[txDetail.AssetId] = &commonAsset.AccountAsset{ - AssetId: txDetail.AssetId, - Balance: big.NewInt(0), - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - } - nBalance, err := commonAsset.ComputeNewBalance(GeneralAssetType, accountInfo.AssetInfo[txDetail.AssetId].String(), txDetail.BalanceDelta) - if err != nil { - logx.Errorf("[MonitorMempool] unable to compute new balance: %s", err.Error()) - return err - } - accountInfo.AssetInfo[txDetail.AssetId], err = commonAsset.ParseAccountAsset(nBalance) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse account asset : %s", err.Error()) - return err - } - } - } - } - } else { - newAccountInfoMap[accountNameHash], err = getAccountInfoByAccountNameHash(accountNameHash, svcCtx.AccountModel) - if err != nil { - logx.Errorf("[MonitorMempool] getAccountInfoByAccountNameHash unable to get account info: %s", err.Error()) - return err - } - accountInfo, err = commonAsset.ToFormatAccountInfo(newAccountInfoMap[accountNameHash]) - if err != nil { - logx.Errorf("[MonitorMempool] unable convert to format account info: %s", err.Error()) - return err - } - - mempoolTxs, err := svcCtx.MempoolModel.GetPendingMempoolTxsByAccountIndex(accountInfo.AccountIndex) - if err != nil { - if err != errorcode.DbErrNotFound { - logx.Errorf("[MonitorMempool] unable to get pending mempool txs: %s", err.Error()) - return err - } - } - for _, mempoolTx := range mempoolTxs { - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AccountIndex != accountInfo.AccountIndex || txDetail.AssetId != txInfo.AssetId { - continue - } - if txDetail.AssetType == GeneralAssetType { - nBalance, err := commonAsset.ComputeNewBalance(GeneralAssetType, accountInfo.AssetInfo[txDetail.AssetId].String(), txDetail.BalanceDelta) - if err != nil { - logx.Errorf("[MonitorMempool] unable to compute new balance: %s", err.Error()) - return err - } - accountInfo.AssetInfo[txDetail.AssetId], err = commonAsset.ParseAccountAsset(nBalance) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse account asset : %s", err.Error()) - return err - } - } - } - } - } - // complete oTx info - txInfo.AccountIndex = accountInfo.AccountIndex - if accountInfo.AssetInfo[txInfo.AssetId] == nil { - txInfo.AssetAmount = big.NewInt(0) - } else { - txInfo.AssetAmount = accountInfo.AssetInfo[txInfo.AssetId].Balance - } - // do delta at committer - var ( - mempoolTxDetails []*mempool.MempoolTxDetail - ) - balanceDelta := &commonAsset.AccountAsset{ - AssetId: txInfo.AssetId, - Balance: big.NewInt(0), - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: txInfo.AssetId, - AssetType: commonAsset.GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: balanceDelta.String(), - Order: 0, - AccountOrder: 0, - }) - // serialize oTx info - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("[MonitorMempool] unable to serialize oTx info : %s", err.Error()) - return err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFee: commonConstant.NilAssetAmountStr, - GasFeeAssetId: commonConstant.NilAssetId, - NftIndex: commonConstant.NilTxNftIndex, - PairIndex: commonConstant.NilPairIndex, - AssetId: txInfo.AssetId, - TxAmount: 
txInfo.AssetAmount.String(), - NativeAddress: oTx.SenderAddress, - MempoolDetails: mempoolTxDetails, - TxInfo: string(txInfoBytes), - AccountIndex: accountInfo.AccountIndex, - Nonce: commonConstant.NilNonce, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - if !relatedAccountIndex[accountInfo.AccountIndex] { - relatedAccountIndex[accountInfo.AccountIndex] = true - } - case TxTypeFullExitNft: - pendingNewMempoolTxs, relatedAccountIndex, err = processFullExitNft(svcCtx, - txHash, - newAccountInfoMap, newNftInfoMap, oTx, pendingNewMempoolTxs, relatedAccountIndex) - if err != nil { - return err - } - default: - logx.Errorf("[MonitorMempool] invalid oTx type") - return errors.New("[MonitorMempool] invalid oTx type") - } - } - // transaction: active accounts not in account table & update l2 oTx event & create mempool txs - logx.Infof("accounts: %v, mempoolTxs: %v, finalL2TxEvents: %v", len(pendingNewAccounts), len(pendingNewMempoolTxs), len(txs)) - - // update db - if err = svcCtx.L2TxEventMonitorModel.CreateMempoolAndActiveAccount(pendingNewAccounts, pendingNewMempoolTxs, - pendingNewLiquidityInfos, pendingNewNfts, txs); err != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create mempool txs and update l2 oTx event monitors, error: %s", err.Error()) - return err - } - m := NewMempoolMonitor(ctx, svcCtx) - // update account cache for globalrpc sendtx interface - for _, mempooltx := range pendingNewMempoolTxs { - if err := m.commglobalmap.SetLatestAccountInfoInToCache(ctx, mempooltx.AccountIndex); err != nil { - logx.Errorf("[CreateMempoolTxs] unable to CreateMempoolTxs, error: %s", err.Error()) - } - } - logx.Errorf("========== end MonitorMempool ==========") - return nil -} - -func processFullExitNft( - svcCtx *svc.ServiceContext, - txHash string, - newAccountInfoMap map[string]*account.Account, - newNftInfoMap map[int64]*commonAsset.NftInfo, - oTx *l2TxEventMonitor.L2TxEventMonitor, - pendingNewMempoolTxs []*mempool.MempoolTx, - relatedAccountIndex map[int64]bool, -) ([]*mempool.MempoolTx, map[int64]bool, error) { - // create mempool oTx - var accountInfo *account.Account - txInfo, err := util.ParseFullExitNftPubData(common.FromHex(oTx.Pubdata)) - if err != nil { - logx.Errorf("[MonitorMempool] unable to parse deposit nft pub data: %s", err.Error()) - return pendingNewMempoolTxs, nil, err - } - accountNameHash := common.Bytes2Hex(txInfo.AccountNameHash) - if newAccountInfoMap[accountNameHash] == nil { - accountInfo, err = getAccountInfoByAccountNameHash(accountNameHash, svcCtx.AccountModel) - if err != nil { - logx.Errorf("[MonitorMempool] getAccountInfoByAccountNameHash unable to get account info: %s", err.Error()) - return pendingNewMempoolTxs, nil, err - } - } else { - accountInfo = newAccountInfoMap[accountNameHash] - } - var nftAsset *nft.L2Nft - if newNftInfoMap[txInfo.NftIndex] == nil { - nftAsset, err = svcCtx.NftModel.GetNftAsset(txInfo.NftIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - emptyNftInfo := commonAsset.EmptyNftInfo(txInfo.NftIndex) - nftAsset = &nft.L2Nft{ - NftIndex: emptyNftInfo.NftIndex, - CreatorAccountIndex: emptyNftInfo.CreatorAccountIndex, - OwnerAccountIndex: emptyNftInfo.OwnerAccountIndex, - NftContentHash: emptyNftInfo.NftContentHash, - NftL1Address: emptyNftInfo.NftL1Address, - NftL1TokenId: emptyNftInfo.NftL1TokenId, - CreatorTreasuryRate: emptyNftInfo.CreatorTreasuryRate, - CollectionId: emptyNftInfo.CollectionId, - } - } 
else { - logx.Errorf("[MonitorMempool] unable to latest nft info: %s", err.Error()) - return pendingNewMempoolTxs, nil, err - } - } else { - if nftAsset.OwnerAccountIndex != accountInfo.AccountIndex { - emptyNftInfo := commonAsset.EmptyNftInfo(txInfo.NftIndex) - nftAsset = &nft.L2Nft{ - NftIndex: emptyNftInfo.NftIndex, - CreatorAccountIndex: emptyNftInfo.CreatorAccountIndex, - OwnerAccountIndex: emptyNftInfo.OwnerAccountIndex, - NftContentHash: emptyNftInfo.NftContentHash, - NftL1Address: emptyNftInfo.NftL1Address, - NftL1TokenId: emptyNftInfo.NftL1TokenId, - CreatorTreasuryRate: emptyNftInfo.CreatorTreasuryRate, - CollectionId: emptyNftInfo.CollectionId, - } - } - } - } else { - nftAsset = &nft.L2Nft{ - NftIndex: newNftInfoMap[txInfo.NftIndex].NftIndex, - CreatorAccountIndex: newNftInfoMap[txInfo.NftIndex].CreatorAccountIndex, - OwnerAccountIndex: newNftInfoMap[txInfo.NftIndex].OwnerAccountIndex, - NftContentHash: newNftInfoMap[txInfo.NftIndex].NftContentHash, - NftL1Address: newNftInfoMap[txInfo.NftIndex].NftL1Address, - NftL1TokenId: newNftInfoMap[txInfo.NftIndex].NftL1TokenId, - CreatorTreasuryRate: newNftInfoMap[txInfo.NftIndex].CreatorTreasuryRate, - CollectionId: newNftInfoMap[txInfo.NftIndex].CollectionId, - } - } - var creatorAccountNameHash []byte - if txInfo.CreatorAccountIndex == 0 && txInfo.CreatorTreasuryRate == 0 { - creatorAccountNameHash = []byte{0} - } else { - creatorAccountInfo, err := svcCtx.AccountModel.GetAccountByAccountIndex(nftAsset.CreatorAccountIndex) - if err != nil { - logx.Errorf("[MonitorMempool] unable to get account info: %s", err.Error()) - return pendingNewMempoolTxs, nil, err - } - creatorAccountNameHash = common.FromHex(creatorAccountInfo.AccountNameHash) - } - // complete oTx info - nftL1TokenId, isValid := new(big.Int).SetString(nftAsset.NftL1TokenId, 10) - if !isValid { - logx.Errorf("[MonitorMempool] unable to parse big int") - return pendingNewMempoolTxs, nil, errors.New("[MonitorMempool] unable to parse big int") - } - txInfo = &commonTx.FullExitNftTxInfo{ - TxType: txInfo.TxType, - AccountIndex: accountInfo.AccountIndex, - CreatorAccountIndex: nftAsset.CreatorAccountIndex, - CreatorTreasuryRate: nftAsset.CreatorTreasuryRate, - NftIndex: txInfo.NftIndex, - CollectionId: nftAsset.CollectionId, - NftL1Address: nftAsset.NftL1Address, - AccountNameHash: txInfo.AccountNameHash, - CreatorAccountNameHash: creatorAccountNameHash, - NftContentHash: common.FromHex(nftAsset.NftContentHash), - NftL1TokenId: nftL1TokenId, - } - var ( - mempoolTxDetails []*mempool.MempoolTxDetail - ) - // empty account delta - emptyAssetDelta := &commonAsset.AccountAsset{ - AssetId: 0, - Balance: big.NewInt(0), - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - accountOrder := int64(0) - order := int64(0) - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: 0, - AssetType: commonAsset.GeneralAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: emptyAssetDelta.String(), - Order: order, - AccountOrder: accountOrder, - }) - // nft info - newNftInfo := commonAsset.EmptyNftInfo(txInfo.NftIndex) - order++ - mempoolTxDetails = append(mempoolTxDetails, &mempool.MempoolTxDetail{ - AssetId: txInfo.NftIndex, - AssetType: commonAsset.NftAssetType, - AccountIndex: txInfo.AccountIndex, - AccountName: accountInfo.AccountName, - BalanceDelta: newNftInfo.String(), - Order: order, - AccountOrder: commonConstant.NilAccountOrder, - }) - // serialize oTx info - txInfoBytes, err := 
json.Marshal(txInfo) - if err != nil { - logx.Errorf("[Marshal] unable to serialize oTx info : %s", err.Error()) - return pendingNewMempoolTxs, nil, err - } - mempoolTx := &mempool.MempoolTx{ - TxHash: txHash, - TxType: int64(txInfo.TxType), - GasFee: commonConstant.NilAssetAmountStr, - GasFeeAssetId: commonConstant.NilAssetId, - NftIndex: txInfo.NftIndex, - PairIndex: commonConstant.NilPairIndex, - AssetId: commonConstant.NilAssetId, - TxAmount: commonConstant.NilAssetAmountStr, - NativeAddress: oTx.SenderAddress, - MempoolDetails: mempoolTxDetails, - TxInfo: string(txInfoBytes), - AccountIndex: accountInfo.AccountIndex, - Nonce: commonConstant.NilNonce, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } - pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) - if !relatedAccountIndex[accountInfo.AccountIndex] { - relatedAccountIndex[accountInfo.AccountIndex] = true - } - return pendingNewMempoolTxs, relatedAccountIndex, nil -} diff --git a/service/cronjob/monitor/internal/logic/types.go b/service/cronjob/monitor/internal/logic/types.go deleted file mode 100644 index 217579fdf..000000000 --- a/service/cronjob/monitor/internal/logic/types.go +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -type L1EventInfo struct { - // deposit / lock / committed / verified / reverted - EventType uint8 - // tx hash - TxHash string -} diff --git a/service/cronjob/monitor/internal/logic/utils.go b/service/cronjob/monitor/internal/logic/utils.go deleted file mode 100644 index 335914ec0..000000000 --- a/service/cronjob/monitor/internal/logic/utils.go +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "encoding/base64" - "strconv" - - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/account" -) - -func ComputeL1TxTxHash(requestId int64, txHash string) string { - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(strconv.FormatInt(requestId, 10))) - hFunc.Write(common.FromHex(txHash)) - return base64.StdEncoding.EncodeToString(hFunc.Sum(nil)) -} - -func getAccountInfoByAccountNameHash(accountNameHash string, accountModel account.AccountModel) (accountInfo *account.Account, err error) { - accountInfo, err = accountModel.GetAccountByAccountNameHash(accountNameHash) - if err != nil { - logx.Errorf("[MonitorMempool] unable to get account by account name hash: %s", err.Error()) - return nil, err - } - return accountInfo, nil -} diff --git a/service/cronjob/monitor/internal/repo/accountoperator/account.go b/service/cronjob/monitor/internal/repo/accountoperator/account.go deleted file mode 100644 index 394f7ca93..000000000 --- a/service/cronjob/monitor/internal/repo/accountoperator/account.go +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package accountoperator - -import ( - "context" - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - table "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) GetBasicAccountByAccountName(ctx context.Context, accountName string) (*table.Account, error) { - f := func() (interface{}, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by name: %s, error: %s", accountName, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil - } - account := &table.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountName(accountName), account, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ = value.(*table.Account) - return account, nil -} - -func (m *model) GetBasicAccountByAccountPk(ctx context.Context, accountPk string) (*table.Account, error) { - f := func() (interface{}, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("public_key = ?", accountPk).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by pk: %s, error: %d", accountPk, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil - } - account := &table.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountPk(accountPk), account, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ = value.(*table.Account) - return account, nil -} - -/* - Func: GetAccountByAccountIndex - Params: accountIndex int64 - Return: account Account, err error - Description: get account info by index -*/ - -func (m *model) GetAccountByAccountIndex(accountIndex int64) (account *table.Account, err error) { - dbTx := m.db.Table(m.table).Where("account_index = ?", accountIndex).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by index: %d, error: %s", accountIndex, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByPk - Params: pk string - Return: account Account, err error - Description: get account info by public key -*/ - -func (m *model) GetAccountByPk(pk string) (account *table.Account, err error) { - dbTx := m.db.Table(m.table).Where("public_key = ?", pk).Find(&account) - if dbTx.Error != nil { - logx.Errorf("fail to get account by pk: %s, error: %s", pk, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountByAccountName - Params: accountName string - Return: account Account, err error - Description: get account info by account name -*/ -func (m *model) GetAccountByAccountName(ctx context.Context, accountName string) (*table.Account, error) { - account := &table.Account{} - dbTx := m.db.Table(m.table).Where("account_name = ?", accountName).Find(&account) - if dbTx.Error != nil { - 
logx.Errorf("fail to get account by name: %s, error: %s", accountName, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return account, nil -} - -/* - Func: GetAccountsList - Params: limit int, offset int64 - Return: err error - Description: For API /api/v1/info/getAccountsList - -*/ -func (m *model) GetAccountsList(limit int, offset int64) (accounts []*table.Account, err error) { - dbTx := m.db.Table(m.table).Limit(limit).Offset(int(offset)).Order("account_index desc").Find(&accounts) - if dbTx.Error != nil { - logx.Errorf("fail to get accounts, offset: %d, limit: %d, error: %s", offset, limit, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return accounts, nil -} - -/* - Func: GetAccountsTotalCount - Params: - Return: count int64, err error - Description: used for counting total accounts for explorer dashboard -*/ -func (m *model) GetAccountsTotalCount() (count int64, err error) { - dbTx := m.db.Table(m.table).Where("deleted_at is NULL").Count(&count) - if dbTx.Error != nil { - return 0, dbTx.Error - } else if dbTx.RowsAffected == 0 { - return 0, nil - } - return count, nil -} - -func (m *model) CreateActiveAccount(pendingNewAccounts []*account.Account) (err error) { - // TODO: ensure create will update existing account - dbTx := m.db.Table(m.table).CreateInBatches(pendingNewAccounts, len(pendingNewAccounts)) - if dbTx.Error != nil { - logx.Errorf("[CreateInBatches] unable to create pending new account: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewAccounts)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new account") - return errors.New("[CreateMempoolAndActiveAccount] invalid new account") - } - return nil -} diff --git a/service/cronjob/monitor/internal/repo/accountoperator/api.go b/service/cronjob/monitor/internal/repo/accountoperator/api.go deleted file mode 100644 index 6ddfb027e..000000000 --- a/service/cronjob/monitor/internal/repo/accountoperator/api.go +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -//go:generate mockgen -source api.go -destination api_mock.go -package account - -package accountoperator - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type Model interface { - GetBasicAccountByAccountName(ctx context.Context, accountName string) (account *table.Account, err error) - GetBasicAccountByAccountPk(ctx context.Context, accountPK string) (account *table.Account, err error) - - GetAccountByAccountIndex(accountIndex int64) (account *table.Account, err error) - GetAccountByPk(pk string) (account *table.Account, err error) - GetAccountByAccountName(ctx context.Context, accountName string) (account *table.Account, err error) - GetAccountsList(limit int, offset int64) (accounts []*table.Account, err error) - GetAccountsTotalCount() (count int64, err error) - CreateActiveAccount(pendingNewAccounts []*table.Account) (err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `account`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/commglobalmap/api.go b/service/cronjob/monitor/internal/repo/commglobalmap/api.go deleted file mode 100644 index f8bf0e720..000000000 --- a/service/cronjob/monitor/internal/repo/commglobalmap/api.go +++ /dev/null @@ -1,41 +0,0 @@ -package commglobalmap - -import ( - "context" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -//go:generate mockgen -source api.go -destination api_mock.go -package commglobalmap - -type GlobalAssetInfo struct { - AccountIndex int64 - AssetId int64 - AssetType int64 - ChainId int64 - BaseBalanceEnc string -} - -type Model interface { - DeleteLatestAccountInfoInCache(ctx context.Context, accountIndex int64) error - GetLatestAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) - SetLatestAccountInfoInToCache(ctx context.Context, accountIndex int64) error - GetLatestAccountInfo(ctx context.Context, accountIndex int64) (accountInfo *commonAsset.AccountInfo, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - mempoolModel: mempool.NewMempoolModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - mempoolTxDetailModel: mempool.NewMempoolDetailModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - accountModel: account.NewAccountModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - liquidityModel: liquidity.NewLiquidityModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - redisConnection: svcCtx.RedisConnection, - offerModel: nft.NewOfferModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/commglobalmap/commGlobalmap.go b/service/cronjob/monitor/internal/repo/commglobalmap/commGlobalmap.go deleted file mode 100644 index 39bee5855..000000000 --- a/service/cronjob/monitor/internal/repo/commglobalmap/commGlobalmap.go +++ /dev/null @@ -1,139 +0,0 @@ -package commglobalmap - -import ( - "context" - "errors" - "strconv" - - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - - 
"github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - mempoolModel mempool.MempoolModel - mempoolTxDetailModel mempool.MempoolTxDetailModel - accountModel account.AccountModel - liquidityModel liquidity.LiquidityModel - redisConnection *redis.Redis - offerModel nft.OfferModel - nftModel nft.L2NftModel - cache multcache.MultCache -} - -func (m *model) GetLatestAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - f := func() (interface{}, error) { - accountInfo, err := m.GetLatestAccountInfo(ctx, accountIndex) - if err != nil { - return nil, err - } - account, err := commonAsset.FromFormatAccountInfo(accountInfo) - if err != nil { - return nil, err - } - return account, nil - } - accountInfo := &account.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex), accountInfo, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ := value.(*account.Account) - res, err := commonAsset.ToFormatAccountInfo(account) - if err != nil { - return nil, err - } - return res, nil -} - -func (m *model) SetLatestAccountInfoInToCache(ctx context.Context, accountIndex int64) error { - accountInfo, err := m.GetLatestAccountInfo(ctx, accountIndex) - if err != nil { - return err - } - account, err := commonAsset.FromFormatAccountInfo(accountInfo) - if err != nil { - return err - } - if err := m.cache.Set(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex), account, multcache.AccountTtl); err != nil { - return err - } - return nil -} - -func (m *model) DeleteLatestAccountInfoInCache(ctx context.Context, accountIndex int64) error { - return m.cache.Delete(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex)) -} - -func (m *model) GetLatestAccountInfo(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - oAccountInfo, err := m.accountModel.GetAccountByAccountIndex(accountIndex) - if err != nil { - logx.Errorf("[GetAccountByAccountIndex]param: %d, err: %s", accountIndex, err.Error()) - return nil, err - } - accountInfo, err := commonAsset.ToFormatAccountInfo(oAccountInfo) - if err != nil { - logx.Errorf("[ToFormatAccountInfo]param: %v, err: %s", oAccountInfo, err.Error()) - return nil, err - } - mempoolTxs, err := m.mempoolModel.GetPendingMempoolTxsByAccountIndex(accountIndex) - if err != nil && err != errorcode.DbErrNotFound { - logx.Errorf("[GetPendingMempoolTxsByAccountIndex]param: %d, err: %s", accountIndex, err.Error()) - return nil, err - } - for _, mempoolTx := range mempoolTxs { - if mempoolTx.Nonce != commonConstant.NilNonce { - accountInfo.Nonce = mempoolTx.Nonce - } - for _, mempoolTxDetail := range mempoolTx.MempoolDetails { - if mempoolTxDetail.AccountIndex != accountIndex { - continue - } - switch mempoolTxDetail.AssetType { - case commonAsset.GeneralAssetType: - if accountInfo.AssetInfo[mempoolTxDetail.AssetId] == nil { - accountInfo.AssetInfo[mempoolTxDetail.AssetId] = &commonAsset.AccountAsset{ - AssetId: mempoolTxDetail.AssetId, - Balance: util.ZeroBigInt, - LpAmount: util.ZeroBigInt, - OfferCanceledOrFinalized: 
util.ZeroBigInt, - } - } - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.GeneralAssetType, - accountInfo.AssetInfo[mempoolTxDetail.AssetId].String(), mempoolTxDetail.BalanceDelta) - if err != nil { - logx.Errorf("[ComputeNewBalance] err: %s", err.Error()) - return nil, err - } - accountInfo.AssetInfo[mempoolTxDetail.AssetId], err = commonAsset.ParseAccountAsset(nBalance) - if err != nil { - logx.Errorf("[ParseAccountAsset]param: %v, err: %s", nBalance, err.Error()) - return nil, err - } - case commonAsset.CollectionNonceAssetType: - accountInfo.CollectionNonce, err = strconv.ParseInt(mempoolTxDetail.BalanceDelta, 10, 64) - if err != nil { - logx.Errorf("[ParseInt] unable to parse int, err: %s", err.Error()) - return nil, err - } - case commonAsset.LiquidityAssetType: - case commonAsset.NftAssetType: - default: - logx.Errorf("invalid asset type") - return nil, errors.New("invalid asset type") - } - } - } - accountInfo.Nonce = accountInfo.Nonce + 1 - accountInfo.CollectionNonce = accountInfo.CollectionNonce + 1 - return accountInfo, nil -} diff --git a/service/cronjob/monitor/internal/repo/l2eventoperator/api.go b/service/cronjob/monitor/internal/repo/l2eventoperator/api.go deleted file mode 100644 index 62f654aba..000000000 --- a/service/cronjob/monitor/internal/repo/l2eventoperator/api.go +++ /dev/null @@ -1,18 +0,0 @@ -package l2eventoperator - -import ( - table "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type Model interface { - UpdateL2Events(pendingUpdateL2Events []*table.L2TxEventMonitor) (err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: "l2_tx_event_monitor", - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/l2eventoperator/l2event.go b/service/cronjob/monitor/internal/repo/l2eventoperator/l2event.go deleted file mode 100644 index 30b321fab..000000000 --- a/service/cronjob/monitor/internal/repo/l2eventoperator/l2event.go +++ /dev/null @@ -1,32 +0,0 @@ -package l2eventoperator - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) UpdateL2Events(pendingUpdateL2Events []*l2TxEventMonitor.L2TxEventMonitor) (err error) { - for _, pendingUpdateL2Event := range pendingUpdateL2Events { - dbTx := m.db.Table(m.table).Where("id = ?", pendingUpdateL2Event.ID).Select("*").Updates(&pendingUpdateL2Event) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to update l2 tx event: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid l2 tx event") - return errors.New("[CreateMempoolAndActiveAccount] invalid l2 tx event") - } - } - return nil -} diff --git a/service/cronjob/monitor/internal/repo/liquidityoperator/api.go b/service/cronjob/monitor/internal/repo/liquidityoperator/api.go deleted file mode 100644 index 9f71093fb..000000000 --- a/service/cronjob/monitor/internal/repo/liquidityoperator/api.go +++ /dev/null @@ -1,20 +0,0 @@ -package liquidityoperator - -import ( - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -//go:generate mockgen -source api.go -destination api_mock.go 
-package liquidity - -type Model interface { - CreateLiquidities(pendingNewLiquidityInfos []*liquidity.Liquidity) (err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `liquidity`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/liquidityoperator/liquidity.go b/service/cronjob/monitor/internal/repo/liquidityoperator/liquidity.go deleted file mode 100644 index 2cd184632..000000000 --- a/service/cronjob/monitor/internal/repo/liquidityoperator/liquidity.go +++ /dev/null @@ -1,33 +0,0 @@ -package liquidityoperator - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) CreateLiquidities(pendingNewLiquidityInfos []*liquidity.Liquidity) (err error) { - if len(pendingNewLiquidityInfos) == 0 { - return nil - } - dbTx := m.db.Table(m.table).CreateInBatches(pendingNewLiquidityInfos, len(pendingNewLiquidityInfos)) - if dbTx.Error != nil { - logx.Errorf("[CreateInBatches] unable to create pending new liquidity infos: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewLiquidityInfos)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new liquidity infos") - return errors.New("[CreateMempoolAndActiveAccount] invalid new liquidity infos") - } - return nil -} diff --git a/service/cronjob/monitor/internal/repo/mempooloperator/api.go b/service/cronjob/monitor/internal/repo/mempooloperator/api.go deleted file mode 100644 index 008fa2a2b..000000000 --- a/service/cronjob/monitor/internal/repo/mempooloperator/api.go +++ /dev/null @@ -1,19 +0,0 @@ -package mempooloperator - -import ( - table "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type Model interface { - CreateMempoolTxs(pendingNewMempoolTxs []*table.MempoolTx) (err error) - DeleteMempoolTxs(pendingUpdateMempoolTxs []*table.MempoolTx) (err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `mempool_tx`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/mempooloperator/mempool.go b/service/cronjob/monitor/internal/repo/mempooloperator/mempool.go deleted file mode 100644 index 6ce995506..000000000 --- a/service/cronjob/monitor/internal/repo/mempooloperator/mempool.go +++ /dev/null @@ -1,57 +0,0 @@ -package mempooloperator - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) CreateMempoolTxs(pendingNewMempoolTxs []*mempool.MempoolTx) (err error) { - dbTx := m.db.Table(mempool.MempoolTableName).CreateInBatches(pendingNewMempoolTxs, len(pendingNewMempoolTxs)) - if dbTx.Error != nil { - logx.Errorf("[CreateInBatches] unable to create pending new mempool txs: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewMempoolTxs)) { - logx.Errorf("[CreateInBatches] invalid new mempool txs") - return errors.New("[CreateInBatches] invalid new mempool txs") - } - return nil - -} - -func (m *model) DeleteMempoolTxs(pendingUpdateMempoolTxs []*mempool.MempoolTx) 
(err error) { - for _, pendingDeleteMempoolTx := range pendingUpdateMempoolTxs { - for _, detail := range pendingDeleteMempoolTx.MempoolDetails { - dbTx := m.db.Table(mempool.DetailTableName).Where("id = ?", detail.ID).Delete(&detail) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - return errors.New("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - } - } - dbTx := m.db.Table(mempool.MempoolTableName).Where("id = ?", pendingDeleteMempoolTx.ID).Delete(&pendingDeleteMempoolTx) - if dbTx.Error != nil { - logx.Errorf("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] %s", dbTx.Error) - return dbTx.Error - } - if dbTx.RowsAffected == 0 { - logx.Error("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - return errors.New("[UpdateRelatedEventsAndResetRelatedAssetsAndTxs] Delete Invalid Mempool Tx") - } - } - return nil -} diff --git a/service/cronjob/monitor/internal/repo/nftoperator/api.go b/service/cronjob/monitor/internal/repo/nftoperator/api.go deleted file mode 100644 index ca8110499..000000000 --- a/service/cronjob/monitor/internal/repo/nftoperator/api.go +++ /dev/null @@ -1,18 +0,0 @@ -package nftoperator - -import ( - table "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -type Model interface { - CreateNfts(pendingNewNfts []*table.L2Nft) (err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: "l2_nft", - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/cronjob/monitor/internal/repo/nftoperator/nft.go b/service/cronjob/monitor/internal/repo/nftoperator/nft.go deleted file mode 100644 index 7b7520b43..000000000 --- a/service/cronjob/monitor/internal/repo/nftoperator/nft.go +++ /dev/null @@ -1,33 +0,0 @@ -package nftoperator - -import ( - "errors" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) CreateNfts(pendingNewNfts []*nft.L2Nft) (err error) { - if len(pendingNewNfts) != 0 { - return nil - } - dbTx := m.db.Table(nft.L2NftTableName).CreateInBatches(pendingNewNfts, len(pendingNewNfts)) - if dbTx.Error != nil { - logx.Errorf("[CreateMempoolAndActiveAccount] unable to create pending new nft infos: %s", dbTx.Error.Error()) - return dbTx.Error - } - if dbTx.RowsAffected != int64(len(pendingNewNfts)) { - logx.Errorf("[CreateMempoolAndActiveAccount] invalid new nft infos") - return errors.New("[CreateMempoolAndActiveAccount] invalid new nft infos") - } - return nil -} diff --git a/service/cronjob/monitor/internal/svc/servicecontext.go b/service/cronjob/monitor/internal/svc/servicecontext.go deleted file mode 100644 index 0d4a4b1e1..000000000 --- a/service/cronjob/monitor/internal/svc/servicecontext.go +++ /dev/null @@ -1,74 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" 
- "github.com/bnb-chain/zkbas/common/model/l1BlockMonitor" - "github.com/bnb-chain/zkbas/common/model/l1TxSender" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/pkg/multcache" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/config" -) - -type ServiceContext struct { - NftModel nft.L2NftModel - BlockModel block.BlockModel - AccountModel account.AccountModel - MempoolModel mempool.MempoolModel - LiquidityModel liquidity.LiquidityModel - SysConfigModel sysconfig.SysconfigModel - L1TxSenderModel l1TxSender.L1TxSenderModel - L2AssetInfoModel asset.AssetInfoModel - L2TxEventMonitorModel l2TxEventMonitor.L2TxEventMonitorModel - L1BlockMonitorModel l1BlockMonitor.L1BlockMonitorModel - - RedisConnection *redis.Redis - GormPointer *gorm.DB - Cache multcache.MultCache - Conn sqlx.SqlConn - Config config.Config -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Errorf("gorm connect db error, err = %s", err.Error()) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - return &ServiceContext{ - L2TxEventMonitorModel: l2TxEventMonitor.NewL2TxEventMonitorModel(conn, c.CacheRedis, gormPointer), - AccountModel: account.NewAccountModel(conn, c.CacheRedis, gormPointer), - MempoolModel: mempool.NewMempoolModel(conn, c.CacheRedis, gormPointer), - LiquidityModel: liquidity.NewLiquidityModel(conn, c.CacheRedis, gormPointer), - NftModel: nft.NewL2NftModel(conn, c.CacheRedis, gormPointer), - BlockModel: block.NewBlockModel(conn, c.CacheRedis, gormPointer, redisConn), - L1TxSenderModel: l1TxSender.NewL1TxSenderModel(conn, c.CacheRedis, gormPointer), - L1BlockMonitorModel: l1BlockMonitor.NewL1BlockMonitorModel(conn, c.CacheRedis, gormPointer), - L2AssetInfoModel: asset.NewAssetInfoModel(conn, c.CacheRedis, gormPointer), - SysConfigModel: sysconfig.NewSysconfigModel(conn, c.CacheRedis, gormPointer), - RedisConnection: redisConn, - GormPointer: gormPointer, - Cache: multcache.NewGoCache(100, 10), - Conn: conn, - Config: c, - } -} diff --git a/service/cronjob/monitor/monitor.go b/service/cronjob/monitor/monitor.go deleted file mode 100644 index c0eae97d9..000000000 --- a/service/cronjob/monitor/monitor.go +++ /dev/null @@ -1,79 +0,0 @@ -package main - -import ( - "context" - "flag" - - "github.com/bnb-chain/zkbas-eth-rpc/_rpc" - "github.com/robfig/cron/v3" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/config" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/logic" - "github.com/bnb-chain/zkbas/service/cronjob/monitor/internal/svc" -) - -var configFile = flag.String("f", - "./etc/monitor.yaml", "the config file") - -func main() { - flag.Parse() - var c config.Config - conf.MustLoad(*configFile, &c) - ctx := svc.NewServiceContext(c) - ZkbasRollupAddress, err := ctx.SysConfigModel.GetSysconfigByName(c.ChainConfig.ZkbasContractAddrSysConfigName) - if err != nil { - 
logx.Errorf("[main] GetSysconfigByName err: %s", err.Error()) - panic(err) - } - NetworkRpc, err := ctx.SysConfigModel.GetSysconfigByName(c.ChainConfig.NetworkRPCSysConfigName) - if err != nil { - logx.Severef("[monitor] fatal error, cannot fetch NetworkRPC from sysConfig, err: %s, SysConfigName: %s", - err.Error(), c.ChainConfig.NetworkRPCSysConfigName) - panic(err) - } - logx.Infof("[monitor] ChainName: %s, ZkbasRollupAddress: %s, NetworkRpc: %s", c.ChainConfig.ZkbasContractAddrSysConfigName, ZkbasRollupAddress.Value, NetworkRpc.Value) - zkbasRpcCli, err := _rpc.NewClient(NetworkRpc.Value) - if err != nil { - panic(err) - } - cronjob := cron.New(cron.WithChain( - cron.SkipIfStillRunning(cron.DiscardLogger), - )) - if _, err = cronjob.AddFunc("@every 10s", func() { - logic.MonitorBlocks(zkbasRpcCli, c.ChainConfig.StartL1BlockHeight, c.ChainConfig.PendingBlocksCount, - c.ChainConfig.MaxHandledBlocksCount, ZkbasRollupAddress.Value, ctx.L1BlockMonitorModel) - }); err != nil { - panic(err) - } - if _, err = cronjob.AddFunc("@every 10s", func() { - logic.MonitorMempool(context.Background(), ctx) - }); err != nil { - panic(err) - } - if _, err = cronjob.AddFunc("@every 10s", func() { - logic.MonitorL2BlockEvents(context.Background(), ctx, zkbasRpcCli, c.ChainConfig.PendingBlocksCount, - ctx.MempoolModel, ctx.BlockModel, ctx.L1TxSenderModel) - }); err != nil { - panic(err) - } - // governance monitor - GovernanceContractAddress, err := ctx.SysConfigModel.GetSysconfigByName(c.ChainConfig.GovernanceContractAddrSysConfigName) - if err != nil { - logx.Severef("[monitor] fatal error, cannot fetch ZkbasContractAddr from sysConfig, err: %s, SysConfigName: %s", - err.Error(), c.ChainConfig.GovernanceContractAddrSysConfigName) - panic(err) - } - - if _, err = cronjob.AddFunc("@every 10s", func() { - logic.MonitorGovernanceContract(zkbasRpcCli, c.ChainConfig.StartL1BlockHeight, c.ChainConfig.PendingBlocksCount, c.ChainConfig.MaxHandledBlocksCount, - GovernanceContractAddress.Value, ctx.L1BlockMonitorModel, ctx.SysConfigModel, ctx.L2AssetInfoModel, - ) - }); err != nil { - panic(err) - } - cronjob.Start() - logx.Info("Starting monitor cronjob ...") - select {} -} diff --git a/service/cronjob/prover/Dockerfile b/service/cronjob/prover/Dockerfile deleted file mode 100644 index 9444040cd..000000000 --- a/service/cronjob/prover/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 - -RUN apk update --no-cache && apk add --no-cache tzdata python3 curl which bash - -RUN curl -sSL https://sdk.cloud.google.com | bash - -ENV PATH $PATH:/root/google-cloud-sdk/bin - -WORKDIR /app -RUN gsutil cp gs://zkbas_key/zkbas-legend1.pk zkbas-legend1.pk -RUN gsutil cp gs://zkbas_key/zkbas-legend1.vk zkbas-legend1.vk - -RUN gsutil cp gs://zkbas_key/zkbas-legend10.pk zkbas-legend10.pk -RUN gsutil cp gs://zkbas_key/zkbas-legend10.vk zkbas-legend10.vk - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/prover /app/prover -COPY configyaml /app/etc -COPY --from=builder /app/zkbas-legend1.pk zkbas-legend1.pk -COPY --from=builder /app/zkbas-legend1.vk zkbas-legend1.vk - -COPY --from=builder /app/zkbas-legend10.pk zkbas-legend10.pk -COPY --from=builder /app/zkbas-legend10.vk zkbas-legend10.vk - -CMD ["./prover", "-f", "etc/prover.yaml"] \ No newline at end of file diff --git 
a/service/cronjob/prover/etc/prover.yaml.example b/service/cronjob/prover/etc/prover.yaml.example deleted file mode 100644 index edf773ef1..000000000 --- a/service/cronjob/prover/etc/prover.yaml.example +++ /dev/null @@ -1,14 +0,0 @@ -Name: prover.cronjob - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Type: node - -KeyPath: - ProvingKeyPath: [/Users/likang/Documents/git/bnb-chain/zkbas/zkbas1.pk, /Users/likang/Documents/git/bnb-chain/zkbas/zkbas10.pk] - VerifyingKeyPath: [/Users/likang/Documents/git/bnb-chain/zkbas/zkbas1.vk, /Users/likang/Documents/git/bnb-chain/zkbas/zkbas10.vk] - KeyTxCounts: [1, 10] - diff --git a/service/cronjob/prover/internal/config/config.go b/service/cronjob/prover/internal/config/config.go deleted file mode 100644 index 57ea2dbb4..000000000 --- a/service/cronjob/prover/internal/config/config.go +++ /dev/null @@ -1,17 +0,0 @@ -package config - -import ( - "github.com/zeromicro/go-zero/core/stores/cache" -) - -type Config struct { - KeyPath struct { - ProvingKeyPath []string - VerifyingKeyPath []string - KeyTxCounts []int - } - Postgres struct { - DataSource string - } - CacheRedis cache.CacheConf -} diff --git a/service/cronjob/prover/internal/logic/constants.go b/service/cronjob/prover/internal/logic/constants.go deleted file mode 100644 index 2c17c6973..000000000 --- a/service/cronjob/prover/internal/logic/constants.go +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - "github.com/consensys/gnark/backend/groth16" - "github.com/consensys/gnark/frontend" -) - -type ( - CryptoBlock = cryptoBlock.Block -) - -const RedisLockKey = "prover_mutex_key" - -var ( - VerifyingKeys []groth16.VerifyingKey - ProvingKeys []groth16.ProvingKey - KeyTxCounts []int - R1cs []frontend.CompiledConstraintSystem -) diff --git a/service/cronjob/prover/internal/logic/proveblocklogic.go b/service/cronjob/prover/internal/logic/proveblocklogic.go deleted file mode 100644 index f9652cb43..000000000 --- a/service/cronjob/prover/internal/logic/proveblocklogic.go +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "encoding/json" - "errors" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/blockForProof" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/common/util" - lockUtil "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/service/cronjob/prover/internal/svc" -) - -func ProveBlock(ctx *svc.ServiceContext) error { - lock := lockUtil.GetRedisLockByKey(ctx.RedisConn, RedisLockKey) - err := lockUtil.TryAcquireLock(lock) - if err != nil { - return fmt.Errorf("acquire lock error, err=%s", err.Error()) - } - defer lock.Release() - - // fetch unproved block - unprovedBlock, err := ctx.BlockForProofModel.GetUnprovedCryptoBlockByMode(util.COO_MODE) - if err != nil { - return fmt.Errorf("[ProveBlock] GetUnprovedBlock Error: err: %v", err) - } - // update status of block - err = ctx.BlockForProofModel.UpdateUnprovedCryptoBlockStatus(unprovedBlock, blockForProof.StatusReceived) - if err != nil { - return fmt.Errorf("[ProveBlock] update block status error, err=%s", err.Error()) - } - - // parse CryptoBlock - var cryptoBlock *CryptoBlock - err = json.Unmarshal([]byte(unprovedBlock.BlockData), &cryptoBlock) - if err != nil { - return errors.New("[ProveBlock] json.Unmarshal Error") - } - - var keyIndex int - for ; keyIndex < len(KeyTxCounts); keyIndex++ { - if len(cryptoBlock.Txs) == KeyTxCounts[keyIndex] { - break - } - } - if keyIndex == len(KeyTxCounts) { - logx.Errorf("[ProveBlock] Can't find correct vk/pk") - return err - } - - // Generate Proof - proof, err := util.GenerateProof(R1cs[keyIndex], ProvingKeys[keyIndex], VerifyingKeys[keyIndex], cryptoBlock) - if err != nil { - return errors.New("[ProveBlock] GenerateProof Error") - } - - formattedProof, err := util.FormatProof(proof, cryptoBlock.OldStateRoot, cryptoBlock.NewStateRoot, cryptoBlock.BlockCommitment) - if err != nil { - logx.Errorf("[ProveBlock] unable to format proof: %s", err.Error()) - return err - } - - // marshal formattedProof - proofBytes, err := json.Marshal(formattedProof) - if err != nil { - logx.Errorf("[ProveBlock] formattedProof json.Marshal error: %s", err.Error()) - return err - } - - // check the existence of proof - _, err = ctx.ProofSenderModel.GetProofByBlockNumber(unprovedBlock.BlockHeight) - if err == nil { - return fmt.Errorf("[ProveBlock] proof of current height exists") - } - - var row = &proofSender.ProofSender{ - ProofInfo: string(proofBytes), - BlockNumber: unprovedBlock.BlockHeight, - Status: proofSender.NotSent, - } - err = ctx.ProofSenderModel.CreateProof(row) - if err != nil { - _ = ctx.BlockForProofModel.UpdateUnprovedCryptoBlockStatus(unprovedBlock, blockForProof.StatusPublished) - return fmt.Errorf("[ProveBlock] create proof error, err=%s", err.Error()) - } - return nil -} diff --git a/service/cronjob/prover/internal/svc/servicecontext.go b/service/cronjob/prover/internal/svc/servicecontext.go deleted file mode 100644 index f8e1a321a..000000000 --- a/service/cronjob/prover/internal/svc/servicecontext.go +++ /dev/null @@ -1,43 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/blockForProof" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/service/cronjob/prover/internal/config" -) - -type ServiceContext struct 
{ - Config config.Config - - RedisConn *redis.Redis - - ProofSenderModel proofSender.ProofSenderModel - BlockForProofModel blockForProof.BlockForProofModel -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Errorf("gorm connect db error, err = %s", err.Error()) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - return &ServiceContext{ - Config: c, - RedisConn: redisConn, - BlockForProofModel: blockForProof.NewBlockForProofModel(conn, c.CacheRedis, gormPointer), - ProofSenderModel: proofSender.NewProofSenderModel(gormPointer), - } -} diff --git a/service/cronjob/prover/prover.go b/service/cronjob/prover/prover.go deleted file mode 100644 index 93ddc3d42..000000000 --- a/service/cronjob/prover/prover.go +++ /dev/null @@ -1,79 +0,0 @@ -package main - -import ( - "flag" - - "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - "github.com/consensys/gnark-crypto/ecc" - "github.com/consensys/gnark/backend/groth16" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/robfig/cron/v3" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/cronjob/prover/internal/config" - "github.com/bnb-chain/zkbas/service/cronjob/prover/internal/logic" - "github.com/bnb-chain/zkbas/service/cronjob/prover/internal/svc" -) - -var configFile = flag.String("f", "./etc/prover.yaml", "the config file") - -func main() { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - ctx := svc.NewServiceContext(c) - logx.DisableStat() - - logic.KeyTxCounts = c.KeyPath.KeyTxCounts - logic.ProvingKeys = make([]groth16.ProvingKey, len(logic.KeyTxCounts)) - logic.VerifyingKeys = make([]groth16.VerifyingKey, len(logic.KeyTxCounts)) - logic.R1cs = make([]frontend.CompiledConstraintSystem, len(logic.KeyTxCounts)) - var err error - for i := 0; i < len(logic.KeyTxCounts); i++ { - var circuit block.BlockConstraints - circuit.TxsCount = logic.KeyTxCounts[i] - circuit.Txs = make([]block.TxConstraints, circuit.TxsCount) - for i := 0; i < circuit.TxsCount; i++ { - circuit.Txs[i] = block.GetZeroTxConstraint() - } - logx.Infof("start compile block size %d circuit", circuit.TxsCount) - logic.R1cs[i], err = frontend.Compile(ecc.BN254, r1cs.NewBuilder, &circuit, frontend.IgnoreUnconstrainedInputs()) - if err != nil { - panic("r1cs init error") - } - logx.Infof("circuit constraints: %d", logic.R1cs[i].GetNbConstraints()) - logx.Info("finish compile circuit") - // read proving and verifying keys - logic.ProvingKeys[i], err = util.LoadProvingKey(c.KeyPath.ProvingKeyPath[i]) - if err != nil { - panic("provingKey loading error") - } - logic.VerifyingKeys[i], err = util.LoadVerifyingKey(c.KeyPath.VerifyingKeyPath[i]) - if err != nil { - panic("verifyingKey loading error") - } - } - - cronJob := cron.New(cron.WithChain( - cron.SkipIfStillRunning(cron.DiscardLogger), - )) - _, err = cronJob.AddFunc("@every 10s", func() { - logx.Info("start prover job......") - // cron job for receiving cryptoBlock and handling - err = logic.ProveBlock(ctx) - if err != nil { - logx.Error("Prove Error: ", err.Error()) 
- } - }) - if err != nil { - panic(err) - } - cronJob.Start() - - logx.Info("prover cronjob is starting......") - select {} -} diff --git a/service/cronjob/sender/Dockerfile b/service/cronjob/sender/Dockerfile deleted file mode 100644 index 193ecdec0..000000000 --- a/service/cronjob/sender/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 -# ENV GOPROXY https://goproxy.cn,direct - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/sender /app/sender -COPY configyaml /app/etc - -CMD ["./sender", "-f", "etc/sender.yaml"] \ No newline at end of file diff --git a/service/cronjob/sender/etc/config.yaml.example b/service/cronjob/sender/etc/config.yaml.example deleted file mode 100644 index 58cf35737..000000000 --- a/service/cronjob/sender/etc/config.yaml.example +++ /dev/null @@ -1,21 +0,0 @@ -Name: sender.cronjob - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Pass: myredis - Type: node - -ChainConfig: - # NetworkRPCSysConfigName: "BscTestNetworkRpc" - NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" - MaxWaitingTime: 120 - MaxBlockCount: 3 - # Sk: "107f9d2a50ce2d8337e0c5220574e9fcf2bf60002da5acf07718f4d531ea3faa" - Sk: "08e504b8a5fd5bbc41e953f6e9cbe3371661c0010767c09315ace07e5a1e938e" - GasLimit: 5000000 - L1ChainId: "48" - diff --git a/service/cronjob/sender/internal/config/config.go b/service/cronjob/sender/internal/config/config.go deleted file mode 100644 index b06f6dc19..000000000 --- a/service/cronjob/sender/internal/config/config.go +++ /dev/null @@ -1,21 +0,0 @@ -package config - -import ( - "github.com/zeromicro/go-zero/core/stores/cache" -) - -type Config struct { - Postgres struct { - DataSource string - } - CacheRedis cache.CacheConf - ChainConfig struct { - NetworkRPCSysConfigName string - ZkbasContractAddrSysConfigName string - MaxWaitingTime int64 - MaxBlockCount int - Sk string - GasLimit uint64 - L1ChainId string - } -} diff --git a/service/cronjob/sender/internal/logic/commit.go b/service/cronjob/sender/internal/logic/commit.go deleted file mode 100644 index 1287d38b9..000000000 --- a/service/cronjob/sender/internal/logic/commit.go +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "errors" - "time" - - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" -) - -func SendCommittedBlocks(param *SenderParam, l1TxSenderModel L1TxSenderModel, - blockModel BlockModel, blockForCommitModel BlockForCommitModel) (err error) { - var ( - cli = param.Cli - authCli = param.AuthCli - zkbasInstance = param.ZkbasInstance - gasPrice = param.GasPrice - gasLimit = param.GasLimit - maxBlockCount = param.MaxBlocksCount - maxWaitingTime = param.MaxWaitingTime - ) - // scan l1 tx sender table for handled committed height - lastHandledBlock, getHandleErr := l1TxSenderModel.GetLatestHandledBlock(CommitTxType) - if getHandleErr != nil && getHandleErr != errorcode.DbErrNotFound { - logx.Errorf("[SendVerifiedAndExecutedBlocks] GetLatestHandledBlock err: %s", getHandleErr.Error()) - return getHandleErr - } - // scan l1 tx sender table for pending committed height that higher than the latest handled height - pendingSender, getPendingerr := l1TxSenderModel.GetLatestPendingBlock(CommitTxType) - if getPendingerr != nil { - if getPendingerr != errorcode.DbErrNotFound { - logx.Errorf("[SendVerifiedAndExecutedBlocks] GetLatestPendingBlock err: %s", getPendingerr.Error()) - return getPendingerr - } - } - - // case 1: - if getHandleErr == errorcode.DbErrNotFound && getPendingerr == nil { - _, isPending, err := cli.GetTransactionByHash(pendingSender.L1TxHash) - // if err != nil, means we cannot get this tx by hash - if err != nil { - // if we cannot get it from rpc and the time over 1 min - lastUpdatedAt := pendingSender.UpdatedAt.UnixMilli() - now := time.Now().UnixMilli() - if now-lastUpdatedAt > maxWaitingTime { - err := l1TxSenderModel.DeleteL1TxSender(pendingSender) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to delete l1 tx sender: %s", err.Error()) - return err - } - return nil - } else { - return nil - } - } - // if it is pending, still waiting - if isPending { - logx.Infof("[SendCommittedBlocks] tx is still pending, no need to work for anything tx hash: %s", pendingSender.L1TxHash) - return nil - } else { - receipt, err := cli.GetTransactionReceipt(pendingSender.L1TxHash) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to get transaction receipt: %s", err.Error()) - return err - } - if receipt.Status == 0 { - logx.Infof("[SendCommittedBlocks] the transaction is failure, please check: %s", pendingSender.L1TxHash) - return nil - } - } - } - // case 2: - if getHandleErr == nil && getPendingerr == nil { - isSuccess, err := cli.WaitingTransactionStatus(pendingSender.L1TxHash) - // if err != nil, means we cannot get this tx by hash - if err != nil { - // if we cannot get it from rpc and the time over 1 min - lastUpdatedAt := pendingSender.UpdatedAt.UnixMilli() - now := time.Now().UnixMilli() - if now-lastUpdatedAt > maxWaitingTime { - // drop the record - err := l1TxSenderModel.DeleteL1TxSender(pendingSender) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to delete l1 tx sender: %s", err.Error()) - return err - } - return nil - } else { - logx.Infof("[SendCommittedBlocks] tx cannot be found, but not exceed time limit: %s", pendingSender.L1TxHash) - return nil - } - } - // if it is pending, still waiting - if !isSuccess { - logx.Infof("[SendCommittedBlocks] tx is still pending, no need to work for anything tx hash: %s", pendingSender.L1TxHash) - return nil - } - } - - // case 3: - var 
lastStoredBlockInfo StorageStoredBlockInfo - var pendingCommitBlocks []ZkbasCommitBlockInfo - // if lastHandledBlock == nil, means we haven't committed any blocks, just start from 0 - // if errorcode.DbErrNotFound, means we haven't committed new blocks, just start to commit - if getHandleErr == errorcode.DbErrNotFound && getPendingerr == errorcode.DbErrNotFound { - var blocks []*BlockForCommit - blocks, err = blockForCommitModel.GetBlockForCommitBetween(1, int64(maxBlockCount)) - if err != nil { - logx.Errorf("[SendCommittedBlocks] GetBlockForCommitBetween err: %d, maxBlockCount: %d", err.Error(), maxBlockCount) - return err - } - pendingCommitBlocks, err = ConvertBlocksForCommitToCommitBlockInfos(blocks) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to convert blocks to commit block infos: %s", err.Error()) - return err - } - // set stored block header to default 0 - lastStoredBlockInfo = DefaultBlockHeader() - } - if getHandleErr == nil && getPendingerr == errorcode.DbErrNotFound { - // if errorcode.DbErrNotFound, means we haven't committed new blocks, just start to commit - // get blocks higher than last handled blocks - var blocks []*BlockForCommit - // commit new blocks - blocks, err = blockForCommitModel.GetBlockForCommitBetween(lastHandledBlock.L2BlockHeight+1, lastHandledBlock.L2BlockHeight+int64(maxBlockCount)) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to get sender new blocks: %s", err.Error()) - return err - } - pendingCommitBlocks, err = ConvertBlocksForCommitToCommitBlockInfos(blocks) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to convert blocks to commit block infos: %s", err.Error()) - return err - } - // get last block info - lastHandledBlockInfo, err := blockModel.GetBlockByBlockHeight(lastHandledBlock.L2BlockHeight) - if err != nil && err != errorcode.DbErrNotFound { - logx.Errorf("[SendCommittedBlocks] unable to get last handled block info: %s", err.Error()) - return err - } - // construct last stored block header - lastStoredBlockInfo = util.ConstructStoredBlockInfo(lastHandledBlockInfo) - } - // commit blocks on-chain - if len(pendingCommitBlocks) != 0 { - txHash, err := zkbas.CommitBlocks( - cli, authCli, - zkbasInstance, - lastStoredBlockInfo, - pendingCommitBlocks, - gasPrice, - gasLimit) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to commit blocks: %s", err.Error()) - return err - } - for _, pendingCommittedBlock := range pendingCommitBlocks { - logx.Infof("[SendCommittedBlocks] commit blocks: %v", pendingCommittedBlock.BlockNumber) - } - // update l1 tx sender table records - newSender := &L1TxSender{ - L1TxHash: txHash, - TxStatus: PendingStatus, - TxType: CommitTxType, - L2BlockHeight: int64(pendingCommitBlocks[len(pendingCommitBlocks)-1].BlockNumber), - } - isValid, err := l1TxSenderModel.CreateL1TxSender(newSender) - if err != nil { - logx.Errorf("[SendCommittedBlocks] unable to create l1 tx sender") - return err - } - if !isValid { - logx.Errorf("[SendCommittedBlocks] cannot create new senders") - return errors.New("[SendCommittedBlocks] cannot create new senders") - } - logx.Infof("[SendCommittedBlocks] new blocks have been committed(height): %v", newSender.L2BlockHeight) - return nil - } - return nil -} diff --git a/service/cronjob/sender/internal/logic/constants.go b/service/cronjob/sender/internal/logic/constants.go deleted file mode 100644 index 6c19181db..000000000 --- a/service/cronjob/sender/internal/logic/constants.go +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright © 2021 Zkbas 
Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - "math/big" - - "github.com/bnb-chain/zkbas-eth-rpc/_rpc" - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - - "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/l1TxSender" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/common/model/tx" -) - -type ( - Tx = tx.Tx - TxDetail = tx.TxDetail - Block = block.Block - BlockForCommit = blockForCommit.BlockForCommit - L1TxSenderModel = l1TxSender.L1TxSenderModel - L1TxSender = l1TxSender.L1TxSender - BlockModel = block.BlockModel - BlockForCommitModel = blockForCommit.BlockForCommitModel - - ProviderClient = _rpc.ProviderClient - AuthClient = _rpc.AuthClient - Zkbas = zkbas.Zkbas - - ZkbasCommitBlockInfo = zkbas.OldZkbasCommitBlockInfo - ZkbasVerifyBlockInfo = zkbas.OldZkbasVerifyAndExecuteBlockInfo - StorageStoredBlockInfo = zkbas.StorageStoredBlockInfo - - L2AssetInfoModel = assetInfo.AssetInfoModel - - ProofSenderModel = proofSender.ProofSenderModel -) - -const ( - PendingStatus = l1TxSender.PendingStatus - CommitTxType = l1TxSender.CommitTxType - VerifyAndExecuteTxType = l1TxSender.VerifyAndExecuteTxType -) - -type SenderParam struct { - Cli *ProviderClient - AuthCli *AuthClient - ZkbasInstance *Zkbas - MaxWaitingTime int64 - MaxBlocksCount int - GasPrice *big.Int - GasLimit uint64 -} diff --git a/service/cronjob/sender/internal/logic/utils.go b/service/cronjob/sender/internal/logic/utils.go deleted file mode 100644 index 37bdc4088..000000000 --- a/service/cronjob/sender/internal/logic/utils.go +++ /dev/null @@ -1,77 +0,0 @@ -package logic - -import ( - "encoding/json" - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/common/util" -) - -func DefaultBlockHeader() StorageStoredBlockInfo { - var ( - pendingOnchainOperationsHash [32]byte - stateRoot [32]byte - commitment [32]byte - ) - copy(pendingOnchainOperationsHash[:], common.FromHex(util.EmptyStringKeccak)[:]) - copy(stateRoot[:], tree.NilStateRoot[:]) - copy(commitment[:], common.FromHex("0x0000000000000000000000000000000000000000000000000000000000000000")[:]) - return StorageStoredBlockInfo{ - BlockSize: 0, - BlockNumber: 0, - PriorityOperations: 0, - PendingOnchainOperationsHash: pendingOnchainOperationsHash, - Timestamp: big.NewInt(0), - StateRoot: stateRoot, - Commitment: commitment, - } -} - -/* - ConvertBlocksForCommitToCommitBlockInfos: helper function to convert blocks to commit block infos -*/ -func ConvertBlocksForCommitToCommitBlockInfos(oBlocks []*BlockForCommit) (commitBlocks []ZkbasCommitBlockInfo, err error) { - for _, oBlock := range oBlocks { - var newStateRoot [32]byte - var pubDataOffsets []uint32 - copy(newStateRoot[:], 
common.FromHex(oBlock.StateRoot)[:]) - err = json.Unmarshal([]byte(oBlock.PublicDataOffsets), &pubDataOffsets) - if err != nil { - logx.Errorf("[ConvertBlocksForCommitToCommitBlockInfos] unable to unmarshal: %s", err.Error()) - return nil, err - } - commitBlock := ZkbasCommitBlockInfo{ - NewStateRoot: newStateRoot, - PublicData: common.FromHex(oBlock.PublicData), - Timestamp: big.NewInt(oBlock.Timestamp), - PublicDataOffsets: pubDataOffsets, - BlockNumber: uint32(oBlock.BlockHeight), - BlockSize: oBlock.BlockSize, - } - commitBlocks = append(commitBlocks, commitBlock) - } - return commitBlocks, nil -} - -func ConvertBlocksToVerifyAndExecuteBlockInfos(oBlocks []*Block) (verifyAndExecuteBlocks []ZkbasVerifyBlockInfo, err error) { - for _, oBlock := range oBlocks { - var pendingOnChainOpsPubData [][]byte - if oBlock.PendingOnChainOperationsPubData != "" { - err = json.Unmarshal([]byte(oBlock.PendingOnChainOperationsPubData), &pendingOnChainOpsPubData) - if err != nil { - logx.Errorf("[ConvertBlocksToVerifyAndExecuteBlockInfos] unable to unmarshal pending pub data: %s", err.Error()) - return nil, err - } - } - verifyAndExecuteBlock := ZkbasVerifyBlockInfo{ - BlockHeader: util.ConstructStoredBlockInfo(oBlock), - PendingOnchainOpsPubData: pendingOnChainOpsPubData, - } - verifyAndExecuteBlocks = append(verifyAndExecuteBlocks, verifyAndExecuteBlock) - } - return verifyAndExecuteBlocks, nil -} diff --git a/service/cronjob/sender/internal/logic/verify.go b/service/cronjob/sender/internal/logic/verify.go deleted file mode 100644 index 8bfb11130..000000000 --- a/service/cronjob/sender/internal/logic/verify.go +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "encoding/json" - "errors" - "math/big" - "time" - - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" -) - -func SendVerifiedAndExecutedBlocks( - param *SenderParam, - l1TxSenderModel L1TxSenderModel, - blockModel BlockModel, - proofSenderModel ProofSenderModel, -) (err error) { - var ( - cli = param.Cli - authCli = param.AuthCli - zkbasInstance = param.ZkbasInstance - gasPrice = param.GasPrice - gasLimit = param.GasLimit - maxBlockCount = param.MaxBlocksCount - maxWaitingTime = param.MaxWaitingTime - ) - // scan l1 tx sender table for handled verified and executed height - lastHandledBlock, getHandleErr := l1TxSenderModel.GetLatestHandledBlock(VerifyAndExecuteTxType) - if getHandleErr != nil && getHandleErr != errorcode.DbErrNotFound { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to get latest handled block: %s", getHandleErr.Error()) - return getHandleErr - } - // scan l1 tx sender table for pending verified and executed height that higher than the latest handled height - pendingSender, getPendingerr := l1TxSenderModel.GetLatestPendingBlock(VerifyAndExecuteTxType) - if getPendingerr != nil && getPendingerr != errorcode.DbErrNotFound { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to get latest pending blocks: %s", getPendingerr.Error()) - return getPendingerr - } - // case 1: check tx status on L1 - if getHandleErr == errorcode.DbErrNotFound && getPendingerr == nil { - _, isPending, err := cli.GetTransactionByHash(pendingSender.L1TxHash) - // if err != nil, means we cannot get this tx by hash - if err != nil { - // if we cannot get it from rpc and the time over 1 min - lastUpdatedAt := pendingSender.UpdatedAt.UnixMilli() - now := time.Now().UnixMilli() - if now-lastUpdatedAt > maxWaitingTime { - // drop the record - err := l1TxSenderModel.DeleteL1TxSender(pendingSender) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to delete l1 tx sender: %s", err.Error()) - return err - } - return nil - } else { - return nil - } - } - // if it is pending, still waiting - if isPending { - logx.Infof("[SendVerifiedAndExecutedBlocks] tx is still pending, no need to work for anything tx hash: %s", pendingSender.L1TxHash) - return nil - } else { - receipt, err := cli.GetTransactionReceipt(pendingSender.L1TxHash) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to get transaction receipt: %s", err.Error()) - return err - } - if receipt.Status == 0 { - logx.Infof("[SendVerifiedAndExecutedBlocks] the transaction is failure, please check: %s", pendingSender.L1TxHash) - return nil - } - } - } - // case 2: - if getHandleErr == nil && getPendingerr == nil { - isSuccess, err := cli.WaitingTransactionStatus(pendingSender.L1TxHash) - // if err != nil, means we cannot get this tx by hash - if err != nil { - // if we cannot get it from rpc and the time over 1 min - lastUpdatedAt := pendingSender.UpdatedAt.UnixMilli() - if time.Now().UnixMilli()-lastUpdatedAt > maxWaitingTime { - // drop the record - if err := l1TxSenderModel.DeleteL1TxSender(pendingSender); err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to delete l1 tx sender: %s", err.Error()) - return err - } - } - return nil - } - // if it is pending, still waiting - if !isSuccess { - return nil - } - } - // case 3: means we haven't verified and executed new 
blocks, just start to commit - var ( - start int64 - blocks []*block.Block - pendingVerifyAndExecuteBlocks []ZkbasVerifyBlockInfo - ) - if getHandleErr == errorcode.DbErrNotFound && getPendingerr == errorcode.DbErrNotFound { - // get blocks from block table - blocks, err = blockModel.GetBlocksForProverBetween(1, int64(maxBlockCount)) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] GetBlocksForProverBetween err: %s, maxBlockCount: %d", err.Error(), maxBlockCount) - return err - } - pendingVerifyAndExecuteBlocks, err = ConvertBlocksToVerifyAndExecuteBlockInfos(blocks) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to convert blocks to verify and execute block infos: %s", err.Error()) - return err - } - start = int64(1) - } - if getHandleErr == nil && getPendingerr == errorcode.DbErrNotFound { - blocks, err = blockModel.GetBlocksForProverBetween(lastHandledBlock.L2BlockHeight+1, lastHandledBlock.L2BlockHeight+int64(maxBlockCount)) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to get sender new blocks: %s", err.Error()) - return err - } - pendingVerifyAndExecuteBlocks, err = ConvertBlocksToVerifyAndExecuteBlockInfos(blocks) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to convert blocks to commit block infos: %s", err.Error()) - return err - } - start = lastHandledBlock.L2BlockHeight + 1 - } - // TODO: for test - /* - if len(blocks) < maxBlockCount { - logx.Errorf("current pending verify blocks %d is less than %d", len(blocks), maxBlockCount) - return err - } - */ - proofSenders, err := proofSenderModel.GetProofsByBlockRange(start, blocks[len(blocks)-1].BlockHeight, maxBlockCount) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to get proofs: %s", err.Error()) - return err - } - if len(proofSenders) != len(blocks) { - logx.Errorf("[SendVerifiedAndExecutedBlocks] we haven't generated related proofs, please wait") - return errors.New("[SendVerifiedAndExecutedBlocks] we haven't generated related proofs, please wait") - } - var proofs []*big.Int - for _, proofSender := range proofSenders { - var proofInfo *util.FormattedProof - err = json.Unmarshal([]byte(proofSender.ProofInfo), &proofInfo) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] unable to unmarshal proof info: %s", err.Error()) - return err - } - proofs = append(proofs, proofInfo.A[:]...) - proofs = append(proofs, proofInfo.B[0][0], proofInfo.B[0][1]) - proofs = append(proofs, proofInfo.B[1][0], proofInfo.B[1][1]) - proofs = append(proofs, proofInfo.C[:]...) 
- } - // commit blocks on-chain - if len(pendingVerifyAndExecuteBlocks) != 0 { - txHash, err := zkbas.VerifyAndExecuteBlocks(cli, authCli, zkbasInstance, - pendingVerifyAndExecuteBlocks, proofs, gasPrice, gasLimit) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] VerifyAndExecuteBlocks err: %s", err.Error()) - return err - } - // update l1 tx sender table records - newSender := &L1TxSender{ - L1TxHash: txHash, - TxStatus: PendingStatus, - TxType: VerifyAndExecuteTxType, - L2BlockHeight: int64(pendingVerifyAndExecuteBlocks[len(pendingVerifyAndExecuteBlocks)-1].BlockHeader.BlockNumber), - } - isValid, err := l1TxSenderModel.CreateL1TxSender(newSender) - if err != nil { - logx.Errorf("[SendVerifiedAndExecutedBlocks] CreateL1TxSender err: %s", err.Error()) - return err - } - if !isValid { - logx.Errorf("[SendVerifiedAndExecutedBlocks] cannot create new senders") - return errors.New("[SendVerifiedAndExecutedBlocks] cannot create new senders") - } - logx.Errorf("[SendVerifiedAndExecutedBlocks] new blocks have been verified and executed(height): %d", newSender.L2BlockHeight) - return nil - } - return nil -} diff --git a/service/cronjob/sender/internal/svc/servicecontext.go b/service/cronjob/sender/internal/svc/servicecontext.go deleted file mode 100644 index f12a2d488..000000000 --- a/service/cronjob/sender/internal/svc/servicecontext.go +++ /dev/null @@ -1,50 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForCommit" - "github.com/bnb-chain/zkbas/common/model/l1TxSender" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/service/cronjob/sender/internal/config" -) - -type ServiceContext struct { - Config config.Config - - BlockModel block.BlockModel - BlockForCommitModel blockForCommit.BlockForCommitModel - L1TxSenderModel l1TxSender.L1TxSenderModel - SysConfigModel sysconfig.SysconfigModel - ProofSenderModel proofSender.ProofSenderModel -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Errorf("gorm connect db error, err = %s", err.Error()) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - return &ServiceContext{ - Config: c, - BlockModel: block.NewBlockModel(conn, c.CacheRedis, gormPointer, redisConn), - BlockForCommitModel: blockForCommit.NewBlockForCommitModel(conn, c.CacheRedis, gormPointer), - L1TxSenderModel: l1TxSender.NewL1TxSenderModel(conn, c.CacheRedis, gormPointer), - SysConfigModel: sysconfig.NewSysconfigModel(conn, c.CacheRedis, gormPointer), - ProofSenderModel: proofSender.NewProofSenderModel(gormPointer), - } -} diff --git a/service/cronjob/sender/sender.go b/service/cronjob/sender/sender.go deleted file mode 100644 index fe1f3e181..000000000 --- a/service/cronjob/sender/sender.go +++ /dev/null @@ -1,126 +0,0 @@ -package main - -import ( - "context" - "flag" - "math/big" - "time" - - "github.com/bnb-chain/zkbas-eth-rpc/_rpc" - zkbas 
"github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/robfig/cron/v3" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/cronjob/sender/internal/config" - "github.com/bnb-chain/zkbas/service/cronjob/sender/internal/logic" - "github.com/bnb-chain/zkbas/service/cronjob/sender/internal/svc" -) - -var configFile = flag.String("f", - "./etc/sender.yaml", "the config file") - -func main() { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - ctx := svc.NewServiceContext(c) - // srv := server.NewSenderServer(ctx) - logx.DisableStat() - networkEndpointName := c.ChainConfig.NetworkRPCSysConfigName - networkEndpoint, err := ctx.SysConfigModel.GetSysconfigByName(networkEndpointName) - if err != nil { - logx.Severef("[sender] fatal error, cannot fetch networkEndpoint from sysConfig, err: %s, SysConfigName: %s", - err.Error(), c.ChainConfig.NetworkRPCSysConfigName) - panic(err) - } - ZkbasRollupAddress, err := ctx.SysConfigModel.GetSysconfigByName(c.ChainConfig.ZkbasContractAddrSysConfigName) - if err != nil { - logx.Severef("[sender] fatal error, cannot fetch ZkbasRollupAddress from sysConfig, err: %s, SysConfigName: %s", - err.Error(), c.ChainConfig.ZkbasContractAddrSysConfigName) - panic(err) - } - - cli, err := _rpc.NewClient(networkEndpoint.Value) - if err != nil { - panic(err) - } - var chainId *big.Int - if c.ChainConfig.L1ChainId == "" { - chainId, err = cli.ChainID(context.Background()) - if err != nil { - panic(err) - } - } else { - var ( - isValid bool - ) - chainId, isValid = new(big.Int).SetString(c.ChainConfig.L1ChainId, 10) - if !isValid { - panic("invalid l1 chain id") - } - } - - authCli, err := _rpc.NewAuthClient(cli, c.ChainConfig.Sk, chainId) - if err != nil { - panic(err) - } - zkbasInstance, err := zkbas.LoadZkbasInstance(cli, ZkbasRollupAddress.Value) - if err != nil { - panic(err) - } - gasPrice, err := cli.SuggestGasPrice(context.Background()) - if err != nil { - panic(err) - } - - var param = &logic.SenderParam{ - Cli: cli, - AuthCli: authCli, - ZkbasInstance: zkbasInstance, - MaxWaitingTime: c.ChainConfig.MaxWaitingTime * time.Second.Milliseconds(), - MaxBlocksCount: c.ChainConfig.MaxBlockCount, - GasPrice: gasPrice, - GasLimit: c.ChainConfig.GasLimit, - } - - // new cron - cronJob := cron.New(cron.WithChain( - cron.SkipIfStillRunning(cron.DiscardLogger), - )) - - _, err = cronJob.AddFunc("@every 10s", func() { - logx.Info("========================= start sender committer task =========================") - err := logic.SendCommittedBlocks( - param, - ctx.L1TxSenderModel, - ctx.BlockModel, - ctx.BlockForCommitModel, - ) - if err != nil { - logx.Info("[sender.SendCommittedBlocks main] unable to run:", err) - } - logx.Info("========================= end sender committer task =========================") - }) - if err != nil { - panic(err) - } - - _, err = cronJob.AddFunc("@every 10s", func() { - logx.Info("========================= start sender verifier task =========================") - err = logic.SendVerifiedAndExecutedBlocks(param, ctx.L1TxSenderModel, ctx.BlockModel, ctx.ProofSenderModel) - if err != nil { - logx.Info("[sender.SendCommittedBlocks main] unable to run:", err) - } - logx.Info("========================= end sender verifier task =========================") - }) - if err != nil { - panic(err) - } - - cronJob.Start() - - logx.Info("sender cronjob is starting......") - select {} -} diff --git a/service/cronjob/witnessGenerator/Dockerfile 
b/service/cronjob/witnessGenerator/Dockerfile deleted file mode 100644 index 4e6a6438d..000000000 --- a/service/cronjob/witnessGenerator/Dockerfile +++ /dev/null @@ -1,29 +0,0 @@ -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/witnessgenerator /app/witnessgenerator -COPY configyaml /app/etc - -RUN apk --no-cache add ca-certificates openssl libstdc++ && update-ca-certificates - -RUN apk update --no-cache && apk add --no-cache tzdata python3 curl which bash - -RUN curl -sSL https://sdk.cloud.google.com | bash - -ENV PATH $PATH:/root/google-cloud-sdk/bin - -WORKDIR /app - -CMD ["./witnessgenerator", "-f", "etc/witnessgenerator.yaml"] diff --git a/service/cronjob/witnessGenerator/etc/witnessGenerator.yaml.example b/service/cronjob/witnessGenerator/etc/witnessGenerator.yaml.example deleted file mode 100644 index 117659470..000000000 --- a/service/cronjob/witnessGenerator/etc/witnessGenerator.yaml.example +++ /dev/null @@ -1,11 +0,0 @@ -Name: witnessGenerator.cronjob - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Type: node - -TreeDB: - Driver: memorydb \ No newline at end of file diff --git a/service/cronjob/witnessGenerator/internal/config/config.go b/service/cronjob/witnessGenerator/internal/config/config.go deleted file mode 100644 index 13312445d..000000000 --- a/service/cronjob/witnessGenerator/internal/config/config.go +++ /dev/null @@ -1,18 +0,0 @@ -package config - -import ( - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/zeromicro/go-zero/core/stores/cache" -) - -type Config struct { - Postgres struct { - DataSource string - } - CacheRedis cache.CacheConf - TreeDB struct { - Driver treedb.Driver - LevelDBOption treedb.LevelDBOption `json:",optional"` - RedisDBOption treedb.RedisDBOption `json:",optional"` - } -} diff --git a/service/cronjob/witnessGenerator/internal/logic/constants.go b/service/cronjob/witnessGenerator/internal/logic/constants.go deleted file mode 100644 index d914a68d9..000000000 --- a/service/cronjob/witnessGenerator/internal/logic/constants.go +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "time" - - cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForProof" -) - -const ( - UnprovedBlockReceivedTimeout = 10 * time.Minute - - BlockProcessDelta = 10 -) - -type ( - Block = block.Block - CryptoTx = cryptoBlock.Tx - CryptoBlock = cryptoBlock.Block - BlockForProof = blockForProof.BlockForProof - StorageStoredBlockInfo = zkbas.StorageStoredBlockInfo -) - -type CryptoBlockInfo struct { - BlockInfo *CryptoBlock - Status int64 -} diff --git a/service/cronjob/witnessGenerator/internal/logic/handleCryptoBlock.go b/service/cronjob/witnessGenerator/internal/logic/handleCryptoBlock.go deleted file mode 100644 index 2ccad2b49..000000000 --- a/service/cronjob/witnessGenerator/internal/logic/handleCryptoBlock.go +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - "errors" - "time" - - bsmt "github.com/bnb-chain/bas-smt" - cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/blockForProof" - "github.com/bnb-chain/zkbas/common/proverUtil" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/bnb-chain/zkbas/service/cronjob/witnessGenerator/internal/svc" -) - -func GenerateWitness( - treeCtx *treedb.Context, - accountTree bsmt.SparseMerkleTree, - assetTrees *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - ctx *svc.ServiceContext, - deltaHeight int64, -) { - err := generateUnprovedBlockWitness(ctx, treeCtx, accountTree, assetTrees, liquidityTree, nftTree, deltaHeight) - if err != nil { - logx.Errorf("generate block witness error, err=%s", err.Error()) - } - - updateTimeoutUnprovedBlock(ctx) -} - -func generateUnprovedBlockWitness( - ctx *svc.ServiceContext, - treeCtx *treedb.Context, - accountTree bsmt.SparseMerkleTree, - assetTrees *[]bsmt.SparseMerkleTree, - liquidityTree bsmt.SparseMerkleTree, - nftTree bsmt.SparseMerkleTree, - deltaHeight int64, -) error { - latestUnprovedHeight, err := ctx.BlockForProofModel.GetLatestUnprovedBlockHeight() - if err != nil { - if err == errorcode.DbErrNotFound { - latestUnprovedHeight = 0 - } else { - return err - } - } - - // get last handled block info - blocks, err := ctx.BlockModel.GetBlocksBetween(latestUnprovedHeight+1, latestUnprovedHeight+deltaHeight) - if err != nil { - return err - } - // get latestVerifiedBlockNr - latestVerifiedBlockNr, err := ctx.BlockModel.GetLatestVerifiedBlockHeight() - if err != nil { - return err - } - - // scan each block - for _, oBlock := range blocks { - var ( - oldStateRoot []byte - newStateRoot []byte - 
blockCommitment []byte - isFirst bool - ) - var ( - cryptoTxs []*CryptoTx - ) - // scan each transaction - for _, oTx := range oBlock.Txs { - var ( - cryptoTx *CryptoTx - ) - cryptoTx, err = proverUtil.ConstructCryptoTx(oTx, treeCtx, accountTree, assetTrees, liquidityTree, nftTree, ctx.AccountModel, uint64(latestVerifiedBlockNr)) - if err != nil { - logx.Errorf("[prover] unable to construct crypto tx: %s", err.Error()) - return err - } - if !isFirst { - oldStateRoot = cryptoTx.StateRootBefore - isFirst = true - } - newStateRoot = cryptoTx.StateRootAfter - cryptoTxs = append(cryptoTxs, cryptoTx) - logx.Info("after state root:", common.Bytes2Hex(newStateRoot)) - } - - emptyTxCount := int(oBlock.BlockSize) - len(oBlock.Txs) - for i := 0; i < emptyTxCount; i++ { - cryptoTxs = append(cryptoTxs, cryptoBlock.EmptyTx()) - } - blockCommitment = common.FromHex(oBlock.BlockCommitment) - if common.Bytes2Hex(newStateRoot) != oBlock.StateRoot { - logx.Info("error: new root:", common.Bytes2Hex(newStateRoot)) - logx.Info("error: BlockCommitment:", common.Bytes2Hex(blockCommitment)) - return errors.New("state root doesn't match") - } - - blockInfo, err := proverUtil.BlockToCryptoBlock(oBlock, oldStateRoot, newStateRoot, cryptoTxs) - if err != nil { - logx.Errorf("[prover] unable to convert block to crypto block") - return err - } - var nCryptoBlockInfo = &CryptoBlockInfo{ - BlockInfo: blockInfo, - Status: blockForProof.StatusPublished, - } - logx.Info("new root:", common.Bytes2Hex(nCryptoBlockInfo.BlockInfo.NewStateRoot)) - logx.Info("BlockCommitment:", common.Bytes2Hex(nCryptoBlockInfo.BlockInfo.BlockCommitment)) - - // insert crypto blocks array - unprovedCryptoBlockModel, err := CryptoBlockInfoToBlockForProof(nCryptoBlockInfo) - if err != nil { - logx.Errorf("[prover] marshal crypto block info error, err=%s", err.Error()) - return err - } - - // commit trees - err = tree.CommitTrees(uint64(latestVerifiedBlockNr), accountTree, assetTrees, liquidityTree, nftTree) - if err != nil { - logx.Errorf("[prover] unable to commit trees after txs is executed", err.Error()) - return err - } - - err = ctx.BlockForProofModel.CreateConsecutiveUnprovedCryptoBlock(unprovedCryptoBlockModel) - if err != nil { - // rollback trees - err = tree.RollBackTrees(uint64(oBlock.BlockHeight)-1, accountTree, assetTrees, liquidityTree, nftTree) - if err != nil { - logx.Errorf("[prover] unable to rollback trees", err) - } - logx.Errorf("[prover] create unproved crypto block error, err=%s", err.Error()) - return err - } - - } - return nil -} - -func updateTimeoutUnprovedBlock(ctx *svc.ServiceContext) { - latestConfirmedProof, err := ctx.ProofSenderModel.GetLatestConfirmedProof() - if err != nil && err != errorcode.DbErrNotFound { - return - } - - var nextBlockNumber int64 = 1 - if err != errorcode.DbErrNotFound { - nextBlockNumber = latestConfirmedProof.BlockNumber + 1 - } - - nextUnprovedBlock, err := ctx.BlockForProofModel.GetUnprovedCryptoBlockByBlockNumber(nextBlockNumber) - if err != nil { - return - } - - // skip if next block is not processed - if nextUnprovedBlock.Status == blockForProof.StatusPublished { - return - } - - // skip if the next block proof exists - // if the proof is not submitted and verified in L1, there should be another alerts - _, err = ctx.ProofSenderModel.GetProofByBlockNumber(nextBlockNumber) - if err == nil { - return - } - - // update block status to Published if it's timeout - if time.Now().After(nextUnprovedBlock.UpdatedAt.Add(UnprovedBlockReceivedTimeout)) { - err := 
ctx.BlockForProofModel.UpdateUnprovedCryptoBlockStatus(nextUnprovedBlock, blockForProof.StatusPublished) - if err != nil { - logx.Errorf("update unproved block status error, err=%s", err.Error()) - return - } - } -} diff --git a/service/cronjob/witnessGenerator/internal/logic/utils.go b/service/cronjob/witnessGenerator/internal/logic/utils.go deleted file mode 100644 index c022b1ab5..000000000 --- a/service/cronjob/witnessGenerator/internal/logic/utils.go +++ /dev/null @@ -1,27 +0,0 @@ -package logic - -import ( - "encoding/json" - "errors" - - "github.com/bnb-chain/zkbas/common/model/blockForProof" -) - -func CryptoBlockInfoToBlockForProof(cryptoBlock *CryptoBlockInfo) (*BlockForProof, error) { - if cryptoBlock == nil { - return nil, errors.New("crypto block is nil") - } - - blockInfo, err := json.Marshal(cryptoBlock.BlockInfo) - if err != nil { - return nil, err - } - - blockModel := blockForProof.BlockForProof{ - BlockHeight: cryptoBlock.BlockInfo.BlockNumber, - BlockData: string(blockInfo), - Status: cryptoBlock.Status, - } - - return &blockModel, nil -} diff --git a/service/cronjob/witnessGenerator/internal/svc/servicecontext.go b/service/cronjob/witnessGenerator/internal/svc/servicecontext.go deleted file mode 100644 index d3de527b4..000000000 --- a/service/cronjob/witnessGenerator/internal/svc/servicecontext.go +++ /dev/null @@ -1,58 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/blockForProof" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/service/cronjob/witnessGenerator/internal/config" -) - -type ServiceContext struct { - Config config.Config - - RedisConn *redis.Redis - - BlockModel block.BlockModel - AccountModel account.AccountModel - AccountHistoryModel account.AccountHistoryModel - LiquidityHistoryModel liquidity.LiquidityHistoryModel - NftHistoryModel nft.L2NftHistoryModel - ProofSenderModel proofSender.ProofSenderModel - BlockForProofModel blockForProof.BlockForProofModel -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Errorf("gorm connect db error, err = %s", err.Error()) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - return &ServiceContext{ - Config: c, - RedisConn: redisConn, - BlockModel: block.NewBlockModel(conn, c.CacheRedis, gormPointer, redisConn), - BlockForProofModel: blockForProof.NewBlockForProofModel(conn, c.CacheRedis, gormPointer), - AccountModel: account.NewAccountModel(conn, c.CacheRedis, gormPointer), - AccountHistoryModel: account.NewAccountHistoryModel(conn, c.CacheRedis, gormPointer), - LiquidityHistoryModel: liquidity.NewLiquidityHistoryModel(conn, c.CacheRedis, gormPointer), - NftHistoryModel: nft.NewL2NftHistoryModel(conn, c.CacheRedis, gormPointer), - ProofSenderModel: proofSender.NewProofSenderModel(gormPointer), - } 
-} diff --git a/service/cronjob/witnessGenerator/witnessgenerator.go b/service/cronjob/witnessGenerator/witnessgenerator.go deleted file mode 100644 index d11f9b9ca..000000000 --- a/service/cronjob/witnessGenerator/witnessgenerator.go +++ /dev/null @@ -1,111 +0,0 @@ -package main - -import ( - "flag" - - bsmt "github.com/bnb-chain/bas-smt" - "github.com/pkg/errors" - "github.com/robfig/cron/v3" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/model/proofSender" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/treedb" - "github.com/bnb-chain/zkbas/service/cronjob/witnessGenerator/internal/config" - "github.com/bnb-chain/zkbas/service/cronjob/witnessGenerator/internal/logic" - "github.com/bnb-chain/zkbas/service/cronjob/witnessGenerator/internal/svc" -) - -var configFile = flag.String("f", "./etc/witnessGenerator.yaml", "the config file") - -func main() { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - ctx := svc.NewServiceContext(c) - logx.DisableStat() - - p, err := ctx.ProofSenderModel.GetLatestConfirmedProof() - if err != nil { - if err != errorcode.DbErrNotFound { - logx.Error("[prover] => GetLatestConfirmedProof error:", err) - return - } else { - p = &proofSender.ProofSender{ - BlockNumber: 0, - } - } - } - var ( - accountTree bsmt.SparseMerkleTree - assetTrees []bsmt.SparseMerkleTree - liquidityTree bsmt.SparseMerkleTree - nftTree bsmt.SparseMerkleTree - ) - // init tree database - treeCtx := &treedb.Context{ - Name: "witness", - Driver: c.TreeDB.Driver, - LevelDBOption: &c.TreeDB.LevelDBOption, - RedisDBOption: &c.TreeDB.RedisDBOption, - } - err = treedb.SetupTreeDB(treeCtx) - if err != nil { - panic(errors.Wrap(err, "[prover] => Init tree database failed")) - } - // init accountTree and accountStateTrees - // the init block number use the latest sent block - accountTree, assetTrees, err = tree.InitAccountTree( - ctx.AccountModel, - ctx.AccountHistoryModel, - p.BlockNumber, - treeCtx, - ) - // the blockHeight depends on the proof start position - if err != nil { - logx.Error("[prover] => InitMerkleTree error:", err) - return - } - - liquidityTree, err = tree.InitLiquidityTree(ctx.LiquidityHistoryModel, p.BlockNumber, - treeCtx) - if err != nil { - logx.Errorf("[prover] InitLiquidityTree error: %s", err.Error()) - return - } - nftTree, err = tree.InitNftTree(ctx.NftHistoryModel, p.BlockNumber, - treeCtx) - if err != nil { - logx.Errorf("[prover] InitNftTree error: %s", err.Error()) - return - } - - cronJob := cron.New(cron.WithChain( - cron.SkipIfStillRunning(cron.DiscardLogger), - )) - _, err = cronJob.AddFunc("@every 10s", func() { - // cron job for creating cryptoBlock - logx.Info("==========start generate block witness==========") - logic.GenerateWitness( - treeCtx, - accountTree, - &assetTrees, - liquidityTree, - nftTree, - ctx, - logic.BlockProcessDelta, - ) - logx.Info("==========end generate block witness==========") - }) - if err != nil { - panic(err) - } - cronJob.Start() - - logx.Info("witness generator cronjob is starting......") - - select {} -} diff --git a/service/monitor/config/config.go b/service/monitor/config/config.go new file mode 100644 index 000000000..e33fcf603 --- /dev/null +++ b/service/monitor/config/config.go @@ -0,0 +1,18 @@ +package config + +import ( + "github.com/zeromicro/go-zero/core/logx" +) + +type Config struct { + Postgres struct { + DataSource string + } + ChainConfig struct { 
+ NetworkRPCSysConfigName string + StartL1BlockHeight int64 + ConfirmBlocksCount uint64 + MaxHandledBlocksCount int64 + } + LogConf logx.LogConf +} diff --git a/service/monitor/etc/config.yaml.example b/service/monitor/etc/config.yaml.example new file mode 100644 index 000000000..0ac0edd84 --- /dev/null +++ b/service/monitor/etc/config.yaml.example @@ -0,0 +1,14 @@ +Name: monitor + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +ChainConfig: + NetworkRPCSysConfigName: "BscTestNetworkRpc" + #NetworkRPCSysConfigName: "LocalTestNetworkRpc" + StartL1BlockHeight: $blockNumber + ConfirmBlocksCount: 0 + MaxHandledBlocksCount: 5000 + +TreeDB: + Driver: memorydb \ No newline at end of file diff --git a/service/monitor/monitor.go b/service/monitor/monitor.go new file mode 100644 index 000000000..db78851a3 --- /dev/null +++ b/service/monitor/monitor.go @@ -0,0 +1,59 @@ +package monitor + +import ( + "github.com/robfig/cron/v3" + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/proc" + + "github.com/bnb-chain/zkbas/service/monitor/config" + "github.com/bnb-chain/zkbas/service/monitor/monitor" +) + +func Run(configFile string) error { + var c config.Config + conf.MustLoad(configFile, &c) + m := monitor.NewMonitor(c) + logx.MustSetup(c.LogConf) + logx.DisableStat() + proc.AddShutdownListener(func() { + logx.Close() + }) + cronjob := cron.New(cron.WithChain( + cron.SkipIfStillRunning(cron.DiscardLogger), + )) + + // m generic blocks + if _, err := cronjob.AddFunc("@every 10s", func() { + err := m.MonitorGenericBlocks() + if err != nil { + logx.Errorf("monitor blocks error, %v", err) + } + }); err != nil { + panic(err) + } + + // m priority requests + if _, err := cronjob.AddFunc("@every 10s", func() { + err := m.MonitorPriorityRequests() + if err != nil { + logx.Errorf("monitor priority requests error, %v", err) + } + }); err != nil { + panic(err) + } + + // m governance blocks + if _, err := cronjob.AddFunc("@every 10s", func() { + err := m.MonitorGovernanceBlocks() + if err != nil { + logx.Errorf("monitor governance blocks error, %v", err) + } + + }); err != nil { + panic(err) + } + cronjob.Start() + logx.Info("Starting monitor cronjob ...") + select {} +} diff --git a/service/monitor/monitor/monitor.go b/service/monitor/monitor/monitor.go new file mode 100644 index 000000000..38975ccd7 --- /dev/null +++ b/service/monitor/monitor/monitor.go @@ -0,0 +1,101 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package monitor + +import ( + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas-eth-rpc/_rpc" + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/l1rolluptx" + "github.com/bnb-chain/zkbas/dao/l1syncedblock" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/priorityrequest" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/service/monitor/config" + "github.com/bnb-chain/zkbas/types" +) + +type Monitor struct { + Config config.Config + + cli *_rpc.ProviderClient + + zkbasContractAddress string + governanceContractAddress string + + BlockModel block.BlockModel + MempoolModel mempool.MempoolModel + SysConfigModel sysconfig.SysConfigModel + L1RollupTxModel l1rolluptx.L1RollupTxModel + L2AssetModel asset.AssetModel + PriorityRequestModel priorityrequest.PriorityRequestModel + L1SyncedBlockModel l1syncedblock.L1SyncedBlockModel +} + +func NewMonitor(c config.Config) *Monitor { + db, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) + if err != nil { + logx.Errorf("gorm connect db error, err: %s", err.Error()) + } + monitor := &Monitor{ + Config: c, + PriorityRequestModel: priorityrequest.NewPriorityRequestModel(db), + MempoolModel: mempool.NewMempoolModel(db), + BlockModel: block.NewBlockModel(db), + L1RollupTxModel: l1rolluptx.NewL1RollupTxModel(db), + L1SyncedBlockModel: l1syncedblock.NewL1SyncedBlockModel(db), + L2AssetModel: asset.NewAssetModel(db), + SysConfigModel: sysconfig.NewSysConfigModel(db), + } + + zkbasAddressConfig, err := monitor.SysConfigModel.GetSysConfigByName(types.ZkbasContract) + if err != nil { + logx.Errorf("GetSysConfigByName err: %s", err.Error()) + panic(err) + } + + governanceAddressConfig, err := monitor.SysConfigModel.GetSysConfigByName(types.GovernanceContract) + if err != nil { + logx.Severef("fatal error, cannot fetch governance contract from sysconfig, err: %s, SysConfigName: %s", + err.Error(), types.GovernanceContract) + panic(err) + } + + networkRpc, err := monitor.SysConfigModel.GetSysConfigByName(c.ChainConfig.NetworkRPCSysConfigName) + if err != nil { + logx.Severef("fatal error, cannot fetch NetworkRPC from sysconfig, err: %s, SysConfigName: %s", + err.Error(), c.ChainConfig.NetworkRPCSysConfigName) + panic(err) + } + logx.Infof("ChainName: %s, zkbasContractAddress: %s, networkRpc: %s", + c.ChainConfig.NetworkRPCSysConfigName, zkbasAddressConfig.Value, networkRpc.Value) + + bscRpcCli, err := _rpc.NewClient(networkRpc.Value) + if err != nil { + panic(err) + } + + monitor.zkbasContractAddress = zkbasAddressConfig.Value + monitor.governanceContractAddress = governanceAddressConfig.Value + monitor.cli = bscRpcCli + + return monitor +} diff --git a/service/monitor/monitor/monitor_generic_blocks.go b/service/monitor/monitor/monitor_generic_blocks.go new file mode 100644 index 000000000..2c8000bc3 --- /dev/null +++ b/service/monitor/monitor/monitor_generic_blocks.go @@ -0,0 +1,250 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package monitor + +import ( + "context" + "encoding/json" + "fmt" + "math/big" + + "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-eth-rpc/_rpc" + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/l1syncedblock" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/priorityrequest" + types2 "github.com/bnb-chain/zkbas/types" +) + +func (m *Monitor) MonitorGenericBlocks() (err error) { + latestHandledBlock, err := m.L1SyncedBlockModel.GetLatestL1BlockByType(l1syncedblock.TypeGeneric) + var handledHeight int64 + if err != nil { + if err == types2.DbErrNotFound { + handledHeight = m.Config.ChainConfig.StartL1BlockHeight + } else { + return fmt.Errorf("failed to get latest l1 monitor block, err: %v", err) + } + } else { + handledHeight = latestHandledBlock.L1BlockHeight + } + + // get latest l1 block height(latest height - pendingBlocksCount) + latestHeight, err := m.cli.GetHeight() + if err != nil { + return fmt.Errorf("failed to get l1 height, err: %v", err) + } + + safeHeight := latestHeight - m.Config.ChainConfig.ConfirmBlocksCount + safeHeight = uint64(common2.MinInt64(int64(safeHeight), handledHeight+m.Config.ChainConfig.MaxHandledBlocksCount)) + if safeHeight <= uint64(handledHeight) { + return nil + } + + logx.Infof("syncing l1 blocks from %d to %d", big.NewInt(handledHeight+1), big.NewInt(int64(safeHeight))) + + priorityRequestCount, err := getPriorityRequestCount(m.cli, m.zkbasContractAddress, uint64(handledHeight+1), safeHeight) + if err != nil { + return fmt.Errorf("failed to get priority request count, err: %v", err) + } + + logs, err := getZkbasContractLogs(m.cli, m.zkbasContractAddress, uint64(handledHeight+1), safeHeight) + if err != nil { + return fmt.Errorf("failed to get contract logs, err: %v", err) + } + var ( + l1EventInfos []*L1EventInfo + priorityRequests []*priorityrequest.PriorityRequest + + priorityRequestCountCheck = 0 + + relatedBlocks = make(map[int64]*block.Block) + ) + for _, vlog := range logs { + l1EventInfo := &L1EventInfo{ + TxHash: vlog.TxHash.Hex(), + } + + logBlock, err := m.cli.GetBlockHeaderByNumber(big.NewInt(int64(vlog.BlockNumber))) + if err != nil { + return fmt.Errorf("failed to get block header, err: %v", err) + } + + switch vlog.Topics[0].Hex() { + case zkbasLogNewPriorityRequestSigHash.Hex(): + priorityRequestCountCheck++ + l1EventInfo.EventType = EventTypeNewPriorityRequest + + l2TxEventMonitorInfo, err := convertLogToNewPriorityRequestEvent(vlog) + if err != nil { + return fmt.Errorf("failed to convert NewPriorityRequest log, err: %v", err) + } + priorityRequests = append(priorityRequests, l2TxEventMonitorInfo) + case zkbasLogWithdrawalSigHash.Hex(): + case zkbasLogWithdrawalPendingSigHash.Hex(): + case zkbasLogBlockCommitSigHash.Hex(): + l1EventInfo.EventType = 
EventTypeCommittedBlock + + var event zkbas.ZkbasBlockCommit + if err := ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockCommit, vlog.Data); err != nil { + return fmt.Errorf("failed to unpack ZkbasBlockCommit event, err: %v", err) + } + + // update block status + blockHeight := int64(event.BlockNumber) + if relatedBlocks[blockHeight] == nil { + relatedBlocks[blockHeight], err = m.BlockModel.GetBlockByHeightWithoutTx(blockHeight) + if err != nil { + return fmt.Errorf("GetBlockByHeightWithoutTx err: %v", err) + } + } + relatedBlocks[blockHeight].CommittedTxHash = vlog.TxHash.Hex() + relatedBlocks[blockHeight].CommittedAt = int64(logBlock.Time) + relatedBlocks[blockHeight].BlockStatus = block.StatusCommitted + case zkbasLogBlockVerificationSigHash.Hex(): + l1EventInfo.EventType = EventTypeVerifiedBlock + + var event zkbas.ZkbasBlockVerification + if err := ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockVerification, vlog.Data); err != nil { + return fmt.Errorf("failed to unpack ZkbasBlockVerification event, err: %v", err) + } + + // update block status + blockHeight := int64(event.BlockNumber) + if relatedBlocks[blockHeight] == nil { + relatedBlocks[blockHeight], err = m.BlockModel.GetBlockByHeightWithoutTx(blockHeight) + if err != nil { + return fmt.Errorf("failed to GetBlockByHeightWithoutTx: %v", err) + } + } + relatedBlocks[blockHeight].VerifiedTxHash = vlog.TxHash.Hex() + relatedBlocks[blockHeight].VerifiedAt = int64(logBlock.Time) + relatedBlocks[blockHeight].BlockStatus = block.StatusVerifiedAndExecuted + case zkbasLogBlocksRevertSigHash.Hex(): + l1EventInfo.EventType = EventTypeRevertedBlock + default: + } + + l1EventInfos = append(l1EventInfos, l1EventInfo) + } + if priorityRequestCount != priorityRequestCountCheck { + return fmt.Errorf("priority request count mismatch: expected %d, got %d, try again", priorityRequestCount, priorityRequestCountCheck) + } + + eventInfosBytes, err := json.Marshal(l1EventInfos) + if err != nil { + return err + } + l1BlockMonitorInfo := &l1syncedblock.L1SyncedBlock{ + L1BlockHeight: int64(safeHeight), + BlockInfo: string(eventInfosBytes), + Type: l1syncedblock.TypeGeneric, + } + + // get pending update blocks + pendingUpdateBlocks := make([]*block.Block, 0, len(relatedBlocks)) + for _, pendingUpdateBlock := range relatedBlocks { + pendingUpdateBlocks = append(pendingUpdateBlocks, pendingUpdateBlock) + } + + // get mempool txs to delete + pendingDeleteMempoolTxs, err := getMempoolTxsToDelete(pendingUpdateBlocks, m.MempoolModel) + if err != nil { + return fmt.Errorf("failed to get mempool txs to delete, err: %v", err) + } + + if err = m.L1SyncedBlockModel.CreateGenericBlock(l1BlockMonitorInfo, priorityRequests, + pendingUpdateBlocks, pendingDeleteMempoolTxs); err != nil { + return fmt.Errorf("failed to store monitor info, err: %v", err) + } + logx.Info("create txs count:", len(priorityRequests)) + return nil +} + +func getMempoolTxsToDelete(blocks []*block.Block, mempoolModel mempool.MempoolModel) ([]*mempool.MempoolTx, error) { + var toDeleteMempoolTxs []*mempool.MempoolTx + for _, pendingUpdateBlock := range blocks { + if pendingUpdateBlock.BlockStatus == BlockVerifiedStatus { + _, blockToDeleteMempoolTxs, err := mempoolModel.GetMempoolTxsByBlockHeight(pendingUpdateBlock.BlockHeight) + if err != nil { + logx.Errorf("GetMempoolTxsByBlockHeight err: %s", err.Error()) + return nil, err + } + if len(blockToDeleteMempoolTxs) == 0 { + continue + } + toDeleteMempoolTxs = append(toDeleteMempoolTxs, blockToDeleteMempoolTxs...)
+ } + } + return toDeleteMempoolTxs, nil +} + +func getZkbasContractLogs(cli *_rpc.ProviderClient, zkbasContract string, startHeight, endHeight uint64) ([]types.Log, error) { + query := ethereum.FilterQuery{ + FromBlock: big.NewInt(int64(startHeight)), + ToBlock: big.NewInt(int64(endHeight)), + Addresses: []common.Address{common.HexToAddress(zkbasContract)}, + } + logs, err := cli.FilterLogs(context.Background(), query) + if err != nil { + return nil, err + } + return logs, nil +} + +func getPriorityRequestCount(cli *_rpc.ProviderClient, zkbasContract string, startHeight, endHeight uint64) (int, error) { + zkbasInstance, err := zkbas.LoadZkbasInstance(cli, zkbasContract) + if err != nil { + return 0, err + } + priorityRequests, err := zkbasInstance.ZkbasFilterer. + FilterNewPriorityRequest(&bind.FilterOpts{Start: startHeight, End: &endHeight}) + if err != nil { + return 0, err + } + priorityRequestCount := 0 + for priorityRequests.Next() { + priorityRequestCount++ + } + return priorityRequestCount, nil +} + +func convertLogToNewPriorityRequestEvent(log types.Log) (*priorityrequest.PriorityRequest, error) { + var event zkbas.ZkbasNewPriorityRequest + if err := ZkbasContractAbi.UnpackIntoInterface(&event, EventNameNewPriorityRequest, log.Data); err != nil { + return nil, err + } + request := &priorityrequest.PriorityRequest{ + L1TxHash: log.TxHash.Hex(), + L1BlockHeight: int64(log.BlockNumber), + SenderAddress: event.Sender.Hex(), + RequestId: int64(event.SerialId), + TxType: int64(event.TxType), + Pubdata: common.Bytes2Hex(event.PubData), + ExpirationBlock: event.ExpirationBlock.Int64(), + Status: priorityrequest.PendingStatus, + } + return request, nil +} diff --git a/service/cronjob/monitor/internal/logic/governanceMonitor.go b/service/monitor/monitor/monitor_governance_blocks.go similarity index 55% rename from service/cronjob/monitor/internal/logic/governanceMonitor.go rename to service/monitor/monitor/monitor_governance_blocks.go index ff905f5d6..ceb43130b 100644 --- a/service/cronjob/monitor/internal/logic/governanceMonitor.go +++ b/service/monitor/monitor/monitor_governance_blocks.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,111 +14,99 @@ * limitations under the License. 
*/ -package logic +package monitor import ( "context" "encoding/json" + "fmt" "math/big" - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" - "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/zero/basic" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/zeromicro/go-zero/core/logx" - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/l1BlockMonitor" - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/zero/basic" + common2 "github.com/bnb-chain/zkbas/common" + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/l1syncedblock" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/types" ) -/* - MonitorGovernanceContract: monitor layer-1 governance related events -*/ -func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBlocksCount uint64, maxHandledBlocksCount int64, - governanceContract string, l1BlockMonitorModel L1BlockMonitorModel, sysconfigModel SysconfigModel, l2AssetInfoModel L2AssetInfoModel) (err error) { - logx.Info("========================= start MonitorGovernanceContract =========================") +func (m *Monitor) MonitorGovernanceBlocks() (err error) { // get latest handled l1 block from database by chain id - latestHandledBlock, err := l1BlockMonitorModel.GetLatestL1BlockMonitorByGovernance() + latestHandledBlock, err := m.L1SyncedBlockModel.GetLatestL1BlockByType(l1syncedblock.TypeGovernance) var handledHeight int64 if err != nil { - if err == errorcode.DbErrNotFound { - handledHeight = startHeight + if err == types.DbErrNotFound { + handledHeight = m.Config.ChainConfig.StartL1BlockHeight } else { - logx.Errorf("[l1BlockMonitorModel.GetLatestL1BlockMonitorByBlock]: %s", err.Error()) - return err + return fmt.Errorf("failed to get l1 block: %v", err) } } else { handledHeight = latestHandledBlock.L1BlockHeight } // get latest l1 block height(latest height - pendingBlocksCount) - latestHeight, err := cli.GetHeight() + latestHeight, err := m.cli.GetHeight() if err != nil { - logx.Errorf("[MonitorGovernanceContract] GetHeight err: %s", err.Error()) - return err + return fmt.Errorf("failed to get latest l1 block through rpc client: %v", err) } // compute safe height - safeHeight := latestHeight - pendingBlocksCount - safeHeight = uint64(util.MinInt64(int64(safeHeight), handledHeight+maxHandledBlocksCount)) + safeHeight := latestHeight - m.Config.ChainConfig.ConfirmBlocksCount + safeHeight = uint64(common2.MinInt64(int64(safeHeight), handledHeight+m.Config.ChainConfig.MaxHandledBlocksCount)) // check if safe height > handledHeight if safeHeight <= uint64(handledHeight) { return nil } - contractAddress := common.HexToAddress(governanceContract) - logx.Infof("[MonitorGovernanceContract] fromBlock: %d, toBlock: %d", big.NewInt(handledHeight+1), big.NewInt(int64(safeHeight))) + contractAddress := common.HexToAddress(m.governanceContractAddress) + logx.Infof("fromBlock: %d, toBlock: %d", big.NewInt(handledHeight+1), big.NewInt(int64(safeHeight))) query := ethereum.FilterQuery{ FromBlock: big.NewInt(handledHeight + 1), ToBlock: big.NewInt(int64(safeHeight)), Addresses: []common.Address{contractAddress}, } - logs, err := cli.FilterLogs(context.Background(), query) + logs, err := m.cli.FilterLogs(context.Background(), query) if err != 
nil { - logx.Errorf("[MonitorGovernanceContract] FilterLogs err: %s", err.Error()) - return err + return fmt.Errorf("failed to query logs through rpc client: %v", err) } var ( - l1EventInfos []*L1EventInfo - l2AssetInfoMap = make(map[string]*L2AssetInfo) - pendingUpdateL2AssetInfoMap = make(map[string]*L2AssetInfo) - pendingNewSysconfigInfoMap = make(map[string]*Sysconfig) - pendingUpdateSysconfigInfoMap = make(map[string]*Sysconfig) + l1EventInfos []*L1EventInfo + l2AssetInfoMap = make(map[string]*asset.Asset) + pendingUpdateL2AssetMap = make(map[string]*asset.Asset) + pendingNewSysConfigMap = make(map[string]*sysconfig.SysConfig) + pendingUpdateSysConfigMap = make(map[string]*sysconfig.SysConfig) ) for _, vlog := range logs { switch vlog.Topics[0].Hex() { case governanceLogNewAssetSigHash.Hex(): var event zkbas.GovernanceNewAsset if err = GovernanceContractAbi.UnpackIntoInterface(&event, EventNameNewAsset, vlog.Data); err != nil { - logx.Errorf("[MonitorGovernanceContract] UnpackIntoInterface err: %s", err.Error()) - return err + return fmt.Errorf("unpackIntoInterface err: %v", err) } l1EventInfo := &L1EventInfo{ EventType: EventTypeAddAsset, TxHash: vlog.TxHash.Hex(), } // get asset info by contract address - erc20Instance, err := zkbas.LoadERC20(cli, event.AssetAddress.Hex()) + erc20Instance, err := zkbas.LoadERC20(m.cli, event.AssetAddress.Hex()) if err != nil { - logx.Errorf("[MonitorGovernanceContract] LoadERC20 err: %s", err.Error()) return err } name, err := erc20Instance.Name(basic.EmptyCallOpts()) if err != nil { - logx.Errorf("[MonitorGovernanceContract] erc20Instance.Name err: %s", err.Error()) return err } symbol, err := erc20Instance.Symbol(basic.EmptyCallOpts()) if err != nil { - logx.Errorf("[MonitorGovernanceContract] erc20Instance.Symbol err: %s", err.Error()) return err } decimals, err := erc20Instance.Decimals(basic.EmptyCallOpts()) if err != nil { - logx.Errorf("[MonitorGovernanceContract] erc20Instance.Decimals err: %s", err.Error()) return err } - l2AssetInfo := &L2AssetInfo{ + l2AssetInfo := &asset.Asset{ AssetId: uint32(event.AssetId), L1Address: event.AssetAddress.Hex(), AssetName: name, @@ -132,50 +120,47 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl // parse event info var event zkbas.GovernanceNewGovernor if err = GovernanceContractAbi.UnpackIntoInterface(&event, EventNameNewGovernor, vlog.Data); err != nil { - logx.Errorf("[MonitorGovernanceContract] UnpackIntoInterface err: %s", err.Error()) - return err + return fmt.Errorf("unpackIntoInterface err: %v", err) } // set up database info l1EventInfo := &L1EventInfo{ EventType: EventTypeNewGovernor, TxHash: vlog.TxHash.Hex(), } - configInfo := &Sysconfig{ - Name: sysconfigName.Governor, + configInfo := &sysconfig.SysConfig{ + Name: types.Governor, Value: event.NewGovernor.Hex(), ValueType: "string", Comment: "governor", } // set into array l1EventInfos = append(l1EventInfos, l1EventInfo) - pendingNewSysconfigInfoMap[configInfo.Name] = configInfo + pendingNewSysConfigMap[configInfo.Name] = configInfo case governanceLogNewAssetGovernanceSigHash.Hex(): // parse event info var event zkbas.GovernanceNewAssetGovernance err = GovernanceContractAbi.UnpackIntoInterface(&event, EventNameNewAssetGovernance, vlog.Data) if err != nil { - logx.Errorf("[MonitorGovernanceContract] UnpackIntoInterface err: %s", err.Error()) - return err + return fmt.Errorf("unpackIntoInterface err: %v", err) } l1EventInfo := &L1EventInfo{ EventType: EventTypeNewAssetGovernance, TxHash: vlog.TxHash.Hex(), } - 
configInfo := &Sysconfig{ - Name: sysconfigName.AssetGovernanceContract, + configInfo := &sysconfig.SysConfig{ + Name: types.AssetGovernanceContract, Value: event.NewAssetGovernance.Hex(), ValueType: "string", Comment: "asset governance contract", } // set into array l1EventInfos = append(l1EventInfos, l1EventInfo) - pendingNewSysconfigInfoMap[configInfo.Name] = configInfo + pendingNewSysConfigMap[configInfo.Name] = configInfo case governanceLogValidatorStatusUpdateSigHash.Hex(): // parse event info var event zkbas.GovernanceValidatorStatusUpdate if err = GovernanceContractAbi.UnpackIntoInterface(&event, EventNameValidatorStatusUpdate, vlog.Data); err != nil { - logx.Errorf("[blockMoniter.MonitorGovernanceContract]<=>[GovernanceContractAbi.UnpackIntoInterface] %s", err.Error()) - return err + return fmt.Errorf("unpack GovernanceValidatorStatusUpdate, err: %v", err) } // set up database info l1EventInfo := &L1EventInfo{ @@ -187,12 +172,11 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl IsActive bool } // get data from db - if pendingNewSysconfigInfoMap[sysconfigName.Validators] != nil { - configInfo := pendingNewSysconfigInfoMap[sysconfigName.Validators] + if pendingNewSysConfigMap[types.Validators] != nil { + configInfo := pendingNewSysConfigMap[types.Validators] var validators map[string]*ValidatorInfo err = json.Unmarshal([]byte(configInfo.Value), &validators) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to unmarshal: %s", err.Error()) return err } if validators[event.ValidatorAddress.Hex()] == nil { @@ -205,16 +189,14 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl } validatorBytes, err := json.Marshal(validators) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to marshal: %s", err.Error()) return err } - pendingNewSysconfigInfoMap[sysconfigName.Validators].Value = string(validatorBytes) + pendingNewSysConfigMap[types.Validators].Value = string(validatorBytes) } else { - configInfo, err := sysconfigModel.GetSysconfigByName(sysconfigName.Validators) + configInfo, err := m.SysConfigModel.GetSysConfigByName(types.Validators) if err != nil { - if err != errorcode.DbErrNotFound { - logx.Errorf("[MonitorGovernanceContract] unable to get sysconfig by name: %s", err.Error()) - return err + if err != types.DbErrNotFound { + return fmt.Errorf("unable to get sys config by name: %v", err) } else { validators := make(map[string]*ValidatorInfo) validators[event.ValidatorAddress.Hex()] = &ValidatorInfo{ @@ -223,11 +205,10 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl } validatorsBytes, err := json.Marshal(validators) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to marshal: %s", err.Error()) - return err + return fmt.Errorf("unable to marshal validators: %v", err) } - pendingNewSysconfigInfoMap[sysconfigName.Validators] = &Sysconfig{ - Name: sysconfigName.Validators, + pendingNewSysConfigMap[types.Validators] = &sysconfig.SysConfig{ + Name: types.Validators, Value: string(validatorsBytes), ValueType: "map[string]*ValidatorInfo", Comment: "validator info", @@ -237,7 +218,6 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl var validators map[string]*ValidatorInfo err = json.Unmarshal([]byte(configInfo.Value), &validators) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to unmarshal: %s", err.Error()) return err } if validators[event.ValidatorAddress.Hex()] == nil { @@ -251,37 +231,36 @@ 
func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl // reset into map validatorBytes, err := json.Marshal(validators) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to marshal: %s", err.Error()) return err } - pendingUpdateSysconfigInfoMap[sysconfigName.Validators].Value = string(validatorBytes) + if pendingUpdateSysConfigMap[types.Validators] == nil { + pendingUpdateSysConfigMap[types.Validators] = configInfo + } + pendingUpdateSysConfigMap[types.Validators].Value = string(validatorBytes) } } l1EventInfos = append(l1EventInfos, l1EventInfo) - break case governanceLogAssetPausedUpdateSigHash.Hex(): // parse event info var event zkbas.GovernanceAssetPausedUpdate err = GovernanceContractAbi.UnpackIntoInterface(&event, EventNameAssetPausedUpdate, vlog.Data) if err != nil { - logx.Errorf("[blockMoniter.MonitorGovernanceContract]<=>[GovernanceContractAbi.UnpackIntoInterface] %s", err.Error()) - return err + return fmt.Errorf("unpack GovernanceAssetPausedUpdate failed, err: %v", err) } // set up database info l1EventInfo := &L1EventInfo{ EventType: EventTypeAssetPausedUpdate, TxHash: vlog.TxHash.Hex(), } - var assetInfo *L2AssetInfo + var assetInfo *asset.Asset if l2AssetInfoMap[event.Token.Hex()] != nil { assetInfo = l2AssetInfoMap[event.Token.Hex()] } else { - assetInfo, err = l2AssetInfoModel.GetAssetByAddress(event.Token.Hex()) + assetInfo, err = m.L2AssetModel.GetAssetByAddress(event.Token.Hex()) if err != nil { - logx.Errorf("[MonitorGovernanceContract] unable to get l2 asset by address: %s", err.Error()) - return err + return fmt.Errorf("unable to get l2 asset by address, err: %v", err) } - pendingUpdateL2AssetInfoMap[event.Token.Hex()] = assetInfo + pendingUpdateL2AssetMap[event.Token.Hex()] = assetInfo } var status uint32 if event.Paused { @@ -292,50 +271,47 @@ func MonitorGovernanceContract(cli *ProviderClient, startHeight int64, pendingBl assetInfo.Status = status // set into array l1EventInfos = append(l1EventInfos, l1EventInfo) - break default: - break } } // serialize into block info eventInfosBytes, err := json.Marshal(l1EventInfos) if err != nil { - logx.Errorf("[MonitorGovernanceContract] Marshal err: %s", err.Error()) return err } - l1BlockMonitorInfo := &l1BlockMonitor.L1BlockMonitor{ + syncedBlock := &l1syncedblock.L1SyncedBlock{ L1BlockHeight: int64(safeHeight), BlockInfo: string(eventInfosBytes), - MonitorType: l1BlockMonitor.MonitorTypeGovernance, + Type: l1syncedblock.TypeGovernance, } var ( - l2AssetInfos []*L2AssetInfo - pendingUpdateL2AssetInfos []*L2AssetInfo - pendingNewSysconfigInfos []*Sysconfig - pendingUpdateSysconfigInfos []*Sysconfig + l2AssetInfos []*asset.Asset + pendingUpdateL2AssetInfos []*asset.Asset + pendingNewSysconfigInfos []*sysconfig.SysConfig + pendingUpdateSysconfigInfos []*sysconfig.SysConfig ) for _, l2AssetInfo := range l2AssetInfoMap { l2AssetInfos = append(l2AssetInfos, l2AssetInfo) } - for _, pendingUpdateL2AssetInfo := range pendingUpdateL2AssetInfoMap { + for _, pendingUpdateL2AssetInfo := range pendingUpdateL2AssetMap { pendingUpdateL2AssetInfos = append(pendingUpdateL2AssetInfos, pendingUpdateL2AssetInfo) } - for _, pendingNewSysconfigInfo := range pendingNewSysconfigInfoMap { + for _, pendingNewSysconfigInfo := range pendingNewSysConfigMap { pendingNewSysconfigInfos = append(pendingNewSysconfigInfos, pendingNewSysconfigInfo) } - for _, pendingUpdateSysconfigInfo := range pendingUpdateSysconfigInfoMap { + for _, pendingUpdateSysconfigInfo := range pendingUpdateSysConfigMap { 
pendingUpdateSysconfigInfos = append(pendingUpdateSysconfigInfos, pendingUpdateSysconfigInfo) } - logx.Infof("[MonitorGovernanceContract] l1 block info height: %v, l2 asset info size: %v, pending update l2 asset info size: %v", - l1BlockMonitorInfo.L1BlockHeight, - len(l2AssetInfos), - len(pendingUpdateL2AssetInfos), - ) - if err = l1BlockMonitorModel.CreateGovernanceMonitorInfo(l1BlockMonitorInfo, l2AssetInfos, + if len(l2AssetInfos) > 0 || len(pendingUpdateL2AssetInfos) > 0 { + logx.Infof("l1 block info height: %v, l2 asset info size: %v, pending update l2 asset info size: %v", + syncedBlock.L1BlockHeight, + len(l2AssetInfos), + len(pendingUpdateL2AssetInfos), + ) + } + if err = m.L1SyncedBlockModel.CreateGovernanceBlock(syncedBlock, l2AssetInfos, pendingUpdateL2AssetInfos, pendingNewSysconfigInfos, pendingUpdateSysconfigInfos); err != nil { - logx.Errorf("[MonitorGovernanceContract] CreateGovernanceMonitorInfo err: %s", err.Error()) - return err + return fmt.Errorf("store governance monitor info error, err: %v", err) } - logx.Info("========================= end MonitorGovernanceContract =========================") return nil } diff --git a/service/monitor/monitor/monitor_priority_requests.go b/service/monitor/monitor/monitor_priority_requests.go new file mode 100644 index 000000000..d73645e87 --- /dev/null +++ b/service/monitor/monitor/monitor_priority_requests.go @@ -0,0 +1,189 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package monitor + +import ( + "encoding/json" + "fmt" + + "github.com/ethereum/go-ethereum/common" + + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/types" +) + +func (m *Monitor) MonitorPriorityRequests() error { + pendingRequests, err := m.PriorityRequestModel.GetPriorityRequestsByStatus(PendingStatus) + if err != nil { + if err != types.DbErrNotFound { + return err + } + return nil + } + var ( + pendingNewMempoolTxs []*mempool.MempoolTx + ) + // get last handled request id + currentRequestId, err := m.PriorityRequestModel.GetLatestHandledRequestId() + if err != nil { + return fmt.Errorf("unable to get last handled request id, err: %v", err) + } + + for _, request := range pendingRequests { + // request id must be in order + if request.RequestId != currentRequestId+1 { + return fmt.Errorf("invalid request id: expected %d, got %d", currentRequestId+1, request.RequestId) + } + currentRequestId++ + + txHash := ComputeL1TxTxHash(request.RequestId, request.L1TxHash) + + mempoolTx := &mempool.MempoolTx{ + TxHash: txHash, + GasFeeAssetId: types.NilAssetId, + GasFee: types.NilAssetAmountStr, + NftIndex: types.NilTxNftIndex, + PairIndex: types.NilPairIndex, + AssetId: types.NilAssetId, + TxAmount: types.NilAssetAmountStr, + NativeAddress: request.SenderAddress, + AccountIndex: types.NilAccountIndex, + Nonce: types.NilNonce, + ExpiredAt: types.NilExpiredAt, + L2BlockHeight: types.NilBlockHeight, + Status: mempool.PendingTxStatus, + } + // handle request based on request type + switch request.TxType { + case TxTypeRegisterZns: + // parse request info + txInfo, err := chain.ParseRegisterZnsPubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse registerZNS pub data, err: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return err + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeCreatePair: + txInfo, err := chain.ParseCreatePairPubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse create pair pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeUpdatePairRate: + txInfo, err := chain.ParseUpdatePairRatePubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse update pair rate pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeDeposit: + txInfo, err := chain.ParseDepositPubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse deposit pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeDepositNft: + txInfo, err := chain.ParseDepositNftPubData(common.FromHex(request.Pubdata)) + if err != 
nil { + return fmt.Errorf("unable to parse deposit nft pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeFullExit: + txInfo, err := chain.ParseFullExitPubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse full exit pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + case TxTypeFullExitNft: + txInfo, err := chain.ParseFullExitNftPubData(common.FromHex(request.Pubdata)) + if err != nil { + return fmt.Errorf("unable to parse full exit nft pub data: %v", err) + } + + txInfoBytes, err := json.Marshal(txInfo) + if err != nil { + return fmt.Errorf("unable to serialize request info: %v", err) + } + + mempoolTx.TxType = int64(txInfo.TxType) + mempoolTx.TxInfo = string(txInfoBytes) + + pendingNewMempoolTxs = append(pendingNewMempoolTxs, mempoolTx) + default: + return fmt.Errorf("invalid request type: %d", request.TxType) + } + } + + // update db + if err = m.PriorityRequestModel.CreateMempoolTxsAndUpdateRequests(pendingNewMempoolTxs, pendingRequests); err != nil { + return fmt.Errorf("unable to create mempool txs and update priority requests, error: %v", err) + } + return nil +} diff --git a/service/cronjob/monitor/internal/logic/constants.go b/service/monitor/monitor/types.go similarity index 55% rename from service/cronjob/monitor/internal/logic/constants.go rename to service/monitor/monitor/types.go index 8c47f7c4d..0ceed8021 100644 --- a/service/cronjob/monitor/internal/logic/constants.go +++ b/service/monitor/monitor/types.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,69 +14,30 @@ * limitations under the License. 
*/ -package logic +package monitor import ( "strings" - "github.com/bnb-chain/zkbas-eth-rpc/_rpc" - zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/crypto" - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonTx" - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/l1BlockMonitor" - "github.com/bnb-chain/zkbas/common/model/l1TxSender" - "github.com/bnb-chain/zkbas/common/model/l2BlockEventMonitor" - "github.com/bnb-chain/zkbas/common/model/l2TxEventMonitor" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/sysconfig" -) - -type ( - ProviderClient = _rpc.ProviderClient - AuthClient = _rpc.AuthClient - - L1BlockMonitorModel = l1BlockMonitor.L1BlockMonitorModel - L2TxEventMonitorModel = l2TxEventMonitor.L2TxEventMonitorModel - L2BlockEventMonitorModel = l2BlockEventMonitor.L2BlockEventMonitorModel - SysconfigModel = sysconfig.SysconfigModel - MempoolModel = mempool.MempoolModel - BlockModel = block.BlockModel - L2AssetInfoModel = asset.AssetInfoModel - L1TxSenderModel = l1TxSender.L1TxSenderModel - - L2AssetInfo = asset.AssetInfo - Sysconfig = sysconfig.Sysconfig - L2TxEventMonitor = l2TxEventMonitor.L2TxEventMonitor - L2BlockEventMonitor = l2BlockEventMonitor.L2BlockEventMonitor - Block = block.Block - L1TxSender = l1TxSender.L1TxSender - MempoolTx = mempool.MempoolTx - - ZkbasBlockCommit = zkbas.ZkbasBlockCommit - ZkbasBlockVerification = zkbas.ZkbasBlockVerification + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/priorityrequest" + "github.com/bnb-chain/zkbas/types" ) const ( - // zkbas event name EventNameNewPriorityRequest = "NewPriorityRequest" EventNameBlockCommit = "BlockCommit" EventNameBlockVerification = "BlockVerification" - EventNameBlocksRevert = "BlocksRevert" - // tx type for l2 block event monitors EventTypeNewPriorityRequest = 0 - EventTypeCommittedBlock = l2BlockEventMonitor.CommittedBlockEventType - EventTypeVerifiedBlock = l2BlockEventMonitor.VerifiedBlockEventType - EventTypeRevertedBlock = l2BlockEventMonitor.RevertedBlockEventType - // status - PendingStatusL2BlockEventMonitor = l2BlockEventMonitor.PendingStatus + EventTypeCommittedBlock = 1 + EventTypeVerifiedBlock = 2 + EventTypeRevertedBlock = 3 - // governance event name EventNameNewAsset = "NewAsset" EventNameNewGovernor = "NewGovernor" EventNameNewAssetGovernance = "NewAssetGovernance" @@ -89,28 +50,21 @@ const ( EventTypeValidatorStatusUpdate = 7 EventTypeAssetPausedUpdate = 8 - // event status - PendingStatus = l2TxEventMonitor.PendingStatus + PendingStatus = priorityrequest.PendingStatus - // tx type - TxTypeRegisterZns = commonTx.TxTypeRegisterZns - TxTypeCreatePair = commonTx.TxTypeCreatePair - TxTypeUpdatePairRate = commonTx.TxTypeUpdatePairRate - TxTypeDeposit = commonTx.TxTypeDeposit - TxTypeDepositNft = commonTx.TxTypeDepositNft - TxTypeFullExit = commonTx.TxTypeFullExit - TxTypeFullExitNft = commonTx.TxTypeFullExitNft - - GeneralAssetType = commonAsset.GeneralAssetType + TxTypeRegisterZns = types.TxTypeRegisterZns + TxTypeCreatePair = types.TxTypeCreatePair + TxTypeUpdatePairRate = types.TxTypeUpdatePairRate + TxTypeDeposit = types.TxTypeDeposit + TxTypeDepositNft = types.TxTypeDepositNft + TxTypeFullExit = types.TxTypeFullExit + 
TxTypeFullExitNft = types.TxTypeFullExitNft BlockVerifiedStatus = block.StatusVerifiedAndExecuted - - L1TxSenderPendingStatus = l1TxSender.PendingStatus - L1TxSenderHandledStatus = l1TxSender.HandledStatus ) var ( - ZkbasContractAbi, _ = abi.JSON(strings.NewReader(zkbas.ZkbasABI)) + ZkbasContractAbi, _ = abi.JSON(strings.NewReader(zkbas.ZkbasMetaData.ABI)) // Zkbas contract logs sig zkbasLogNewPriorityRequestSig = []byte("NewPriorityRequest(address,uint64,uint8,bytes,uint256)") zkbasLogWithdrawalSig = []byte("Withdrawal(uint16,uint128)") @@ -126,7 +80,7 @@ var ( zkbasLogBlockVerificationSigHash = crypto.Keccak256Hash(zkbasLogBlockVerificationSig) zkbasLogBlocksRevertSigHash = crypto.Keccak256Hash(zkbasLogBlocksRevertSig) - GovernanceContractAbi, _ = abi.JSON(strings.NewReader(zkbas.GovernanceABI)) + GovernanceContractAbi, _ = abi.JSON(strings.NewReader(zkbas.GovernanceMetaData.ABI)) governanceLogNewAssetSig = []byte("NewAsset(address,uint16)") governanceLogNewGovernorSig = []byte("NewGovernor(address)") @@ -141,6 +95,9 @@ var ( governanceLogAssetPausedUpdateSigHash = crypto.Keccak256Hash(governanceLogAssetPausedUpdateSig) ) -const ( - ZeroBigIntString = "0" -) +type L1EventInfo struct { + // deposit / lock / committed / verified / reverted + EventType uint8 + // tx hash + TxHash string +} diff --git a/common/commonAsset/types_test.go b/service/monitor/monitor/util.go similarity index 59% rename from common/commonAsset/types_test.go rename to service/monitor/monitor/util.go index f04390828..0406721a8 100644 --- a/common/commonAsset/types_test.go +++ b/service/monitor/monitor/util.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,25 +12,21 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ -package commonAsset +package monitor import ( - "fmt" - "math/big" - "testing" + "encoding/hex" + "strconv" - "github.com/bnb-chain/zkbas-crypto/ffmath" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" ) -func TestFromFormatAccountInfo(t *testing.T) { - a := big.NewInt(9) - b := big.NewInt(128) - c := ffmath.Div(a, b) - fmt.Println(c.String()) - - d := big.NewInt(2) - fmt.Println(new(big.Int).SetBit(d, 0, 1).String()) +func ComputeL1TxTxHash(requestId int64, txHash string) string { + hFunc := mimc.NewMiMC() + hFunc.Write([]byte(strconv.FormatInt(requestId, 10))) + hFunc.Write(common.FromHex(txHash)) + return hex.EncodeToString(hFunc.Sum(nil)) } diff --git a/service/rpc/globalRPC/internal/config/config.go b/service/prover/config/config.go old mode 100755 new mode 100644 similarity index 64% rename from service/rpc/globalRPC/internal/config/config.go rename to service/prover/config/config.go index e0e33f07a..d391c8ee2 --- a/service/rpc/globalRPC/internal/config/config.go +++ b/service/prover/config/config.go @@ -3,14 +3,19 @@ package config import ( "github.com/zeromicro/go-zero/core/logx" "github.com/zeromicro/go-zero/core/stores/cache" - "github.com/zeromicro/go-zero/zrpc" ) type Config struct { - zrpc.RpcServerConf Postgres struct { DataSource string } - LogConf logx.LogConf CacheRedis cache.CacheConf + LogConf logx.LogConf + KeyPath struct { + ProvingKeyPath []string + VerifyingKeyPath []string + } + BlockConfig struct { + OptionalBlockSizes []int + } } diff --git a/configyaml/prover.yaml.example b/service/prover/etc/config.yaml.example similarity index 61% rename from configyaml/prover.yaml.example rename to service/prover/etc/config.yaml.example index 72d775be3..17f06f93b 100644 --- a/configyaml/prover.yaml.example +++ b/service/prover/etc/config.yaml.example @@ -1,4 +1,4 @@ -Name: prover.cronjob +Name: prover Postgres: DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable @@ -10,4 +10,15 @@ CacheRedis: KeyPath: ProvingKeyPath: [/app/zkbas1.pk,/app/zkbas10.pk] VerifyingKeyPath: [/app/zkbas1.vk,/app/zkbas10.vk] - KeyTxCounts: [1, 10] + +BlockConfig: + OptionalBlockSizes: [1, 10] + +LogConf: + ServiceName: prover + Mode: console + Path: ./log/prover + StackCooldownMillis: 500 + Level: error + + diff --git a/service/prover/prover.go b/service/prover/prover.go new file mode 100644 index 000000000..c95c862e7 --- /dev/null +++ b/service/prover/prover.go @@ -0,0 +1,39 @@ +package prover + +import ( + "github.com/robfig/cron/v3" + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/proc" + + "github.com/bnb-chain/zkbas/service/prover/config" + "github.com/bnb-chain/zkbas/service/prover/prover" +) + +func Run(configFile string) error { + var c config.Config + conf.MustLoad(configFile, &c) + p := prover.NewProver(c) + logx.MustSetup(c.LogConf) + logx.DisableStat() + proc.AddShutdownListener(func() { + logx.Close() + }) + + cronJob := cron.New(cron.WithChain( + cron.SkipIfStillRunning(cron.DiscardLogger), + )) + _, err := cronJob.AddFunc("@every 10s", func() { + logx.Info("start prover job......") + // cron job for receiving cryptoBlock and handling + err := p.ProveBlock() + if err != nil { + logx.Errorf("failed to generate proof, %v", err) + } + }) + if err != nil { + panic(err) + } + cronJob.Start() + select {} +} diff --git a/service/prover/prover/prover.go b/service/prover/prover/prover.go new file mode 100644 index 
000000000..4620fc3b0 --- /dev/null +++ b/service/prover/prover/prover.go @@ -0,0 +1,178 @@ +package prover + +import ( + "encoding/json" + "fmt" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/redis" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" + "github.com/bnb-chain/zkbas/common/prove" + "github.com/bnb-chain/zkbas/common/redislock" + "github.com/bnb-chain/zkbas/dao/blockwitness" + "github.com/bnb-chain/zkbas/dao/proof" + "github.com/bnb-chain/zkbas/service/prover/config" + "github.com/bnb-chain/zkbas/types" +) + +type Prover struct { + Config config.Config + + RedisConn *redis.Redis + + ProofModel proof.ProofModel + BlockWitnessModel blockwitness.BlockWitnessModel + + VerifyingKeys []groth16.VerifyingKey + ProvingKeys []groth16.ProvingKey + OptionalBlockSizes []int + R1cs []frontend.CompiledConstraintSystem +} + +func WithRedis(redisType string, redisPass string) redis.Option { + return func(p *redis.Redis) { + p.Type = redisType + p.Pass = redisPass + } +} +func NewProver(c config.Config) *Prover { + db, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) + if err != nil { + logx.Errorf("gorm connect db error, err = %s", err.Error()) + } + redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) + prover := &Prover{ + Config: c, + RedisConn: redisConn, + BlockWitnessModel: blockwitness.NewBlockWitnessModel(db), + ProofModel: proof.NewProofModel(db), + } + + prover.OptionalBlockSizes = c.BlockConfig.OptionalBlockSizes + prover.ProvingKeys = make([]groth16.ProvingKey, len(prover.OptionalBlockSizes)) + prover.VerifyingKeys = make([]groth16.VerifyingKey, len(prover.OptionalBlockSizes)) + prover.R1cs = make([]frontend.CompiledConstraintSystem, len(prover.OptionalBlockSizes)) + for i := 0; i < len(prover.OptionalBlockSizes); i++ { + var circuit block.BlockConstraints + circuit.TxsCount = prover.OptionalBlockSizes[i] + circuit.Txs = make([]block.TxConstraints, circuit.TxsCount) + for i := 0; i < circuit.TxsCount; i++ { + circuit.Txs[i] = block.GetZeroTxConstraint() + } + logx.Infof("start compile block size %d circuit", circuit.TxsCount) + prover.R1cs[i], err = frontend.Compile(ecc.BN254, r1cs.NewBuilder, &circuit, frontend.IgnoreUnconstrainedInputs()) + if err != nil { + panic("r1cs init error") + } + logx.Infof("circuit constraints: %d", prover.R1cs[i].GetNbConstraints()) + logx.Info("finish compile circuit") + // read proving and verifying keys + prover.ProvingKeys[i], err = prove.LoadProvingKey(c.KeyPath.ProvingKeyPath[i]) + if err != nil { + panic("provingKey loading error") + } + prover.VerifyingKeys[i], err = prove.LoadVerifyingKey(c.KeyPath.VerifyingKeyPath[i]) + if err != nil { + panic("verifyingKey loading error") + } + } + + return prover +} + +func (p *Prover) ProveBlock() error { + blockWitness, err := func() (*blockwitness.BlockWitness, error) { + lock := redislock.GetRedisLockByKey(p.RedisConn, RedisLockKey) + err := redislock.TryAcquireLock(lock) + if err != nil { + return nil, err + } + //nolint:errcheck + defer lock.Release() + + // Fetch unproved block witness. + blockWitness, err := p.BlockWitnessModel.GetLatestBlockWitness() + if err != nil { + return nil, err + } + // Update status of block witness. 
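+		// The status update runs while the Redis lock acquired above is still held; if proof
+		// generation fails later, the deferred function below restores StatusPublished so a
+		// subsequent run can pick the witness up again.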
+ err = p.BlockWitnessModel.UpdateBlockWitnessStatus(blockWitness, blockwitness.StatusReceived) + if err != nil { + return nil, err + } + return blockWitness, nil + }() + if err != nil { + if err == types.DbErrNotFound { + return nil + } + return err + } + defer func() { + if err == nil { + return + } + + // Recover block witness status. + res := p.BlockWitnessModel.UpdateBlockWitnessStatus(blockWitness, blockwitness.StatusPublished) + if res != nil { + logx.Errorf("revert block witness status failed, err %v", res) + } + }() + + // Parse crypto block. + var cryptoBlock *block.Block + err = json.Unmarshal([]byte(blockWitness.WitnessData), &cryptoBlock) + if err != nil { + return err + } + + var keyIndex int + for ; keyIndex < len(p.OptionalBlockSizes); keyIndex++ { + if len(cryptoBlock.Txs) == p.OptionalBlockSizes[keyIndex] { + break + } + } + if keyIndex == len(p.OptionalBlockSizes) { + return fmt.Errorf("can't find correct vk/pk") + } + + // Generate proof. + blockProof, err := prove.GenerateProof(p.R1cs[keyIndex], p.ProvingKeys[keyIndex], p.VerifyingKeys[keyIndex], cryptoBlock) + if err != nil { + return fmt.Errorf("failed to generateProof, err: %v", err) + } + + formattedProof, err := prove.FormatProof(blockProof, cryptoBlock.OldStateRoot, cryptoBlock.NewStateRoot, cryptoBlock.BlockCommitment) + if err != nil { + return fmt.Errorf("unable to format blockProof: %v", err) + } + + // Marshal formatted proof. + proofBytes, err := json.Marshal(formattedProof) + if err != nil { + return err + } + + // Check the existence of block proof. + _, err = p.ProofModel.GetProofByBlockNumber(blockWitness.Height) + if err == nil { + logx.Errorf("blockProof of height %d exists", blockWitness.Height) + return nil + } + + var row = &proof.Proof{ + ProofInfo: string(proofBytes), + BlockNumber: blockWitness.Height, + Status: proof.NotSent, + } + err = p.ProofModel.CreateProof(row) + return err +} diff --git a/common/util/bigIntHelper.go b/service/prover/prover/types.go similarity index 83% rename from common/util/bigIntHelper.go rename to service/prover/prover/types.go index a7fe9dcb5..164742e2c 100644 --- a/common/util/bigIntHelper.go +++ b/service/prover/prover/types.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,15 +12,8 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ -package util - -import ( - "math/big" -) +package prover -var ( - ZeroBigInt = big.NewInt(0) -) +const RedisLockKey = "prover_mutex_key" diff --git a/service/cronjob/prover/verifier_parse.py b/service/prover/verifier_parse.py similarity index 100% rename from service/cronjob/prover/verifier_parse.py rename to service/prover/verifier_parse.py diff --git a/service/rpc/globalRPC/Dockerfile b/service/rpc/globalRPC/Dockerfile deleted file mode 100644 index 4f3f269ec..000000000 --- a/service/rpc/globalRPC/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ - -FROM golang:alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 - -RUN apk update --no-cache && apk add --no-cache tzdata - -FROM alpine:3.4 - -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=builder /usr/share/zoneinfo/Asia/Shanghai /usr/share/zoneinfo/Asia/Shanghai -ENV TZ Asia/Shanghai - -WORKDIR /app -COPY bin/globalrpc /app/globalrpc -COPY configyaml /app/etc - -CMD ["./globalrpc", "-f", "etc/globalrpc.yaml"] \ No newline at end of file diff --git a/service/rpc/globalRPC/etc/config.yaml.example b/service/rpc/globalRPC/etc/config.yaml.example deleted file mode 100644 index 538a4f35a..000000000 --- a/service/rpc/globalRPC/etc/config.yaml.example +++ /dev/null @@ -1,21 +0,0 @@ -Name: global.rpc -ListenOn: 127.0.0.1:8080 - -Etcd: - Hosts: - - 127.0.0.1:2379 - Key: global.rpc - -Postgres: - DataSource: host=127.0.0.1 user=postgres password=ZecreyProtocolDB@123 dbname=zkbas port=5432 sslmode=disable - -CacheRedis: - - Host: 127.0.0.1:6379 - Pass: myredis - Type: node - -LogConf: - ServiceName: global.rpc - Mode: console - Path: ./log/globalrpc - StackCooldownMillis: 500 diff --git a/service/rpc/globalRPC/globalRPC.proto b/service/rpc/globalRPC/globalRPC.proto deleted file mode 100644 index bfa33af35..000000000 --- a/service/rpc/globalRPC/globalRPC.proto +++ /dev/null @@ -1,223 +0,0 @@ -syntax = "proto3"; - -package globalRPCProto; - -option go_package = "./globalRPCProto"; - -/* =================== Account =================== */ -message ReqGetLatestAccountInfoByAccountIndex{ - uint32 AccountIndex = 1; -} - -message RespGetLatestAccountInfoByAccountIndex{ - int64 AccountId = 1; - int64 AccountIndex = 2; - string AccountName = 3; - string PublicKey = 4; - string AccountNameHash = 5; - string L1Address = 6; - int64 Nonce = 7; - int64 CollectionNonce = 8; - repeated AssetResult AccountAsset = 9; - string AssetRoot = 10; - int64 Status = 11; -} - -/* =================== Asset =================== */ -/* - Proto Name: GetLatestAssetsListByAccountIndex - Description: get latest account assets info by accountIndex -*/ -message ReqGetLatestAssetsListByAccountIndex{ - uint32 AccountIndex = 1; -} - -message AssetResult{ - uint32 AssetId = 1; - string Balance = 2; - string LpAmount = 3; - string OfferCanceledOrFinalized = 4; -} - -message RespGetLatestAssetsListByAccountIndex{ - repeated AssetResult ResultAssetsList = 1; -} - -/* ====================== Liquidity ====================== */ - -/* - Proto Name: getLatestPairInfo - Description: get latest pair asset info - */ -message ReqGetLatestPairInfo{ - uint32 PairIndex = 1; -} - -message RespGetLatestPairInfo{ - // assetA Amount - string AssetAAmount = 1; - // assetA Id - uint32 AssetAId = 2; - // assetB Amount - string AssetBAmount = 3; - // assetA Id - uint32 AssetBId = 4; - // LP share Amount - string LpAmount = 5; -} - -/* - Proto Name: GetSwapAmount - Description: get swap amount - */ -message ReqGetSwapAmount{ - uint32 PairIndex = 
1; - uint32 AssetId = 2; - string AssetAmount = 3; - bool IsFrom = 4; -} - -message RespGetSwapAmount{ - string SwapAssetAmount = 1; - uint32 SwapAssetId = 2; -} - -/* - Proto Name: GetLpValue - Description: preview lp value - */ -message ReqGetLpValue{ - uint32 PairIndex = 1; - string LPAmount = 2; -} - -message RespGetLpValue{ - uint32 AssetAId = 1; - string AssetAAmount = 2; - uint32 AssetBId = 3; - string AssetBAmount = 4; -} - -/* ====================== Transaction ====================== */ - -message TxDetailInfo{ - uint32 AssetId = 1; - uint32 AssetType = 2; - uint32 AccountIndex = 3; - string AccountName = 4; - string BalanceDelta = 5; -} - - -message TxInfo{ - string TxHash = 1; - uint32 TxType = 2; - uint32 GasFeeAssetId = 3; - string GasFee = 4; - uint32 NftIndex = 5; - uint32 PairIndex = 6; - uint32 AssetId = 7; - string TxAmount = 8; - string NativeAddress = 9; - repeated TxDetailInfo TxDetails = 10; - string Memo = 11; - uint32 AccountIndex = 12; - uint64 Nonce = 13; - uint64 CreateAt = 14; - uint32 Status = 15; - uint64 BlockHeight = 16; -} - -/* - Proto Name: sendTx - Description: sendTransaction - */ -message ReqSendTx{ - uint32 TxType = 1; - string TxInfo = 2; -} - -message RespSendTx{ - string TxId = 1; -} - -/* - Proto Name: getTransactionCount - Description: getMaxNonce - */ - -message ReqGetNextNonce{ - uint32 AccountIndex = 1; -} - -message RespGetNextNonce{ - uint64 Nonce = 1; -} - - -message ReqSendCreateCollectionTx{ - string TxInfo = 1; -} - -message RespSendCreateCollectionTx{ - int64 CollectionId = 1; -} - -message ReqSendMintNftTx{ - string TxInfo = 1; -} - -message RespSendMintNftTx{ - int64 NftIndex = 1; -} - - -/* ====================== NFT ====================== */ - -/* - Proto Name: getMaxOfferId - Description: - */ - - -message ReqGetMaxOfferId{ - uint32 AccountIndex = 1; - } - -message RespGetMaxOfferId{ - uint64 OfferId = 1; -} - -/* - Proto Name: sendTxByRawInfo - Description: sendTransactionByRawInfo - */ -message ReqSendTxByRawInfo{ - string TxInfo = 1; -} - -/* ====================== END ====================== */ - - - -service globalRPC { - rpc getLatestAssetsListByAccountIndex (ReqGetLatestAssetsListByAccountIndex) returns (RespGetLatestAssetsListByAccountIndex); - rpc getLatestAccountInfoByAccountIndex (ReqGetLatestAccountInfoByAccountIndex) returns (RespGetLatestAccountInfoByAccountIndex); - rpc getLatestPairInfo(ReqGetLatestPairInfo) returns (RespGetLatestPairInfo); - rpc getSwapAmount(ReqGetSwapAmount) returns (RespGetSwapAmount); - rpc getLpValue(ReqGetLpValue) returns (RespGetLpValue); - rpc sendTx(ReqSendTx) returns (RespSendTx); - rpc sendCreateCollectionTx(ReqSendCreateCollectionTx) returns (RespSendCreateCollectionTx); - rpc sendMintNftTx(ReqSendMintNftTx) returns (RespSendMintNftTx); - rpc getNextNonce(ReqGetNextNonce) returns (RespGetNextNonce); - rpc getMaxOfferId(ReqGetMaxOfferId) returns (RespGetMaxOfferId); - rpc sendAddLiquidityTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendAtomicMatchTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendCancelOfferTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendRemoveLiquidityTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendSwapTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendTransferNftTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendTransferTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendWithdrawNftTx(ReqSendTxByRawInfo) returns (RespSendTx); - rpc sendWithdrawTx(ReqSendTxByRawInfo) returns (RespSendTx); -} \ No newline at end of file diff --git 
a/service/rpc/globalRPC/globalrpc.go b/service/rpc/globalRPC/globalrpc.go deleted file mode 100644 index 9226fd6ef..000000000 --- a/service/rpc/globalRPC/globalrpc.go +++ /dev/null @@ -1,43 +0,0 @@ -package main - -import ( - "flag" - "fmt" - - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/service" - "github.com/zeromicro/go-zero/zrpc" - "google.golang.org/grpc" - "google.golang.org/grpc/reflection" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -var configFile = flag.String("f", - "./etc/config.yaml", "the config file") - -func main() { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - ctx := svc.NewServiceContext(c) - svr := server.NewGlobalRPCServer(ctx) - logx.DisableStat() - - s := zrpc.MustNewServer(c.RpcServerConf, func(grpcServer *grpc.Server) { - globalRPCProto.RegisterGlobalRPCServer(grpcServer, svr) - - if c.Mode == service.DevMode || c.Mode == service.TestMode { - reflection.Register(grpcServer) - } - }) - defer s.Stop() - - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) - s.Start() -} diff --git a/service/rpc/globalRPC/globalrpc/globalrpc.go b/service/rpc/globalRPC/globalrpc/globalrpc.go deleted file mode 100644 index 85bbfc442..000000000 --- a/service/rpc/globalRPC/globalrpc/globalrpc.go +++ /dev/null @@ -1,167 +0,0 @@ -// Code generated by goctl. DO NOT EDIT! -// Source: globalRPC.proto - -package globalrpc - -import ( - "context" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - - "github.com/zeromicro/go-zero/zrpc" - "google.golang.org/grpc" -) - -type ( - AssetResult = globalRPCProto.AssetResult - ReqGetLatestAccountInfoByAccountIndex = globalRPCProto.ReqGetLatestAccountInfoByAccountIndex - ReqGetLatestAssetsListByAccountIndex = globalRPCProto.ReqGetLatestAssetsListByAccountIndex - ReqGetLatestPairInfo = globalRPCProto.ReqGetLatestPairInfo - ReqGetLpValue = globalRPCProto.ReqGetLpValue - ReqGetMaxOfferId = globalRPCProto.ReqGetMaxOfferId - ReqGetNextNonce = globalRPCProto.ReqGetNextNonce - ReqGetSwapAmount = globalRPCProto.ReqGetSwapAmount - ReqSendCreateCollectionTx = globalRPCProto.ReqSendCreateCollectionTx - ReqSendMintNftTx = globalRPCProto.ReqSendMintNftTx - ReqSendTx = globalRPCProto.ReqSendTx - ReqSendTxByRawInfo = globalRPCProto.ReqSendTxByRawInfo - RespGetLatestAccountInfoByAccountIndex = globalRPCProto.RespGetLatestAccountInfoByAccountIndex - RespGetLatestAssetsListByAccountIndex = globalRPCProto.RespGetLatestAssetsListByAccountIndex - RespGetLatestPairInfo = globalRPCProto.RespGetLatestPairInfo - RespGetLpValue = globalRPCProto.RespGetLpValue - RespGetMaxOfferId = globalRPCProto.RespGetMaxOfferId - RespGetNextNonce = globalRPCProto.RespGetNextNonce - RespGetSwapAmount = globalRPCProto.RespGetSwapAmount - RespSendCreateCollectionTx = globalRPCProto.RespSendCreateCollectionTx - RespSendMintNftTx = globalRPCProto.RespSendMintNftTx - RespSendTx = globalRPCProto.RespSendTx - TxDetailInfo = globalRPCProto.TxDetailInfo - TxInfo = globalRPCProto.TxInfo - - GlobalRPC interface { - GetLatestAssetsListByAccountIndex(ctx context.Context, in *ReqGetLatestAssetsListByAccountIndex, opts ...grpc.CallOption) (*RespGetLatestAssetsListByAccountIndex, error) - GetLatestAccountInfoByAccountIndex(ctx context.Context, in 
*ReqGetLatestAccountInfoByAccountIndex, opts ...grpc.CallOption) (*RespGetLatestAccountInfoByAccountIndex, error) - GetLatestPairInfo(ctx context.Context, in *ReqGetLatestPairInfo, opts ...grpc.CallOption) (*RespGetLatestPairInfo, error) - GetSwapAmount(ctx context.Context, in *ReqGetSwapAmount, opts ...grpc.CallOption) (*RespGetSwapAmount, error) - GetLpValue(ctx context.Context, in *ReqGetLpValue, opts ...grpc.CallOption) (*RespGetLpValue, error) - SendTx(ctx context.Context, in *ReqSendTx, opts ...grpc.CallOption) (*RespSendTx, error) - SendCreateCollectionTx(ctx context.Context, in *ReqSendCreateCollectionTx, opts ...grpc.CallOption) (*RespSendCreateCollectionTx, error) - SendMintNftTx(ctx context.Context, in *ReqSendMintNftTx, opts ...grpc.CallOption) (*RespSendMintNftTx, error) - GetNextNonce(ctx context.Context, in *ReqGetNextNonce, opts ...grpc.CallOption) (*RespGetNextNonce, error) - GetMaxOfferId(ctx context.Context, in *ReqGetMaxOfferId, opts ...grpc.CallOption) (*RespGetMaxOfferId, error) - SendAddLiquidityTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendAtomicMatchTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendCancelOfferTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendRemoveLiquidityTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendSwapTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendTransferNftTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendTransferTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendWithdrawNftTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - SendWithdrawTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) - } - - defaultGlobalRPC struct { - cli zrpc.Client - } -) - -func NewGlobalRPC(cli zrpc.Client) GlobalRPC { - return &defaultGlobalRPC{ - cli: cli, - } -} - -func (m *defaultGlobalRPC) GetLatestAssetsListByAccountIndex(ctx context.Context, in *ReqGetLatestAssetsListByAccountIndex, opts ...grpc.CallOption) (*RespGetLatestAssetsListByAccountIndex, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetLatestAssetsListByAccountIndex(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) GetLatestAccountInfoByAccountIndex(ctx context.Context, in *ReqGetLatestAccountInfoByAccountIndex, opts ...grpc.CallOption) (*RespGetLatestAccountInfoByAccountIndex, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetLatestAccountInfoByAccountIndex(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) GetLatestPairInfo(ctx context.Context, in *ReqGetLatestPairInfo, opts ...grpc.CallOption) (*RespGetLatestPairInfo, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetLatestPairInfo(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) GetSwapAmount(ctx context.Context, in *ReqGetSwapAmount, opts ...grpc.CallOption) (*RespGetSwapAmount, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetSwapAmount(ctx, in, opts...) 
-} - -func (m *defaultGlobalRPC) GetLpValue(ctx context.Context, in *ReqGetLpValue, opts ...grpc.CallOption) (*RespGetLpValue, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetLpValue(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendTx(ctx context.Context, in *ReqSendTx, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendCreateCollectionTx(ctx context.Context, in *ReqSendCreateCollectionTx, opts ...grpc.CallOption) (*RespSendCreateCollectionTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendCreateCollectionTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendMintNftTx(ctx context.Context, in *ReqSendMintNftTx, opts ...grpc.CallOption) (*RespSendMintNftTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendMintNftTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) GetNextNonce(ctx context.Context, in *ReqGetNextNonce, opts ...grpc.CallOption) (*RespGetNextNonce, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetNextNonce(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) GetMaxOfferId(ctx context.Context, in *ReqGetMaxOfferId, opts ...grpc.CallOption) (*RespGetMaxOfferId, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.GetMaxOfferId(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendAddLiquidityTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendAddLiquidityTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendAtomicMatchTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendAtomicMatchTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendCancelOfferTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendCancelOfferTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendRemoveLiquidityTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendRemoveLiquidityTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendSwapTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendSwapTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendTransferNftTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendTransferNftTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendTransferTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendTransferTx(ctx, in, opts...) -} - -func (m *defaultGlobalRPC) SendWithdrawNftTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendWithdrawNftTx(ctx, in, opts...) 
-} - -func (m *defaultGlobalRPC) SendWithdrawTx(ctx context.Context, in *ReqSendTxByRawInfo, opts ...grpc.CallOption) (*RespSendTx, error) { - client := globalRPCProto.NewGlobalRPCClient(m.cli.Conn()) - return client.SendWithdrawTx(ctx, in, opts...) -} diff --git a/service/rpc/globalRPC/internal/logic/getlatestaccountinfobyaccountindexlogic.go b/service/rpc/globalRPC/internal/logic/getlatestaccountinfobyaccountindexlogic.go deleted file mode 100644 index 10d751788..000000000 --- a/service/rpc/globalRPC/internal/logic/getlatestaccountinfobyaccountindexlogic.go +++ /dev/null @@ -1,61 +0,0 @@ -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetLatestAccountInfoByAccountIndexLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetLatestAccountInfoByAccountIndexLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLatestAccountInfoByAccountIndexLogic { - return &GetLatestAccountInfoByAccountIndexLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetLatestAccountInfoByAccountIndexLogic) GetLatestAccountInfoByAccountIndex(in *globalRPCProto.ReqGetLatestAccountInfoByAccountIndex) (*globalRPCProto.RespGetLatestAccountInfoByAccountIndex, error) { - account, err := l.commglobalmap.GetLatestAccountInfoWithCache(l.ctx, int64(in.AccountIndex)) - if err != nil { - logx.Errorf("[GetLatestAccountInfo] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, errorcode.RpcErrInternal - } - resp := &globalRPCProto.RespGetLatestAccountInfoByAccountIndex{ - AccountId: int64(account.AccountId), - AccountIndex: account.AccountIndex, - AccountName: account.AccountName, - PublicKey: account.PublicKey, - AccountNameHash: account.AccountNameHash, - L1Address: account.L1Address, - Nonce: account.Nonce, - CollectionNonce: account.CollectionNonce, - AccountAsset: make([]*globalRPCProto.AssetResult, 0), - AssetRoot: account.AssetRoot, - Status: int64(account.Status), - } - for assetID, asset := range account.AssetInfo { - resp.AccountAsset = append(resp.AccountAsset, &globalRPCProto.AssetResult{ - AssetId: uint32(assetID), - Balance: asset.Balance.String(), - LpAmount: asset.LpAmount.String(), - OfferCanceledOrFinalized: asset.OfferCanceledOrFinalized.String(), - }) - } - return resp, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getlatestassetslistbyaccountindexlogic.go b/service/rpc/globalRPC/internal/logic/getlatestassetslistbyaccountindexlogic.go deleted file mode 100644 index f68406515..000000000 --- a/service/rpc/globalRPC/internal/logic/getlatestassetslistbyaccountindexlogic.go +++ /dev/null @@ -1,51 +0,0 @@ -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetLatestAssetsListByAccountIndexLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - 
commglobalmap commglobalmap.Commglobalmap -} - -func NewGetLatestAssetsListByAccountIndexLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLatestAssetsListByAccountIndexLogic { - return &GetLatestAssetsListByAccountIndexLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetLatestAssetsListByAccountIndexLogic) GetLatestAssetsListByAccountIndex(in *globalRPCProto.ReqGetLatestAssetsListByAccountIndex) (*globalRPCProto.RespGetLatestAssetsListByAccountIndex, error) { - accountInfo, err := l.commglobalmap.GetLatestAccountInfoWithCache(l.ctx, int64(in.AccountIndex)) - if err != nil { - logx.Errorf("[GetLatestAccountInfo] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, errorcode.RpcErrInternal - } - resp := &globalRPCProto.RespGetLatestAssetsListByAccountIndex{ - ResultAssetsList: make([]*globalRPCProto.AssetResult, 0), - } - for assetID, asset := range accountInfo.AssetInfo { - resp.ResultAssetsList = append(resp.ResultAssetsList, &globalRPCProto.AssetResult{ - AssetId: uint32(assetID), - Balance: asset.Balance.String(), - LpAmount: asset.LpAmount.String(), - OfferCanceledOrFinalized: asset.OfferCanceledOrFinalized.String(), - }) - } - return resp, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getlatestpairinfologic.go b/service/rpc/globalRPC/internal/logic/getlatestpairinfologic.go deleted file mode 100644 index fbe4d186d..000000000 --- a/service/rpc/globalRPC/internal/logic/getlatestpairinfologic.go +++ /dev/null @@ -1,51 +0,0 @@ -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetLatestPairInfoLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetLatestPairInfoLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLatestPairInfoLogic { - return &GetLatestPairInfoLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetLatestPairInfoLogic) GetLatestPairInfo(in *globalRPCProto.ReqGetLatestPairInfo) (*globalRPCProto.RespGetLatestPairInfo, error) { - if checker.CheckPairIndex(in.PairIndex) { - logx.Errorf("[CheckPairIndex] param: %d", in.PairIndex) - return nil, errorcode.RpcErrInvalidParam.RefineError("invalid PairIndex") - } - liquidity, err := l.commglobalmap.GetLatestLiquidityInfoForReadWithCache(l.ctx, int64(in.PairIndex)) - if err != nil { - logx.Errorf("[GetLatestLiquidityInfoForReadWithCache] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, errorcode.RpcErrInternal - } - return &globalRPCProto.RespGetLatestPairInfo{ - AssetAAmount: liquidity.AssetA.String(), - AssetAId: uint32(liquidity.AssetAId), - AssetBAmount: liquidity.AssetB.String(), - AssetBId: uint32(liquidity.AssetBId), - LpAmount: liquidity.LpAmount.String(), - }, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getlpvaluelogic.go b/service/rpc/globalRPC/internal/logic/getlpvaluelogic.go deleted file mode 100644 index 91fa66d38..000000000 --- 
a/service/rpc/globalRPC/internal/logic/getlpvaluelogic.go +++ /dev/null @@ -1,63 +0,0 @@ -package logic - -import ( - "context" - "math/big" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetLpValueLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetLpValueLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetLpValueLogic { - return &GetLpValueLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetLpValueLogic) GetLpValue(in *globalRPCProto.ReqGetLpValue) (*globalRPCProto.RespGetLpValue, error) { - if checker.CheckPairIndex(in.PairIndex) { - logx.Errorf("[CheckPairIndex] param: %d", in.PairIndex) - return nil, errorcode.RpcErrInvalidParam.RefineError("invalid PairIndex") - } - amount, isTure := new(big.Int).SetString(in.LPAmount, 10) - if !isTure { - logx.Errorf("[SetString] err: %s", in.LPAmount) - return nil, errorcode.RpcErrInvalidParam - } - - liquidity, err := l.commglobalmap.GetLatestLiquidityInfoForReadWithCache(l.ctx, int64(in.PairIndex)) - if err != nil { - logx.Errorf("[GetLatestLiquidityInfoForReadWithCache] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, errorcode.RpcErrInternal - } - assetAAmount, assetBAmount, err := util.ComputeRemoveLiquidityAmount(liquidity, amount) - if err != nil { - logx.Errorf("[ComputeRemoveLiquidityAmount] err: %s", err.Error()) - return nil, errorcode.RpcErrInternal - } - return &globalRPCProto.RespGetLpValue{ - AssetAId: uint32(liquidity.AssetAId), - AssetAAmount: assetAAmount.String(), - AssetBId: uint32(liquidity.AssetBId), - AssetBAmount: assetBAmount.String(), - }, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getmaxofferidlogic.go b/service/rpc/globalRPC/internal/logic/getmaxofferidlogic.go deleted file mode 100644 index 3d14d3396..000000000 --- a/service/rpc/globalRPC/internal/logic/getmaxofferidlogic.go +++ /dev/null @@ -1,42 +0,0 @@ -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetMaxOfferIdLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetMaxOfferIdLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetMaxOfferIdLogic { - return &GetMaxOfferIdLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetMaxOfferIdLogic) GetMaxOfferId(in *globalRPCProto.ReqGetMaxOfferId) (*globalRPCProto.RespGetMaxOfferId, error) { - nftIndex, err := l.commglobalmap.GetLatestOfferIdForWrite(l.ctx, int64(in.AccountIndex)) - if err != nil { - logx.Errorf("[GetLatestAccountInfo] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return 
nil, errorcode.RpcErrInternal - } - return &globalRPCProto.RespGetMaxOfferId{ - OfferId: uint64(nftIndex), - }, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getnextnoncelogic.go b/service/rpc/globalRPC/internal/logic/getnextnoncelogic.go deleted file mode 100644 index 56397f97f..000000000 --- a/service/rpc/globalRPC/internal/logic/getnextnoncelogic.go +++ /dev/null @@ -1,42 +0,0 @@ -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetNextNonceLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetNextNonceLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetNextNonceLogic { - return &GetNextNonceLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetNextNonceLogic) GetNextNonce(in *globalRPCProto.ReqGetNextNonce) (*globalRPCProto.RespGetNextNonce, error) { - accountInfo, err := l.commglobalmap.GetLatestAccountInfo(l.ctx, int64(in.AccountIndex)) - if err != nil { - logx.Errorf("[GetLatestAccountInfo] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, errorcode.RpcErrInternal - } - return &globalRPCProto.RespGetNextNonce{ - Nonce: uint64(accountInfo.Nonce), - }, nil -} diff --git a/service/rpc/globalRPC/internal/logic/getswapamountlogic.go b/service/rpc/globalRPC/internal/logic/getswapamountlogic.go deleted file mode 100644 index ff9ca155c..000000000 --- a/service/rpc/globalRPC/internal/logic/getswapamountlogic.go +++ /dev/null @@ -1,79 +0,0 @@ -package logic - -import ( - "context" - "math/big" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/checker" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GetSwapAmountLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewGetSwapAmountLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetSwapAmountLogic { - return &GetSwapAmountLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *GetSwapAmountLogic) GetSwapAmount(in *globalRPCProto.ReqGetSwapAmount) (*globalRPCProto.RespGetSwapAmount, error) { - if checker.CheckPairIndex(in.PairIndex) { - logx.Errorf("[CheckPairIndex] Parameter mismatch: %d", in.PairIndex) - return nil, errorcode.RpcErrInvalidParam - } - deltaAmount, isTure := new(big.Int).SetString(in.AssetAmount, 10) - if !isTure { - logx.Errorf("[SetString] err, AssetAmount: %s", in.AssetAmount) - return nil, errorcode.RpcErrInvalidParam - } - - liquidity, err := l.commglobalmap.GetLatestLiquidityInfoForReadWithCache(l.ctx, int64(in.PairIndex)) - if err != nil { - logx.Errorf("[GetLatestLiquidityInfoForReadWithCache] err: %s", err.Error()) - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrNotFound - } - return nil, 
errorcode.RpcErrInternal - } - if liquidity.AssetA == nil || liquidity.AssetA.Cmp(big.NewInt(0)) == 0 || - liquidity.AssetB == nil || liquidity.AssetB.Cmp(big.NewInt(0)) == 0 { - logx.Errorf("liquidity: %v, err: %s", liquidity, errorcode.RpcErrLiquidityInvalidAssetAmount.Error()) - return &globalRPCProto.RespGetSwapAmount{}, errorcode.RpcErrLiquidityInvalidAssetAmount - } - - if int64(in.AssetId) != liquidity.AssetAId && int64(in.AssetId) != liquidity.AssetBId { - logx.Errorf("input:%v,liquidity: %v, err: %s", in, liquidity, errorcode.RpcErrLiquidityInvalidAssetAmount.Error()) - return &globalRPCProto.RespGetSwapAmount{}, errorcode.RpcErrLiquidityInvalidAssetID - } - logx.Errorf("[ComputeDelta] liquidity: %v", liquidity) - logx.Errorf("[ComputeDelta] in: %v", in) - logx.Errorf("[ComputeDelta] deltaAmount: %v", deltaAmount) - - var assetAmount *big.Int - var toAssetId int64 - assetAmount, toAssetId, err = util.ComputeDelta(liquidity.AssetA, liquidity.AssetB, liquidity.AssetAId, liquidity.AssetBId, - int64(in.AssetId), in.IsFrom, deltaAmount, liquidity.FeeRate) - if err != nil { - logx.Errorf("[ComputeDelta] err: %s", err.Error()) - return nil, errorcode.RpcErrInternal - } - logx.Errorf("[ComputeDelta] assetAmount:%v", assetAmount) - return &globalRPCProto.RespGetSwapAmount{ - SwapAssetAmount: assetAmount.String(), - SwapAssetId: uint32(toAssetId), - }, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendaddliquiditytxlogic.go b/service/rpc/globalRPC/internal/logic/sendaddliquiditytxlogic.go deleted file mode 100644 index 97fc2c596..000000000 --- a/service/rpc/globalRPC/internal/logic/sendaddliquiditytxlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendAddLiquidityTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendAddLiquidityTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendAddLiquidityTxLogic { - return &SendAddLiquidityTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendAddLiquidityTxLogic) SendAddLiquidityTx(reqSendTx *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendAddLiquidityTx(l.ctx, l.svcCtx, l.commglobalmap, reqSendTx.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendatomicmatchtxlogic.go b/service/rpc/globalRPC/internal/logic/sendatomicmatchtxlogic.go deleted file mode 100644 index 685246cdf..000000000 --- a/service/rpc/globalRPC/internal/logic/sendatomicmatchtxlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -/* -* Copyright © 2021 Zkbas Protocol -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendAtomicMatchTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendAtomicMatchTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendAtomicMatchTxLogic { - return &SendAtomicMatchTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendAtomicMatchTxLogic) SendAtomicMatchTx(reqSendTx *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendAtomicMatchTx(l.ctx, l.svcCtx, l.commglobalmap, reqSendTx.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendcanceloffertxlogic.go b/service/rpc/globalRPC/internal/logic/sendcanceloffertxlogic.go deleted file mode 100644 index 4fe7b2dc9..000000000 --- a/service/rpc/globalRPC/internal/logic/sendcanceloffertxlogic.go +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendCancelOfferTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendCancelOfferTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendCancelOfferTxLogic { - return &SendCancelOfferTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} -func (l *SendCancelOfferTxLogic) SendCancelOfferTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendCancelOfferTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendcreatecollectiontxlogic.go b/service/rpc/globalRPC/internal/logic/sendcreatecollectiontxlogic.go deleted file mode 100644 index 59c6136c1..000000000 --- a/service/rpc/globalRPC/internal/logic/sendcreatecollectiontxlogic.go +++ /dev/null @@ -1,144 +0,0 @@ -package logic - -import ( - "context" - "encoding/json" - "errors" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendCreateCollectionTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendCreateCollectionTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendCreateCollectionTxLogic { - return &SendCreateCollectionTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendCreateCollectionTxLogic) SendCreateCollectionTx(in *globalRPCProto.ReqSendCreateCollectionTx) (*globalRPCProto.RespSendCreateCollectionTx, error) { - txInfo, err := commonTx.ParseCreateCollectionTxInfo(in.TxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return nil, errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateCreateCollectionTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return nil, errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := sendrawtx.CheckGasAccountIndex(txInfo.GasAccountIndex, l.svcCtx.SysConfigModel); err != nil { - return nil, err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.AccountIndex], err = 
l.commglobalmap.GetLatestAccountInfo(l.ctx, txInfo.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.AccountIndex, err.Error()) - return nil, errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = l.commglobalmap.GetBasicAccountInfo(l.ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return nil, errorcode.RpcErrInternal - } - } - - txInfo.CollectionId = accountInfoMap[txInfo.AccountIndex].CollectionNonce - - var txDetails []*mempool.MempoolTxDetail - txDetails, err = txVerification.VerifyCreateCollectionTxInfo(accountInfoMap, txInfo) - if err != nil { - return nil, errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return nil, errorcode.RpcErrInternal - } - _, mempoolTx := sendrawtx.ConstructMempoolTx( - commonTx.TxTypeCreateCollection, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - commonConstant.NilPairIndex, - commonConstant.NilAssetId, - accountInfoMap[txInfo.AccountIndex].AccountName, - commonConstant.NilL1Address, - string(txInfoBytes), - "", - txInfo.AccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - // construct nft Collection info - nftCollectionInfo := &nft.L2NftCollection{ - CollectionId: txInfo.CollectionId, - AccountIndex: txInfo.AccountIndex, - Name: txInfo.Name, - Introduction: txInfo.Introduction, - Status: nft.CollectionPending, - } - if err = createMempoolTxForCreateCollection(nftCollectionInfo, mempoolTx, l.svcCtx); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = sendrawtx.CreateFailTx(l.svcCtx.FailTxModel, commonTx.TxTypeCreateCollection, txInfo, err) - return nil, errorcode.RpcErrInternal - } - return &globalRPCProto.RespSendCreateCollectionTx{CollectionId: txInfo.CollectionId}, nil -} - -func createMempoolTxForCreateCollection( - nftCollectionInfo *nft.L2NftCollection, - nMempoolTx *mempool.MempoolTx, - svcCtx *svc.ServiceContext, -) (err error) { - // check collectionId exist - exist, err := svcCtx.CollectionModel.IfCollectionExistsByCollectionId(nftCollectionInfo.CollectionId) - if err != nil { - return err - } - if exist { - logx.Errorf("collectionId duplicate creation: %d", nftCollectionInfo.CollectionId) - return errors.New("collectionId duplicate creation") - } - - // write into mempool - if err := svcCtx.MempoolModel.CreateMempoolTxAndL2CollectionAndNonce(nMempoolTx, nftCollectionInfo); err != nil { - return err - } - return nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendmintnfttxlogic.go b/service/rpc/globalRPC/internal/logic/sendmintnfttxlogic.go deleted file mode 100644 index 8b9ef2b83..000000000 --- a/service/rpc/globalRPC/internal/logic/sendmintnfttxlogic.go +++ /dev/null @@ -1,190 +0,0 @@ -package logic - -import ( - "context" - "encoding/json" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - - 
"github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendMintNftTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendMintNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendMintNftTxLogic { - return &SendMintNftTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendMintNftTxLogic) SendMintNftTx(in *globalRPCProto.ReqSendMintNftTx) (*globalRPCProto.RespSendMintNftTx, error) { - txInfo, err := commonTx.ParseMintNftTxInfo(in.TxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return nil, errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateMintNftTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return nil, errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := sendrawtx.CheckGasAccountIndex(txInfo.GasAccountIndex, l.svcCtx.SysConfigModel); err != nil { - return nil, err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - nftIndex int64 - redisLock *redis.RedisLock - ) - - accountInfoMap[txInfo.CreatorAccountIndex], err = l.commglobalmap.GetLatestAccountInfo(l.ctx, txInfo.CreatorAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.CreatorAccountIndex, err.Error()) - return nil, errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.CreatorAccountIndex].CollectionNonce < txInfo.NftCollectionId { - logx.Errorf("collection id %d is greater than collection nonce %d", - txInfo.NftCollectionId, accountInfoMap[txInfo.CreatorAccountIndex].CollectionNonce) - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid NftCollectionId") - } - - if accountInfoMap[txInfo.ToAccountIndex] == nil { - accountInfoMap[txInfo.ToAccountIndex], err = l.commglobalmap.GetBasicAccountInfo(l.ctx, txInfo.ToAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.ToAccountIndex, err.Error()) - return nil, errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.ToAccountIndex].AccountNameHash != txInfo.ToAccountNameHash { - logx.Errorf("invalid account name hash, expected: %s, actual: %s", accountInfoMap[txInfo.ToAccountIndex].AccountNameHash, txInfo.ToAccountNameHash) - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountNameHash") - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = 
l.commglobalmap.GetBasicAccountInfo(l.ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return nil, errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return nil, errorcode.RpcErrInternal - } - } - - redisLock, nftIndex, err = globalmapHandler.GetLatestNftIndexForWrite(l.svcCtx.NftModel, l.svcCtx.RedisConnection) - if err != nil { - logx.Errorf("[sendMintNftTx] unable to get latest nft index: %s", err.Error()) - return nil, err - } - defer redisLock.Release() - - var ( - txDetails []*mempool.MempoolTxDetail - ) - txInfo.NftIndex = nftIndex - txDetails, err = txVerification.VerifyMintNftTxInfo( - accountInfoMap, - txInfo, - ) - if err != nil { - return nil, errorcode.RpcErrVerification.RefineError(err) - } - - // construct nft info - nftInfo := &nft.L2Nft{ - NftIndex: nftIndex, - CreatorAccountIndex: txInfo.CreatorAccountIndex, - OwnerAccountIndex: txInfo.ToAccountIndex, - NftContentHash: txInfo.NftContentHash, - NftL1Address: commonConstant.NilL1Address, - NftL1TokenId: commonConstant.NilL1TokenId, - CreatorTreasuryRate: txInfo.CreatorTreasuryRate, - CollectionId: txInfo.NftCollectionId, - } - // delete key - key := util.GetNftKeyForRead(nftIndex) - _, err = l.svcCtx.RedisConnection.Del(key) - if err != nil { - logx.Errorf("[sendMintNftTx] unable to delete key from redis: %s", err.Error()) - return nil, errorcode.RpcErrInternal - } - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return nil, errorcode.RpcErrInternal - } - _, mempoolTx := sendrawtx.ConstructMempoolTx( - commonTx.TxTypeMintNft, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - nftIndex, - commonConstant.NilPairIndex, - commonConstant.NilAssetId, - commonConstant.NilAssetAmountStr, - "", - string(txInfoBytes), - "", - txInfo.CreatorAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - - if err := l.svcCtx.MempoolModel.CreateMempoolTxAndL2Nft(mempoolTx, nftInfo); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = sendrawtx.CreateFailTx(l.svcCtx.FailTxModel, commonTx.TxTypeMintNft, txInfo, err) - return nil, errorcode.RpcErrInternal - } - - resp := &globalRPCProto.RespSendMintNftTx{NftIndex: txInfo.NftIndex} - - // update redis - var formatNftInfo *commonAsset.NftInfo - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType == commonAsset.NftAssetType { - formatNftInfo, err = commonAsset.ParseNftInfo(txDetail.BalanceDelta) - if err != nil { - logx.Errorf("unable to parse nft info: %s", err.Error()) - return resp, nil - } - } - } - nftInfoBytes, err := json.Marshal(formatNftInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return resp, nil - } - _ = l.svcCtx.RedisConnection.Setex(key, string(nftInfoBytes), globalmapHandler.NftExpiryTime) - - return resp, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendaddliquiditytx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendaddliquiditytx.go deleted file mode 100644 index 8f0925066..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendaddliquiditytx.go +++ /dev/null @@ -1,144 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - 
"github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendAddLiquidityTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseAddLiquidityTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateAddLiquidityTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return txId, err - } - - liquidityInfo, err := commglobalmap.GetLatestLiquidityInfoForWrite(ctx, txInfo.PairIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid PairIndex") - } - logx.Errorf("fail to get liquidity info: %d, err: %s", txInfo.PairIndex, err.Error()) - return "", err - } - if liquidityInfo.AssetA == nil || liquidityInfo.AssetB == nil { - logx.Errorf("invalid liquidity assets") - return "", errorcode.RpcErrInternal - } - if liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 { - txInfo.LpAmount, err = util.ComputeEmptyLpAmount(txInfo.AssetAAmount, txInfo.AssetBAmount) - if err != nil { - logx.Errorf("cannot computer lp amount, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - } else { - txInfo.LpAmount, err = util.ComputeLpAmount(liquidityInfo, txInfo.AssetAAmount) - if err != nil { - logx.Errorf("cannot computer lp amount, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return txId, errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[liquidityInfo.TreasuryAccountIndex] == nil { - accountInfoMap[liquidityInfo.TreasuryAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, liquidityInfo.TreasuryAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return txId, errorcode.RpcErrInvalidTxField.RefineError("invalid liquidity") - } - logx.Errorf("unable to get account info by index: %d, err: %s", 
liquidityInfo.TreasuryAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyAddLiquidityTxInfo( - accountInfoMap, - liquidityInfo, - txInfo) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeAddLiquidity, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - txInfo.PairIndex, - commonConstant.NilAssetId, - txInfo.LpAmount.String(), - "", - string(txInfoBytes), - "", - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - if err := commglobalmap.DeleteLatestLiquidityInfoForWriteInCache(ctx, txInfo.PairIndex); err != nil { - logx.Errorf("fail to delete liquidity info: %d, err: %s", txInfo.PairIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeAddLiquidity, txInfo, err) - return "", err - } - // update cache, not key logic - if err := commglobalmap.SetLatestLiquidityInfoForWrite(ctx, txInfo.PairIndex); err != nil { - logx.Errorf("[SetLatestLiquidityInfoForWrite] param: %d, err: %s", txInfo.PairIndex, err.Error()) - } - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendatomicmatchtx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendatomicmatchtx.go deleted file mode 100644 index 84ed3f6c6..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendatomicmatchtx.go +++ /dev/null @@ -1,202 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "time" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendAtomicMatchTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseAtomicMatchTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateAtomicMatchTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - now := time.Now().UnixMilli() - if txInfo.BuyOffer.ExpiredAt < now || 
txInfo.SellOffer.ExpiredAt < now { - logx.Errorf("[sendAtomicMatchTx] invalid time stamp") - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid ExpiredAt of BuyOffer or SellOffer") - } - if txInfo.BuyOffer.NftIndex != txInfo.SellOffer.NftIndex || - txInfo.BuyOffer.AssetId != txInfo.SellOffer.AssetId || - txInfo.BuyOffer.AssetAmount.String() != txInfo.SellOffer.AssetAmount.String() || - txInfo.BuyOffer.TreasuryRate != txInfo.SellOffer.TreasuryRate { - return "", errorcode.RpcErrInvalidTxField.RefineError("mismatch between BuyOffer and SellOffer") - } - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.AccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.AccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.BuyOffer.AccountIndex] == nil { - accountInfoMap[txInfo.BuyOffer.AccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.BuyOffer.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid BuyOffer.AccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.BuyOffer.AccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.SellOffer.AccountIndex] == nil { - accountInfoMap[txInfo.SellOffer.AccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.SellOffer.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid SellOffer.AccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.SellOffer.AccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - nftInfo, err := commglobalmap.GetLatestNftInfoForRead(ctx, txInfo.BuyOffer.NftIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid BuyOffer.NftIndex") - } - logx.Errorf("fail to get nft info: %d, err: %s", txInfo.BuyOffer.NftIndex, err.Error()) - return "", err - } - if nftInfo.OwnerAccountIndex != txInfo.SellOffer.AccountIndex { - logx.Errorf("not owner, owner: %d, seller: %d", nftInfo.OwnerAccountIndex, txInfo.SellOffer.AccountIndex) - return "", errorcode.RpcErrInvalidTxField.RefineError("seller is not nft owner") - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - txDetails, err = txVerification.VerifyAtomicMatchTxInfo( - accountInfoMap, - nftInfo, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - key := util.GetNftKeyForRead(txInfo.BuyOffer.NftIndex) - _, err = svcCtx.RedisConnection.Del(key) - if err != nil { - logx.Errorf("unable to delete key from redis: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txInfoBytes, err := 
json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeAtomicMatch, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - txInfo.BuyOffer.NftIndex, - commonConstant.NilPairIndex, - txInfo.BuyOffer.AssetId, - txInfo.BuyOffer.AssetAmount.String(), - "", - string(txInfoBytes), - "", - txInfo.AccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - nftExchange := &nft.L2NftExchange{ - BuyerAccountIndex: txInfo.BuyOffer.AccountIndex, - OwnerAccountIndex: txInfo.SellOffer.AccountIndex, - NftIndex: txInfo.BuyOffer.NftIndex, - AssetId: txInfo.BuyOffer.AssetId, - AssetAmount: txInfo.BuyOffer.AssetAmount.String(), - } - var offers []*nft.Offer - offers = append(offers, &nft.Offer{ - OfferType: txInfo.BuyOffer.Type, - OfferId: txInfo.BuyOffer.OfferId, - AccountIndex: txInfo.BuyOffer.AccountIndex, - NftIndex: txInfo.BuyOffer.NftIndex, - AssetId: txInfo.BuyOffer.AssetId, - AssetAmount: txInfo.BuyOffer.AssetAmount.String(), - ListedAt: txInfo.BuyOffer.ListedAt, - ExpiredAt: txInfo.BuyOffer.ExpiredAt, - TreasuryRate: txInfo.BuyOffer.TreasuryRate, - Sig: common.Bytes2Hex(txInfo.BuyOffer.Sig), - Status: nft.OfferFinishedStatus, - }) - offers = append(offers, &nft.Offer{ - OfferType: txInfo.SellOffer.Type, - OfferId: txInfo.SellOffer.OfferId, - AccountIndex: txInfo.SellOffer.AccountIndex, - NftIndex: txInfo.SellOffer.NftIndex, - AssetId: txInfo.SellOffer.AssetId, - AssetAmount: txInfo.SellOffer.AssetAmount.String(), - ListedAt: txInfo.SellOffer.ListedAt, - ExpiredAt: txInfo.SellOffer.ExpiredAt, - TreasuryRate: txInfo.SellOffer.TreasuryRate, - Sig: common.Bytes2Hex(txInfo.SellOffer.Sig), - Status: nft.OfferFinishedStatus, - }) - - if err := svcCtx.MempoolModel.CreateMempoolTxAndL2NftExchange(mempoolTx, offers, nftExchange); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeAtomicMatch, txInfo, err) - return "", err - } - var formatNftInfo *commonAsset.NftInfo - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType == commonAsset.NftAssetType { - formatNftInfo, err = commonAsset.ParseNftInfo(txDetail.BalanceDelta) - if err != nil { - logx.Errorf("unable to parse nft info: %s", err.Error()) - return txId, nil - } - } - } - nftInfoBytes, err := json.Marshal(formatNftInfo) - if err != nil { - logx.Errorf("unable to marshal tx: %s", err.Error()) - return txId, nil - } - _ = svcCtx.RedisConnection.Setex(key, string(nftInfoBytes), globalmapHandler.NftExpiryTime) - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendcanceloffertx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendcanceloffertx.go deleted file mode 100644 index ab79d2711..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendcanceloffertx.go +++ /dev/null @@ -1,139 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - 
"github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendCancelOfferTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseCancelOfferTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateCancelOfferTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.AccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid AccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.AccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - offerAssetId := txInfo.OfferId / 128 - offerIndex := txInfo.OfferId % 128 - if accountInfoMap[txInfo.AccountIndex].AssetInfo[offerAssetId] == nil { - accountInfoMap[txInfo.AccountIndex].AssetInfo[offerAssetId] = &commonAsset.AccountAsset{ - AssetId: offerAssetId, - Balance: big.NewInt(0), - LpAmount: big.NewInt(0), - OfferCanceledOrFinalized: big.NewInt(0), - } - } else { - offerInfo := accountInfoMap[txInfo.AccountIndex].AssetInfo[offerAssetId].OfferCanceledOrFinalized - xBit := offerInfo.Bit(int(offerIndex)) - if xBit == 1 { - logx.Errorf("offer is already confirmed or canceled") - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid OfferId, already confirmed or canceled") - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyCancelOfferTxInfo( - accountInfoMap, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeCancelOffer, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - commonConstant.NilPairIndex, - commonConstant.NilAssetId, - accountInfoMap[txInfo.AccountIndex].AccountName, - commonConstant.NilL1Address, - string(txInfoBytes), - "", - txInfo.AccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - var isUpdate bool - offerInfo, err := svcCtx.OfferModel.GetOfferByAccountIndexAndOfferId(txInfo.AccountIndex, txInfo.OfferId) - if err == errorcode.DbErrNotFound { - offerInfo = &nft.Offer{ - OfferType: 0, - OfferId: txInfo.OfferId, - AccountIndex: txInfo.AccountIndex, - 
NftIndex: 0, - AssetId: 0, - AssetAmount: "0", - ListedAt: 0, - ExpiredAt: 0, - TreasuryRate: 0, - Sig: "", - Status: nft.OfferFinishedStatus, - } - } else { - offerInfo.Status = nft.OfferFinishedStatus - isUpdate = true - } - - if err := svcCtx.MempoolModel.CreateMempoolTxAndUpdateOffer(mempoolTx, offerInfo, isUpdate); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeCancelOffer, txInfo, err) - return "", errorcode.RpcErrInternal - } - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendremoveliquiditytx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendremoveliquiditytx.go deleted file mode 100644 index 3bf8056aa..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendremoveliquiditytx.go +++ /dev/null @@ -1,185 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendRemoveLiquidityTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseRemoveLiquidityTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateRemoveLiquidityTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - liquidityInfo, err := commglobalmap.GetLatestLiquidityInfoForWrite(ctx, txInfo.PairIndex) - if err != nil { - logx.Errorf("[sendRemoveLiquidityTx] unable to get latest liquidity info for write: %s", err.Error()) - return "", err - } - - // check params - if liquidityInfo.AssetA == nil || - liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 || - liquidityInfo.AssetB == nil || - liquidityInfo.AssetB.Cmp(big.NewInt(0)) == 0 || - liquidityInfo.LpAmount == nil || - liquidityInfo.LpAmount.Cmp(big.NewInt(0)) == 0 { - logx.Errorf("[sendRemoveLiquidityTx] invalid params") - return "", errors.New("[sendRemoveLiquidityTx] invalid params") - } - - var ( - assetAAmount, assetBAmount *big.Int - ) - assetAAmount, assetBAmount, err = util.ComputeRemoveLiquidityAmount(liquidityInfo, txInfo.LpAmount) - if err != nil { - logx.Errorf("[ComputeRemoveLiquidityAmount] err: %s", err.Error()) - return "", err - } - if assetAAmount.Cmp(txInfo.AssetAMinAmount) < 0 || assetBAmount.Cmp(txInfo.AssetBMinAmount) < 0 { - errInfo := fmt.Sprintf("[logic.sendRemoveLiquidityTx] less than MinDelta: %s:%s/%s:%s", - txInfo.AssetAMinAmount.String(), txInfo.AssetBMinAmount.String(), assetAAmount.String(), assetBAmount.String()) - logx.Error(errInfo) - return "", 
errors.New(errInfo) - } - // add into tx info - txInfo.AssetAAmountDelta = assetAAmount - txInfo.AssetBAmountDelta = assetBAmount - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[liquidityInfo.TreasuryAccountIndex] == nil { - accountInfoMap[liquidityInfo.TreasuryAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, liquidityInfo.TreasuryAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid liquidity") - } - logx.Errorf("unable to get account info by index: %d, err: %s", liquidityInfo.TreasuryAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyRemoveLiquidityTxInfo( - accountInfoMap, - liquidityInfo, - txInfo) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeRemoveLiquidity, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - txInfo.PairIndex, - commonConstant.NilAssetId, - txInfo.LpAmount.String(), - "", - string(txInfoBytes), - "", - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - // delete key - key := util.GetLiquidityKeyForWrite(txInfo.PairIndex) - key2 := util.GetLiquidityKeyForRead(txInfo.PairIndex) - _, err = svcCtx.RedisConnection.Del(key) - if err != nil { - logx.Errorf("unable to delete key from redis: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - _, err = svcCtx.RedisConnection.Del(key2) - if err != nil { - logx.Errorf("unable to delete key from redis: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - // insert into mempool - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeRemoveLiquidity, txInfo, err) - return "", errorcode.RpcErrInternal - } - // update redis - // get latest liquidity info - for _, txDetail := range txDetails { - if txDetail.AssetType == commonAsset.LiquidityAssetType { - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.LiquidityAssetType, liquidityInfo.String(), txDetail.BalanceDelta) - if err != nil { - logx.Errorf("unable to compute new balance: %s", err.Error()) - return txId, nil - } - 
liquidityInfo, err = commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - logx.Errorf("unable to parse liquidity info: %s", err.Error()) - return txId, nil - } - } - } - liquidityInfoBytes, err := json.Marshal(liquidityInfo) - if err != nil { - logx.Errorf("[sendRemoveLiquidityTx] unable to marshal: %s", err.Error()) - return txId, nil - } - _ = svcCtx.RedisConnection.Setex(key, string(liquidityInfoBytes), globalmapHandler.LiquidityExpiryTime) - - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendswaptx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendswaptx.go deleted file mode 100644 index 15c87d6e9..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendswaptx.go +++ /dev/null @@ -1,199 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "errors" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendSwapTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseSwapTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateSwapTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - liquidityInfo, err := commglobalmap.GetLatestLiquidityInfoForWrite(ctx, txInfo.PairIndex) - if err != nil { - logx.Errorf("[sendSwapTx] unable to get latest liquidity info for write: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - - // check params - if liquidityInfo.AssetA == nil || liquidityInfo.AssetA.Cmp(big.NewInt(0)) == 0 || - liquidityInfo.AssetB == nil || liquidityInfo.AssetB.Cmp(big.NewInt(0)) == 0 { - logx.Errorf("invalid params") - return "", errorcode.RpcErrInternal - } - - // compute delta - var ( - toDelta *big.Int - ) - if liquidityInfo.AssetAId == txInfo.AssetAId && - liquidityInfo.AssetBId == txInfo.AssetBId { - toDelta, _, err = util.ComputeDelta( - liquidityInfo.AssetA, - liquidityInfo.AssetB, - liquidityInfo.AssetAId, - liquidityInfo.AssetBId, - txInfo.AssetAId, - true, - txInfo.AssetAAmount, - liquidityInfo.FeeRate, - ) - } else if liquidityInfo.AssetAId == txInfo.AssetBId && - liquidityInfo.AssetBId == txInfo.AssetAId { - toDelta, _, err = util.ComputeDelta( - liquidityInfo.AssetA, - liquidityInfo.AssetB, - liquidityInfo.AssetAId, - liquidityInfo.AssetBId, - txInfo.AssetBId, - true, - txInfo.AssetAAmount, - liquidityInfo.FeeRate, - ) - } else { - err = errors.New("invalid pair assetIds") - } - if err != nil { - logx.Errorf("invalid AssetIds: %d %d %d, err: %s", - txInfo.AssetAId, - uint32(liquidityInfo.AssetAId), - 
uint32(liquidityInfo.AssetBId), - err.Error()) - return "", errorcode.RpcErrInternal - } - // check if toDelta is less than minToAmount - if toDelta.Cmp(txInfo.AssetBMinAmount) < 0 { - logx.Errorf("minToAmount is bigger than toDelta: %s, %s", - txInfo.AssetBMinAmount.String(), toDelta.String()) - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid AssetBMinAmount") - } - // complete tx info - txInfo.AssetBAmountDelta = toDelta - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifySwapTxInfo( - accountInfoMap, - liquidityInfo, - txInfo) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeSwap, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - txInfo.PairIndex, - commonConstant.NilAssetId, - txInfo.AssetAAmount.String(), - "", - string(txInfoBytes), - "", - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - // delete key - keyW := util.GetLiquidityKeyForWrite(txInfo.PairIndex) - keyR := util.GetLiquidityKeyForRead(txInfo.PairIndex) - _, err = svcCtx.RedisConnection.Del(keyW) - if err != nil { - logx.Errorf("unable to delete key from redis, key: %s, err: %s", keyW, err.Error()) - return "", errorcode.RpcErrInternal - } - _, err = svcCtx.RedisConnection.Del(keyR) - if err != nil { - logx.Errorf("unable to delete key from redis, key: %s, err: %s", keyR, err.Error()) - return "", errorcode.RpcErrInternal - } - // insert into mempool - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeSwap, txInfo, err) - return "", errorcode.RpcErrInternal - } - // update redis - // get latest liquidity info - for _, txDetail := range txDetails { - if txDetail.AssetType == commonAsset.LiquidityAssetType { - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.LiquidityAssetType, liquidityInfo.String(), txDetail.BalanceDelta) - if err != nil { - logx.Errorf("unable to compute new balance: %s", err.Error()) - return txId, errorcode.RpcErrInternal - } - liquidityInfo, err = commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - logx.Errorf("unable to parse liquidity info: %s", err.Error()) - return 
txId, errorcode.RpcErrInternal - } - } - } - liquidityInfoBytes, err := json.Marshal(liquidityInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return txId, errorcode.RpcErrInternal - } - _ = svcCtx.RedisConnection.Setex(keyW, string(liquidityInfoBytes), globalmapHandler.LiquidityExpiryTime) - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfernfttx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfernfttx.go deleted file mode 100644 index 9fff84f6a..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfernfttx.go +++ /dev/null @@ -1,134 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendTransferNftTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseTransferNftTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateTransferNftTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.ToAccountIndex] == nil { - accountInfoMap[txInfo.ToAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.ToAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.ToAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.ToAccountIndex].AccountNameHash != txInfo.ToAccountNameHash { - logx.Errorf("invalid account name hash, expected: %s, actual: %s", accountInfoMap[txInfo.ToAccountIndex].AccountNameHash, txInfo.ToAccountNameHash) - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountNameHash") - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by 
index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - nftInfo, err := commglobalmap.GetLatestNftInfoForRead(ctx, txInfo.NftIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid NftIndex") - } - logx.Errorf("fail to get nft info: %d, err: %s", txInfo.NftIndex, err.Error()) - return "", err - } - if nftInfo.OwnerAccountIndex != txInfo.FromAccountIndex { - logx.Errorf("not nft owner") - return "", errorcode.RpcErrInvalidTxField.RefineError("not nft owner") - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyTransferNftTxInfo( - accountInfoMap, - nftInfo, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeTransferNft, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - txInfo.NftIndex, - commonConstant.NilPairIndex, - commonConstant.NilAssetId, - commonConstant.NilAssetAmountStr, - "", - string(txInfoBytes), - "", - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - - if err := commglobalmap.DeleteLatestNftInfoForReadInCache(ctx, txInfo.NftIndex); err != nil { - logx.Errorf("[DeleteLatestNftInfoForReadInCache] param: %d, err: %s", txInfo.NftIndex, err.Error()) - return "", err - } - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeTransferNft, txInfo, err) - return "", errorcode.RpcErrInternal - } - // update cache, not key logic - if err := commglobalmap.SetLatestNftInfoForReadInCache(ctx, txInfo.NftIndex); err != nil { - logx.Errorf("[SetLatestNftInfoForReadInCache] param: %d, err: %s", txInfo.NftIndex, err.Error()) - } - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfertx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfertx.go deleted file mode 100644 index 0e2dd172a..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendtransfertx.go +++ /dev/null @@ -1,116 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendTransferTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseTransferTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateTransferTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", 
errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - var accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - - if accountInfoMap[txInfo.ToAccountIndex] == nil { - accountInfoMap[txInfo.ToAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.ToAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.ToAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.ToAccountIndex].AccountNameHash != txInfo.ToAccountNameHash { - logx.Errorf("invalid account name hash, expected: %s, actual: %s", accountInfoMap[txInfo.ToAccountIndex].AccountNameHash, txInfo.ToAccountNameHash) - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid ToAccountNameHash") - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyTransferTxInfo( - accountInfoMap, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeTransfer, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - commonConstant.NilPairIndex, - txInfo.AssetId, - txInfo.AssetAmount.String(), - "", - string(txInfoBytes), - txInfo.Memo, - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeTransfer, txInfo, err) - return "", errorcode.RpcErrInternal - } - if err := commglobalmap.SetLatestAccountInfoInToCache(ctx, txInfo.FromAccountIndex); err != nil { - logx.Errorf("unable to set account info in cache: %s", err.Error()) - } - if err := commglobalmap.SetLatestAccountInfoInToCache(ctx, txInfo.ToAccountIndex); err != nil { - logx.Errorf("unable to set account info in cache: %s", err.Error()) - } - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawnfttx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawnfttx.go deleted file mode 100644 index 54bce3941..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawnfttx.go +++ /dev/null @@ 
-1,159 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - "math/big" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendWithdrawNftTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseWithdrawNftTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateWithdrawNftTxInfo(txInfo); err != nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.AccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.AccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.AccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - if accountInfoMap[txInfo.CreatorAccountIndex] == nil { - accountInfoMap[txInfo.CreatorAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.CreatorAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid CreatorAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.CreatorAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - nftInfo, err := commglobalmap.GetLatestNftInfoForRead(ctx, txInfo.NftIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid NftIndex") - } - logx.Errorf("fail to get nft info: %d, err: %s", txInfo.NftIndex, err.Error()) - return "", err - } - if nftInfo.OwnerAccountIndex != txInfo.AccountIndex { - logx.Errorf("not nft owner") - return "", errorcode.RpcErrInvalidTxField.RefineError("not nft owner") - } - - txInfo.CreatorAccountIndex = nftInfo.CreatorAccountIndex - txInfo.CreatorAccountNameHash = common.FromHex(accountInfoMap[nftInfo.CreatorAccountIndex].AccountNameHash) - txInfo.CreatorTreasuryRate = 
nftInfo.CreatorTreasuryRate - txInfo.NftContentHash = common.FromHex(nftInfo.NftContentHash) - txInfo.NftL1Address = nftInfo.NftL1Address - txInfo.NftL1TokenId, _ = new(big.Int).SetString(nftInfo.NftL1TokenId, 10) - txInfo.CollectionId = nftInfo.CollectionId - - var ( - txDetails []*mempool.MempoolTxDetail - ) - // verify tx - txDetails, err = txVerification.VerifyWithdrawNftTxInfo( - accountInfoMap, - nftInfo, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // delete key - key := util.GetNftKeyForRead(txInfo.NftIndex) - _, err = svcCtx.RedisConnection.Del(key) - if err != nil { - logx.Errorf("unable to delete key from redis, key: %s, err: %s", key, err.Error()) - return "", errorcode.RpcErrInternal - } - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeWithdrawNft, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - txInfo.NftIndex, - commonConstant.NilPairIndex, - commonConstant.NilAssetId, - commonConstant.NilAssetAmountStr, - "", - string(txInfoBytes), - "", - txInfo.AccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeWithdrawNft, txInfo, err) - return "", errorcode.RpcErrInternal - } - // update redis - var formatNftInfo *commonAsset.NftInfo - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType == commonAsset.NftAssetType { - formatNftInfo, err = commonAsset.ParseNftInfo(txDetail.BalanceDelta) - if err != nil { - logx.Errorf("unable to parse nft info: %s", err.Error()) - return txId, nil - } - } - } - nftInfoBytes, err := json.Marshal(formatNftInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return txId, nil - } - _ = svcCtx.RedisConnection.Setex(key, string(nftInfoBytes), globalmapHandler.NftExpiryTime) - - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawtx.go b/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawtx.go deleted file mode 100644 index 909352168..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/sendwithdrawtx.go +++ /dev/null @@ -1,97 +0,0 @@ -package sendrawtx - -import ( - "context" - "encoding/json" - - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/zcrypto/txVerification" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func SendWithdrawTx(ctx context.Context, svcCtx *svc.ServiceContext, commglobalmap commglobalmap.Commglobalmap, rawTxInfo string) (txId string, err error) { - txInfo, err := commonTx.ParseWithdrawTxInfo(rawTxInfo) - if err != nil { - logx.Errorf("cannot parse tx err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTx - } - - if err := legendTxTypes.ValidateWithdrawTxInfo(txInfo); err != 
nil { - logx.Errorf("cannot pass static check, err: %s", err.Error()) - return "", errorcode.RpcErrInvalidTxField.RefineError(err) - } - - if err := CheckGasAccountIndex(txInfo.GasAccountIndex, svcCtx.SysConfigModel); err != nil { - return "", err - } - - var ( - accountInfoMap = make(map[int64]*commonAsset.AccountInfo) - ) - accountInfoMap[txInfo.FromAccountIndex], err = commglobalmap.GetLatestAccountInfo(ctx, txInfo.FromAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid FromAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.FromAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - if accountInfoMap[txInfo.GasAccountIndex] == nil { - accountInfoMap[txInfo.GasAccountIndex], err = commglobalmap.GetBasicAccountInfo(ctx, txInfo.GasAccountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return "", errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - logx.Errorf("unable to get account info by index: %d, err: %s", txInfo.GasAccountIndex, err.Error()) - return "", errorcode.RpcErrInternal - } - } - - var ( - txDetails []*mempool.MempoolTxDetail - ) - txDetails, err = txVerification.VerifyWithdrawTxInfo( - accountInfoMap, - txInfo, - ) - if err != nil { - return "", errorcode.RpcErrVerification.RefineError(err) - } - - // write into mempool - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal tx, err: %s", err.Error()) - return "", errorcode.RpcErrInternal - } - txId, mempoolTx := ConstructMempoolTx( - commonTx.TxTypeWithdraw, - txInfo.GasFeeAssetId, - txInfo.GasFeeAssetAmount.String(), - commonConstant.NilTxNftIndex, - commonConstant.NilPairIndex, - txInfo.AssetId, - txInfo.AssetAmount.String(), - txInfo.ToAddress, - string(txInfoBytes), - "", - txInfo.FromAccountIndex, - txInfo.Nonce, - txInfo.ExpiredAt, - txDetails, - ) - if err := svcCtx.MempoolModel.CreateBatchedMempoolTxs([]*mempool.MempoolTx{mempoolTx}); err != nil { - logx.Errorf("fail to create mempool tx: %v, err: %s", mempoolTx, err.Error()) - _ = CreateFailTx(svcCtx.FailTxModel, commonTx.TxTypeWithdraw, txInfo, err) - return "", errorcode.RpcErrInternal - } - return txId, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendrawtx/utils.go b/service/rpc/globalRPC/internal/logic/sendrawtx/utils.go deleted file mode 100644 index a25b86910..000000000 --- a/service/rpc/globalRPC/internal/logic/sendrawtx/utils.go +++ /dev/null @@ -1,117 +0,0 @@ -package sendrawtx - -import ( - "encoding/base64" - "encoding/json" - "strconv" - - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/common/sysconfigName" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" -) - -func CheckGasAccountIndex(txGasAccountIndex int64, sysConfigModel sysconfig.SysconfigModel) error { - gasAccountIndexConfig, err := sysConfigModel.GetSysconfigByName(sysconfigName.GasAccountIndex) - if err != nil { - logx.Errorf("fail to get config: %s, err: %s", sysconfigName.GasAccountIndex, err.Error()) - return errorcode.RpcErrInternal - } - gasAccountIndex, err := strconv.ParseInt(gasAccountIndexConfig.Value, 10, 64) - if err != nil { - 
logx.Errorf("cannot parse int :%s, err: %s", gasAccountIndexConfig.Value, err.Error()) - return errorcode.RpcErrInternal - } - if gasAccountIndex != txGasAccountIndex { - logx.Errorf("invalid gas account index, expected: %d, actual: %d", gasAccountIndex, txGasAccountIndex) - return errorcode.RpcErrInvalidTxField.RefineError("invalid GasAccountIndex") - } - return nil -} - -func ComputeL2TxTxHash(txInfo string) string { - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(txInfo)) - return base64.StdEncoding.EncodeToString(hFunc.Sum(nil)) -} - -func ConstructMempoolTx( - txType int64, - gasFeeAssetId int64, - gasFeeAssetAmount string, - nftIndex int64, - pairIndex int64, - assetId int64, - txAmount string, - toAddress string, - txInfo string, - memo string, - accountIndex int64, - nonce int64, - expiredAt int64, - txDetails []*mempool.MempoolTxDetail, -) (txId string, mempoolTx *mempool.MempoolTx) { - txId = ComputeL2TxTxHash(txInfo) - return txId, &mempool.MempoolTx{ - TxHash: txId, - TxType: txType, - GasFeeAssetId: gasFeeAssetId, - GasFee: gasFeeAssetAmount, - NftIndex: nftIndex, - PairIndex: pairIndex, - AssetId: assetId, - TxAmount: txAmount, - NativeAddress: toAddress, - MempoolDetails: txDetails, - TxInfo: txInfo, - ExtraInfo: "", - Memo: memo, - AccountIndex: accountIndex, - Nonce: nonce, - ExpiredAt: expiredAt, - L2BlockHeight: commonConstant.NilBlockHeight, - Status: mempool.PendingTxStatus, - } -} - -func CreateFailTx(failTxModel tx.FailTxModel, txType int, txInfo interface{}, error error) error { - txHash := util.RandomUUID() - nativeAddress := "0x00" - txMarshaled, err := json.Marshal(txInfo) - if err != nil { - logx.Errorf("unable to marshal, error: %s", err.Error()) - return err - } - // write into fail tx - failTx := &tx.FailTx{ - // transaction id, is primary key - TxHash: txHash, - // transaction type - TxType: int64(txType), - // tx status, 1 - success(default), 2 - failure - TxStatus: tx.StatusFail, - // l1asset id - AssetAId: commonConstant.NilAssetId, - // AssetBId - AssetBId: commonConstant.NilAssetId, - // tx amount - TxAmount: commonConstant.NilAssetAmountStr, - // layer1 address - NativeAddress: nativeAddress, - // tx proof - TxInfo: string(txMarshaled), - // extra info, if tx fails, show the error info - ExtraInfo: error.Error(), - // native memo info - Memo: "", - } - - err = failTxModel.CreateFailTx(failTx) - return err -} diff --git a/service/rpc/globalRPC/internal/logic/sendremoveliquiditytxlogic.go b/service/rpc/globalRPC/internal/logic/sendremoveliquiditytxlogic.go deleted file mode 100644 index 9d8d562a3..000000000 --- a/service/rpc/globalRPC/internal/logic/sendremoveliquiditytxlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendRemoveLiquidityTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendRemoveLiquidityTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendRemoveLiquidityTxLogic { - return &SendRemoveLiquidityTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendRemoveLiquidityTxLogic) SendRemoveLiquidityTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendRemoveLiquidityTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendswaptxlogic.go b/service/rpc/globalRPC/internal/logic/sendswaptxlogic.go deleted file mode 100644 index 249772d7b..000000000 --- a/service/rpc/globalRPC/internal/logic/sendswaptxlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendSwapTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendSwapTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendSwapTxLogic { - return &SendSwapTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendSwapTxLogic) SendSwapTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendSwapTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendtransfernfttxlogic.go b/service/rpc/globalRPC/internal/logic/sendtransfernfttxlogic.go deleted file mode 100644 index bc504f1f8..000000000 --- a/service/rpc/globalRPC/internal/logic/sendtransfernfttxlogic.go +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/failtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/sysconf" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendTransferNftTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap - failtx failtx.Model - sysconf sysconf.Model -} - -func NewSendTransferNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTransferNftTxLogic { - return &SendTransferNftTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - failtx: failtx.New(svcCtx), - sysconf: sysconf.New(svcCtx), - } -} - -func (l *SendTransferNftTxLogic) SendTransferNftTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendTransferNftTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendtransfertxlogic.go b/service/rpc/globalRPC/internal/logic/sendtransfertxlogic.go deleted file mode 100644 index 468e00c7c..000000000 --- a/service/rpc/globalRPC/internal/logic/sendtransfertxlogic.go +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendTransferTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendTransferTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTransferTxLogic { - return &SendTransferTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendTransferTxLogic) SendTransferTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendTransferTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendtxlogic.go b/service/rpc/globalRPC/internal/logic/sendtxlogic.go deleted file mode 100644 index aaa84cc3d..000000000 --- a/service/rpc/globalRPC/internal/logic/sendtxlogic.go +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendTxLogic { - return &SendTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} - -func (l *SendTxLogic) SendTx(in *globalRPCProto.ReqSendTx) (resp *globalRPCProto.RespSendTx, err error) { - resp = &globalRPCProto.RespSendTx{} - switch in.TxType { - case commonTx.TxTypeTransfer: - resp.TxId, err = sendrawtx.SendTransferTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendTransferTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeSwap: - resp.TxId, err = sendrawtx.SendSwapTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendSwapTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeAddLiquidity: - resp.TxId, err = sendrawtx.SendAddLiquidityTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendAddLiquidityTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeRemoveLiquidity: - resp.TxId, err = sendrawtx.SendRemoveLiquidityTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendRemoveLiquidityTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeWithdraw: - resp.TxId, err = sendrawtx.SendWithdrawTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendWithdrawTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeTransferNft: - resp.TxId, err = sendrawtx.SendTransferNftTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendWithdrawTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeAtomicMatch: - resp.TxId, err = sendrawtx.SendAtomicMatchTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendWithdrawTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeCancelOffer: - resp.TxId, err = sendrawtx.SendCancelOfferTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendWithdrawTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeWithdrawNft: - resp.TxId, err = sendrawtx.SendWithdrawNftTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - logx.Errorf("[sendWithdrawTx] err: %s", err.Error()) - return nil, err - } - case commonTx.TxTypeOffer: - return nil, errorcode.RpcErrInvalidTxType - default: - logx.Errorf("[sendtxlogic] invalid tx type: %s", in.TxType) - return nil, errorcode.RpcErrInvalidTxType - } - return resp, err -} diff --git a/service/rpc/globalRPC/internal/logic/sendwithdrawnfttxlogic.go b/service/rpc/globalRPC/internal/logic/sendwithdrawnfttxlogic.go deleted file mode 100644 index f6269213f..000000000 --- a/service/rpc/globalRPC/internal/logic/sendwithdrawnfttxlogic.go +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright © 2021 Zkbas Protocol - * - 
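SendTx above routes each commonTx.TxType* constant to the matching sendrawtx.Send*Tx helper, all of which share the same (ctx, svcCtx, commglobalmap, raw tx JSON) to (txId, error) shape; note that the transfer-NFT, atomic-match, cancel-offer and withdraw-NFT branches all reuse the [sendWithdrawTx] log prefix, a copy-paste slip the switch makes easy. A hedged sketch of the same routing as a lookup table; the txSender type and the senders map are illustrative names, not part of the original code:

```go
package sendtx

import (
	"context"
	"fmt"
)

// txSender is the common shape of the sendrawtx.Send*Tx helpers once the
// service context and commglobalmap dependencies have been bound in.
type txSender func(ctx context.Context, rawTxInfo string) (txID string, err error)

// dispatch performs the same routing as the switch in SendTx, expressed as a
// map lookup; the int64 keys stand in for the commonTx.TxType* constants.
func dispatch(ctx context.Context, senders map[int64]txSender, txType int64, rawTxInfo string) (string, error) {
	send, ok := senders[txType]
	if !ok {
		return "", fmt.Errorf("invalid tx type: %d", txType)
	}
	return send(ctx, rawTxInfo)
}
```

A table-driven form like this also gives each branch a distinct log prefix for free, since the tx type is available at the single call site.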
* Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendWithdrawNftTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendWithdrawNftTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendWithdrawNftTxLogic { - return &SendWithdrawNftTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} -func (l *SendWithdrawNftTxLogic) SendWithdrawNftTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendWithdrawNftTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/logic/sendwithdrawtxlogic.go b/service/rpc/globalRPC/internal/logic/sendwithdrawtxlogic.go deleted file mode 100644 index 1e52a2721..000000000 --- a/service/rpc/globalRPC/internal/logic/sendwithdrawtxlogic.go +++ /dev/null @@ -1,55 +0,0 @@ -/* - * - * * Copyright © 2021 Zkbas Protocol - * * - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * - */ - -package logic - -import ( - "context" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic/sendrawtx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type SendWithdrawTxLogic struct { - ctx context.Context - svcCtx *svc.ServiceContext - logx.Logger - commglobalmap commglobalmap.Commglobalmap -} - -func NewSendWithdrawTxLogic(ctx context.Context, svcCtx *svc.ServiceContext) *SendWithdrawTxLogic { - return &SendWithdrawTxLogic{ - ctx: ctx, - svcCtx: svcCtx, - Logger: logx.WithContext(ctx), - commglobalmap: commglobalmap.New(svcCtx), - } -} -func (l *SendWithdrawTxLogic) SendWithdrawTx(in *globalRPCProto.ReqSendTxByRawInfo) (respSendTx *globalRPCProto.RespSendTx, err error) { - respSendTx = &globalRPCProto.RespSendTx{} - txId, err := sendrawtx.SendWithdrawTx(l.ctx, l.svcCtx, l.commglobalmap, in.TxInfo) - if err != nil { - return nil, err - } - respSendTx.TxId = txId - return respSendTx, nil -} diff --git a/service/rpc/globalRPC/internal/repo/commglobalmap/accountinfo.go b/service/rpc/globalRPC/internal/repo/commglobalmap/accountinfo.go deleted file mode 100644 index 9237ad86d..000000000 --- a/service/rpc/globalRPC/internal/repo/commglobalmap/accountinfo.go +++ /dev/null @@ -1,152 +0,0 @@ -package commglobalmap - -import ( - "context" - "errors" - "strconv" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -func (m *model) GetLatestAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - f := func() (interface{}, error) { - accountInfo, err := m.GetLatestAccountInfo(ctx, accountIndex) - if err != nil { - return nil, err - } - account, err := commonAsset.FromFormatAccountInfo(accountInfo) - if err != nil { - return nil, err - } - return account, nil - } - accountInfo := &account.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex), accountInfo, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ := value.(*account.Account) - res, err := commonAsset.ToFormatAccountInfo(account) - if err != nil { - return nil, err - } - return res, nil -} - -func (m *model) SetLatestAccountInfoInToCache(ctx context.Context, accountIndex int64) error { - accountInfo, err := m.GetLatestAccountInfo(ctx, accountIndex) - if err != nil { - return err - } - account, err := commonAsset.FromFormatAccountInfo(accountInfo) - if err != nil { - return err - } - if err := m.cache.Set(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex), account, multcache.AccountTtl); err != nil { - return err - } - return nil -} - -func (m *model) DeleteLatestAccountInfoInCache(ctx context.Context, accountIndex int64) error { - return m.cache.Delete(ctx, multcache.SpliceCacheKeyAccountByAccountIndex(accountIndex)) -} - -func (m *model) GetLatestAccountInfo(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - oAccountInfo, err := m.accountModel.GetAccountByAccountIndex(accountIndex) - if err != nil { - return nil, err - } - accountInfo, err := commonAsset.ToFormatAccountInfo(oAccountInfo) - if err != nil 
{ - return nil, err - } - mempoolTxs, err := m.mempoolModel.GetPendingMempoolTxsByAccountIndex(accountIndex) - if err != nil && err != errorcode.DbErrNotFound { - return nil, err - } - for _, mempoolTx := range mempoolTxs { - if mempoolTx.Nonce != commonConstant.NilNonce { - accountInfo.Nonce = mempoolTx.Nonce - } - for _, mempoolTxDetail := range mempoolTx.MempoolDetails { - if mempoolTxDetail.AccountIndex != accountIndex { - continue - } - switch mempoolTxDetail.AssetType { - case commonAsset.GeneralAssetType: - if accountInfo.AssetInfo[mempoolTxDetail.AssetId] == nil { - accountInfo.AssetInfo[mempoolTxDetail.AssetId] = &commonAsset.AccountAsset{ - AssetId: mempoolTxDetail.AssetId, - Balance: util.ZeroBigInt, - LpAmount: util.ZeroBigInt, - OfferCanceledOrFinalized: util.ZeroBigInt, - } - } - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.GeneralAssetType, - accountInfo.AssetInfo[mempoolTxDetail.AssetId].String(), mempoolTxDetail.BalanceDelta) - if err != nil { - return nil, err - } - accountInfo.AssetInfo[mempoolTxDetail.AssetId], err = commonAsset.ParseAccountAsset(nBalance) - if err != nil { - return nil, err - } - case commonAsset.CollectionNonceAssetType: - accountInfo.CollectionNonce, err = strconv.ParseInt(mempoolTxDetail.BalanceDelta, 10, 64) - if err != nil { - return nil, err - } - case commonAsset.LiquidityAssetType: - case commonAsset.NftAssetType: - default: - return nil, errors.New("invalid asset type") - } - } - } - accountInfo.Nonce = accountInfo.Nonce + 1 - accountInfo.CollectionNonce = accountInfo.CollectionNonce + 1 - return accountInfo, nil -} - -func (m *model) GetBasicAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - f := func() (interface{}, error) { - accountInfo, err := m.GetBasicAccountInfo(ctx, accountIndex) - if err != nil { - return nil, err - } - account, err := commonAsset.FromFormatAccountInfo(accountInfo) - if err != nil { - return nil, err - } - return account, nil - } - accountInfo := &account.Account{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyBasicAccountByAccountIndex(accountIndex), accountInfo, multcache.AccountTtl, f) - if err != nil { - return nil, err - } - account, _ := value.(*account.Account) - res, err := commonAsset.ToFormatAccountInfo(account) - if err != nil { - return nil, err - } - return res, nil -} - -func (m *model) GetBasicAccountInfo(ctx context.Context, accountIndex int64) (accountInfo *commonAsset.AccountInfo, err error) { - oAccountInfo, err := m.accountModel.GetAccountByAccountIndex(accountIndex) - if err != nil { - return nil, err - } - accountInfo, err = commonAsset.ToFormatAccountInfo(oAccountInfo) - if err != nil { - return nil, err - } - return accountInfo, nil -} diff --git a/service/rpc/globalRPC/internal/repo/commglobalmap/api.go b/service/rpc/globalRPC/internal/repo/commglobalmap/api.go deleted file mode 100644 index 04c4bc49e..000000000 --- a/service/rpc/globalRPC/internal/repo/commglobalmap/api.go +++ /dev/null @@ -1,59 +0,0 @@ -package commglobalmap - -import ( - "context" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - commGlobalmapHandler "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -//go:generate mockgen -source api.go -destination 
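GetLatestAccountInfo above builds the "latest" view by loading the committed account row and then replaying the balance deltas of that account's pending mempool transactions on top of it, before bumping the nonce and collection nonce for the next transaction. A stripped-down sketch of the overlay step, using simplified types instead of the real commonAsset structures:

```go
package overlay

import "math/big"

// pendingDelta is a simplified stand-in for a mempool tx detail's balance delta.
type pendingDelta struct {
	AssetID int64
	Amount  *big.Int // positive or negative change carried by the pending tx
}

// applyPending replays pending deltas onto the committed balances, the same
// idea as the GeneralAssetType branch in GetLatestAccountInfo.
func applyPending(committed map[int64]*big.Int, pending []pendingDelta) map[int64]*big.Int {
	latest := make(map[int64]*big.Int, len(committed))
	for id, bal := range committed {
		latest[id] = new(big.Int).Set(bal)
	}
	for _, d := range pending {
		if latest[d.AssetID] == nil {
			latest[d.AssetID] = big.NewInt(0) // asset not seen in committed state yet
		}
		latest[d.AssetID].Add(latest[d.AssetID], d.Amount)
	}
	return latest
}
```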
api_mock.go -package commglobalmap - -type GlobalAssetInfo struct { - AccountIndex int64 - AssetId int64 - AssetType int64 - ChainId int64 - BaseBalanceEnc string -} - -type Commglobalmap interface { - GetLatestAccountInfo(ctx context.Context, accountIndex int64) (accountInfo *commonAsset.AccountInfo, err error) - GetLatestAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) - SetLatestAccountInfoInToCache(ctx context.Context, accountIndex int64) error - DeleteLatestAccountInfoInCache(ctx context.Context, accountIndex int64) error - - GetBasicAccountInfo(ctx context.Context, accountIndex int64) (accountInfo *commonAsset.AccountInfo, err error) - GetBasicAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) - - GetLatestLiquidityInfoForReadWithCache(ctx context.Context, pairIndex int64) (liquidityInfo *commGlobalmapHandler.LiquidityInfo, err error) - GetLatestLiquidityInfoForRead(ctx context.Context, pairIndex int64) (liquidityInfo *commGlobalmapHandler.LiquidityInfo, err error) - GetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) (liquidityInfo *commGlobalmapHandler.LiquidityInfo, err error) - SetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) error - DeleteLatestLiquidityInfoForWriteInCache(ctx context.Context, pairIndex int64) error - - GetLatestOfferIdForWrite(ctx context.Context, accountIndex int64) (nftIndex int64, err error) - - GetLatestNftInfoForRead(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) - GetLatestNftInfoForReadWithCache(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) - SetLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error - DeleteLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error -} - -func New(svcCtx *svc.ServiceContext) Commglobalmap { - return &model{ - mempoolModel: mempool.NewMempoolModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - mempoolTxDetailModel: mempool.NewMempoolDetailModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - accountModel: account.NewAccountModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - liquidityModel: liquidity.NewLiquidityModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - redisConnection: svcCtx.RedisConnection, - nftModel: nft.NewL2NftModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - offerModel: nft.NewOfferModel(svcCtx.Conn, svcCtx.Config.CacheRedis, svcCtx.GormPointer), - cache: svcCtx.Cache, - } -} diff --git a/service/rpc/globalRPC/internal/repo/commglobalmap/api_mock.go b/service/rpc/globalRPC/internal/repo/commglobalmap/api_mock.go deleted file mode 100644 index 646ff3321..000000000 --- a/service/rpc/globalRPC/internal/repo/commglobalmap/api_mock.go +++ /dev/null @@ -1,271 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: api.go - -// Package commglobalmap is a generated GoMock package. -package commglobalmap - -import ( - context "context" - reflect "reflect" - - commonAsset "github.com/bnb-chain/zkbas/common/commonAsset" - globalmapHandler "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - gomock "github.com/golang/mock/gomock" -) - -// MockCommglobalmap is a mock of Commglobalmap interface. -type MockCommglobalmap struct { - ctrl *gomock.Controller - recorder *MockCommglobalmapMockRecorder -} - -// MockCommglobalmapMockRecorder is the mock recorder for MockCommglobalmap. 
-type MockCommglobalmapMockRecorder struct { - mock *MockCommglobalmap -} - -// NewMockCommglobalmap creates a new mock instance. -func NewMockCommglobalmap(ctrl *gomock.Controller) *MockCommglobalmap { - mock := &MockCommglobalmap{ctrl: ctrl} - mock.recorder = &MockCommglobalmapMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockCommglobalmap) EXPECT() *MockCommglobalmapMockRecorder { - return m.recorder -} - -// DeleteLatestAccountInfoInCache mocks base method. -func (m *MockCommglobalmap) DeleteLatestAccountInfoInCache(ctx context.Context, accountIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteLatestAccountInfoInCache", ctx, accountIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteLatestAccountInfoInCache indicates an expected call of DeleteLatestAccountInfoInCache. -func (mr *MockCommglobalmapMockRecorder) DeleteLatestAccountInfoInCache(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLatestAccountInfoInCache", reflect.TypeOf((*MockCommglobalmap)(nil).DeleteLatestAccountInfoInCache), ctx, accountIndex) -} - -// DeleteLatestLiquidityInfoForWriteInCache mocks base method. -func (m *MockCommglobalmap) DeleteLatestLiquidityInfoForWriteInCache(ctx context.Context, pairIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteLatestLiquidityInfoForWriteInCache", ctx, pairIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteLatestLiquidityInfoForWriteInCache indicates an expected call of DeleteLatestLiquidityInfoForWriteInCache. -func (mr *MockCommglobalmapMockRecorder) DeleteLatestLiquidityInfoForWriteInCache(ctx, pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLatestLiquidityInfoForWriteInCache", reflect.TypeOf((*MockCommglobalmap)(nil).DeleteLatestLiquidityInfoForWriteInCache), ctx, pairIndex) -} - -// DeleteLatestNftInfoForReadInCache mocks base method. -func (m *MockCommglobalmap) DeleteLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteLatestNftInfoForReadInCache", ctx, nftIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteLatestNftInfoForReadInCache indicates an expected call of DeleteLatestNftInfoForReadInCache. -func (mr *MockCommglobalmapMockRecorder) DeleteLatestNftInfoForReadInCache(ctx, nftIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLatestNftInfoForReadInCache", reflect.TypeOf((*MockCommglobalmap)(nil).DeleteLatestNftInfoForReadInCache), ctx, nftIndex) -} - -// GetBasicAccountInfo mocks base method. -func (m *MockCommglobalmap) GetBasicAccountInfo(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBasicAccountInfo", ctx, accountIndex) - ret0, _ := ret[0].(*commonAsset.AccountInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBasicAccountInfo indicates an expected call of GetBasicAccountInfo. 
-func (mr *MockCommglobalmapMockRecorder) GetBasicAccountInfo(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBasicAccountInfo", reflect.TypeOf((*MockCommglobalmap)(nil).GetBasicAccountInfo), ctx, accountIndex) -} - -// GetBasicAccountInfoWithCache mocks base method. -func (m *MockCommglobalmap) GetBasicAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBasicAccountInfoWithCache", ctx, accountIndex) - ret0, _ := ret[0].(*commonAsset.AccountInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBasicAccountInfoWithCache indicates an expected call of GetBasicAccountInfoWithCache. -func (mr *MockCommglobalmapMockRecorder) GetBasicAccountInfoWithCache(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBasicAccountInfoWithCache", reflect.TypeOf((*MockCommglobalmap)(nil).GetBasicAccountInfoWithCache), ctx, accountIndex) -} - -// GetLatestAccountInfo mocks base method. -func (m *MockCommglobalmap) GetLatestAccountInfo(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestAccountInfo", ctx, accountIndex) - ret0, _ := ret[0].(*commonAsset.AccountInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestAccountInfo indicates an expected call of GetLatestAccountInfo. -func (mr *MockCommglobalmapMockRecorder) GetLatestAccountInfo(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestAccountInfo", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestAccountInfo), ctx, accountIndex) -} - -// GetLatestAccountInfoWithCache mocks base method. -func (m *MockCommglobalmap) GetLatestAccountInfoWithCache(ctx context.Context, accountIndex int64) (*commonAsset.AccountInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestAccountInfoWithCache", ctx, accountIndex) - ret0, _ := ret[0].(*commonAsset.AccountInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestAccountInfoWithCache indicates an expected call of GetLatestAccountInfoWithCache. -func (mr *MockCommglobalmapMockRecorder) GetLatestAccountInfoWithCache(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestAccountInfoWithCache", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestAccountInfoWithCache), ctx, accountIndex) -} - -// GetLatestLiquidityInfoForRead mocks base method. -func (m *MockCommglobalmap) GetLatestLiquidityInfoForRead(ctx context.Context, pairIndex int64) (*globalmapHandler.LiquidityInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestLiquidityInfoForRead", ctx, pairIndex) - ret0, _ := ret[0].(*globalmapHandler.LiquidityInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestLiquidityInfoForRead indicates an expected call of GetLatestLiquidityInfoForRead. -func (mr *MockCommglobalmapMockRecorder) GetLatestLiquidityInfoForRead(ctx, pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestLiquidityInfoForRead", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestLiquidityInfoForRead), ctx, pairIndex) -} - -// GetLatestLiquidityInfoForReadWithCache mocks base method. 
-func (m *MockCommglobalmap) GetLatestLiquidityInfoForReadWithCache(ctx context.Context, pairIndex int64) (*globalmapHandler.LiquidityInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestLiquidityInfoForReadWithCache", ctx, pairIndex) - ret0, _ := ret[0].(*globalmapHandler.LiquidityInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestLiquidityInfoForReadWithCache indicates an expected call of GetLatestLiquidityInfoForReadWithCache. -func (mr *MockCommglobalmapMockRecorder) GetLatestLiquidityInfoForReadWithCache(ctx, pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestLiquidityInfoForReadWithCache", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestLiquidityInfoForReadWithCache), ctx, pairIndex) -} - -// GetLatestLiquidityInfoForWrite mocks base method. -func (m *MockCommglobalmap) GetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) (*globalmapHandler.LiquidityInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestLiquidityInfoForWrite", ctx, pairIndex) - ret0, _ := ret[0].(*globalmapHandler.LiquidityInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestLiquidityInfoForWrite indicates an expected call of GetLatestLiquidityInfoForWrite. -func (mr *MockCommglobalmapMockRecorder) GetLatestLiquidityInfoForWrite(ctx, pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestLiquidityInfoForWrite", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestLiquidityInfoForWrite), ctx, pairIndex) -} - -// GetLatestNftInfoForRead mocks base method. -func (m *MockCommglobalmap) GetLatestNftInfoForRead(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestNftInfoForRead", ctx, nftIndex) - ret0, _ := ret[0].(*commonAsset.NftInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestNftInfoForRead indicates an expected call of GetLatestNftInfoForRead. -func (mr *MockCommglobalmapMockRecorder) GetLatestNftInfoForRead(ctx, nftIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestNftInfoForRead", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestNftInfoForRead), ctx, nftIndex) -} - -// GetLatestNftInfoForReadWithCache mocks base method. -func (m *MockCommglobalmap) GetLatestNftInfoForReadWithCache(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestNftInfoForReadWithCache", ctx, nftIndex) - ret0, _ := ret[0].(*commonAsset.NftInfo) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestNftInfoForReadWithCache indicates an expected call of GetLatestNftInfoForReadWithCache. -func (mr *MockCommglobalmapMockRecorder) GetLatestNftInfoForReadWithCache(ctx, nftIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestNftInfoForReadWithCache", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestNftInfoForReadWithCache), ctx, nftIndex) -} - -// GetLatestOfferIdForWrite mocks base method. 
-func (m *MockCommglobalmap) GetLatestOfferIdForWrite(ctx context.Context, accountIndex int64) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestOfferIdForWrite", ctx, accountIndex) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestOfferIdForWrite indicates an expected call of GetLatestOfferIdForWrite. -func (mr *MockCommglobalmapMockRecorder) GetLatestOfferIdForWrite(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestOfferIdForWrite", reflect.TypeOf((*MockCommglobalmap)(nil).GetLatestOfferIdForWrite), ctx, accountIndex) -} - -// SetLatestAccountInfoInToCache mocks base method. -func (m *MockCommglobalmap) SetLatestAccountInfoInToCache(ctx context.Context, accountIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SetLatestAccountInfoInToCache", ctx, accountIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// SetLatestAccountInfoInToCache indicates an expected call of SetLatestAccountInfoInToCache. -func (mr *MockCommglobalmapMockRecorder) SetLatestAccountInfoInToCache(ctx, accountIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetLatestAccountInfoInToCache", reflect.TypeOf((*MockCommglobalmap)(nil).SetLatestAccountInfoInToCache), ctx, accountIndex) -} - -// SetLatestLiquidityInfoForWrite mocks base method. -func (m *MockCommglobalmap) SetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SetLatestLiquidityInfoForWrite", ctx, pairIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// SetLatestLiquidityInfoForWrite indicates an expected call of SetLatestLiquidityInfoForWrite. -func (mr *MockCommglobalmapMockRecorder) SetLatestLiquidityInfoForWrite(ctx, pairIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetLatestLiquidityInfoForWrite", reflect.TypeOf((*MockCommglobalmap)(nil).SetLatestLiquidityInfoForWrite), ctx, pairIndex) -} - -// SetLatestNftInfoForReadInCache mocks base method. -func (m *MockCommglobalmap) SetLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SetLatestNftInfoForReadInCache", ctx, nftIndex) - ret0, _ := ret[0].(error) - return ret0 -} - -// SetLatestNftInfoForReadInCache indicates an expected call of SetLatestNftInfoForReadInCache. 
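MockCommglobalmap above is the standard mockgen output for the Commglobalmap interface, which lets the sendrawtx logic be unit-tested without a live database or Redis. A hedged usage sketch; the test name and the canned return value are illustrative:

```go
package sendrawtx_test

import (
	"context"
	"testing"

	"github.com/golang/mock/gomock"

	"github.com/bnb-chain/zkbas/common/commonAsset"
	"github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/repo/commglobalmap"
)

func TestWithMockedGlobalMap(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	m := commglobalmap.NewMockCommglobalmap(ctrl)
	// Expect exactly one lookup of account index 2 and return a canned value.
	m.EXPECT().
		GetLatestAccountInfo(gomock.Any(), int64(2)).
		Return(&commonAsset.AccountInfo{}, nil)

	if _, err := m.GetLatestAccountInfo(context.Background(), 2); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}
```

In practice the mock is handed to the code under test in place of the real commglobalmap.New(svcCtx) value.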
-func (mr *MockCommglobalmapMockRecorder) SetLatestNftInfoForReadInCache(ctx, nftIndex interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetLatestNftInfoForReadInCache", reflect.TypeOf((*MockCommglobalmap)(nil).SetLatestNftInfoForReadInCache), ctx, nftIndex) -} diff --git a/service/rpc/globalRPC/internal/repo/commglobalmap/liquidityinfo.go b/service/rpc/globalRPC/internal/repo/commglobalmap/liquidityinfo.go deleted file mode 100644 index 78fdf0d2b..000000000 --- a/service/rpc/globalRPC/internal/repo/commglobalmap/liquidityinfo.go +++ /dev/null @@ -1,153 +0,0 @@ -package commglobalmap - -import ( - "context" - "encoding/json" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - commGlobalmapHandler "github.com/bnb-chain/zkbas/common/util/globalmapHandler" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -func (m *model) GetLatestLiquidityInfoForReadWithCache(ctx context.Context, pairIndex int64) (*commGlobalmapHandler.LiquidityInfo, error) { - // f := func() (interface{}, error) { - // tmpLiquidity, err := m.GetLatestLiquidityInfoForRead(ctx, pairIndex) - // if err != nil { - // return nil, err - // } - // infoBytes, err := json.Marshal(tmpLiquidity) - // if err != nil { - // logx.Errorf("[json.Marshal] unable to marshal: %v", err) - // return nil, err - // } - // return &infoBytes, nil - // } - // var byteLiquidity []byte - // value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyLiquidityByPairIndex(pairIndex), &byteLiquidity, 1, f) - // if err != nil { - // return nil, err - // } - // res, _ := value.(*[]byte) - // liquidity := &commGlobalmapHandler.LiquidityInfo{} - // err = json.Unmarshal([]byte(*res), &liquidity) - // if err != nil { - // logx.Errorf("[json.Unmarshal] unable to unmarshal liquidity info: %v", err) - // return nil, err - // } - // return liquidity, nil - return m.GetLatestLiquidityInfoForRead(ctx, pairIndex) - -} -func (m *model) GetLatestLiquidityInfoForRead(ctx context.Context, pairIndex int64) (liquidityInfo *commGlobalmapHandler.LiquidityInfo, err error) { - dbLiquidityInfo, err := m.liquidityModel.GetLiquidityByPairIndex(pairIndex) - if err != nil { - return nil, err - } - mempoolTxs, err := m.mempoolModel.GetPendingLiquidityTxs() - if err != nil { - if err != errorcode.DbErrNotFound { - return nil, err - } - } - liquidityInfo, err = commonAsset.ConstructLiquidityInfo( - pairIndex, - dbLiquidityInfo.AssetAId, - dbLiquidityInfo.AssetA, - dbLiquidityInfo.AssetBId, - dbLiquidityInfo.AssetB, - dbLiquidityInfo.LpAmount, - dbLiquidityInfo.KLast, - dbLiquidityInfo.FeeRate, - dbLiquidityInfo.TreasuryAccountIndex, - dbLiquidityInfo.TreasuryRate) - if err != nil { - return nil, err - } - for _, mempoolTx := range mempoolTxs { - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType != commonAsset.LiquidityAssetType || liquidityInfo.PairIndex != txDetail.AssetId { - continue - } - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.LiquidityAssetType, liquidityInfo.String(), txDetail.BalanceDelta) - if err != nil { - return nil, err - } - liquidityInfo, err = commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - return nil, err - } - } - } - // TODO: this set cache operation will be deleted in the future, we should use GetLatestLiquidityInfoForReadWithCache anywhere - // and delete the cache where mempool be changed - infoBytes, err := json.Marshal(liquidityInfo) - if err != nil { - 
logx.Errorf("[json.Marshal] unable to marshal, err : %s", err.Error()) - return nil, err - } - if err := m.cache.Set(ctx, multcache.SpliceCacheKeyLiquidityForReadByPairIndex(pairIndex), infoBytes, 1); err != nil { - return nil, err - } - return liquidityInfo, nil -} - -func (m *model) GetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) (liquidityInfo *commGlobalmapHandler.LiquidityInfo, err error) { - dbLiquidityInfo, err := m.liquidityModel.GetLiquidityByPairIndex(pairIndex) - if err != nil { - return nil, err - } - mempoolTxs, err := m.mempoolModel.GetPendingLiquidityTxs() - if err != nil && err != errorcode.DbErrNotFound { - return nil, err - } - liquidityInfo, err = commonAsset.ConstructLiquidityInfo( - pairIndex, - dbLiquidityInfo.AssetAId, - dbLiquidityInfo.AssetA, - dbLiquidityInfo.AssetBId, - dbLiquidityInfo.AssetB, - dbLiquidityInfo.LpAmount, - dbLiquidityInfo.KLast, - dbLiquidityInfo.FeeRate, - dbLiquidityInfo.TreasuryAccountIndex, - dbLiquidityInfo.TreasuryRate, - ) - if err != nil { - logx.Errorf("[ConstructLiquidityInfo] err: %s", err.Error()) - return nil, err - } - for _, mempoolTx := range mempoolTxs { - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType != commonAsset.LiquidityAssetType || liquidityInfo.PairIndex != txDetail.AssetId { - continue - } - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.LiquidityAssetType, liquidityInfo.String(), txDetail.BalanceDelta) - if err != nil { - return nil, err - } - liquidityInfo, err = commonAsset.ParseLiquidityInfo(nBalance) - if err != nil { - return nil, err - } - } - } - return liquidityInfo, nil -} - -func (m *model) SetLatestLiquidityInfoForWrite(ctx context.Context, pairIndex int64) error { - liquidityInfo, err := m.GetLatestLiquidityInfoForWrite(ctx, pairIndex) - if err != nil { - return err - } - if err := m.cache.Set(ctx, multcache.SpliceCacheKeyLiquidityInfoForWriteByPairIndex(pairIndex), liquidityInfo, 10); err != nil { - return err - } - return nil -} - -func (m *model) DeleteLatestLiquidityInfoForWriteInCache(ctx context.Context, pairIndex int64) error { - return m.cache.Delete(ctx, multcache.SpliceCacheKeyLiquidityInfoForWriteByPairIndex(pairIndex)) -} diff --git a/service/rpc/globalRPC/internal/repo/commglobalmap/nft.go b/service/rpc/globalRPC/internal/repo/commglobalmap/nft.go deleted file mode 100644 index 40242d6fd..000000000 --- a/service/rpc/globalRPC/internal/repo/commglobalmap/nft.go +++ /dev/null @@ -1,98 +0,0 @@ -package commglobalmap - -import ( - "context" - - "github.com/zeromicro/go-zero/core/stores/redis" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - mempoolModel mempool.MempoolModel - mempoolTxDetailModel mempool.MempoolTxDetailModel - accountModel account.AccountModel - liquidityModel liquidity.LiquidityModel - redisConnection *redis.Redis - offerModel nft.OfferModel - nftModel nft.L2NftModel - cache multcache.MultCache -} - -func (m *model) GetLatestOfferIdForWrite(ctx context.Context, accountIndex int64) (int64, error) { - lastOfferId, err := m.offerModel.GetLatestOfferId(accountIndex) - if err != nil { - if err == errorcode.DbErrNotFound { - return 0, nil - } - return -1, err - } - return lastOfferId, nil -} - -func (m *model) 
GetLatestNftInfoForRead(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) { - dbNftInfo, err := m.nftModel.GetNftAsset(nftIndex) - if err != nil { - return nil, err - } - mempoolTxs, err := m.mempoolModel.GetPendingNftTxs() - if err != nil && err != errorcode.DbErrNotFound { - return nil, err - } - nftInfo := commonAsset.ConstructNftInfo(nftIndex, dbNftInfo.CreatorAccountIndex, dbNftInfo.OwnerAccountIndex, dbNftInfo.NftContentHash, - dbNftInfo.NftL1TokenId, dbNftInfo.NftL1Address, dbNftInfo.CreatorTreasuryRate, dbNftInfo.CollectionId) - for _, mempoolTx := range mempoolTxs { - for _, txDetail := range mempoolTx.MempoolDetails { - if txDetail.AssetType != commonAsset.NftAssetType || txDetail.AssetId != nftInfo.NftIndex { - continue - } - nBalance, err := commonAsset.ComputeNewBalance(commonAsset.NftAssetType, nftInfo.String(), txDetail.BalanceDelta) - if err != nil { - return nil, err - } - nftInfo, err = commonAsset.ParseNftInfo(nBalance) - if err != nil { - return nil, err - } - } - } - return nftInfo, nil -} - -func (m *model) GetLatestNftInfoForReadWithCache(ctx context.Context, nftIndex int64) (*commonAsset.NftInfo, error) { - f := func() (interface{}, error) { - tmpNftInfo, err := m.GetLatestNftInfoForRead(ctx, nftIndex) - if err != nil { - return nil, err - } - return tmpNftInfo, nil - } - nftInfoType := &commonAsset.NftInfo{} - value, err := m.cache.GetWithSet(ctx, multcache.SpliceCacheKeyNftInfoByNftIndex(nftIndex), nftInfoType, multcache.NftTtl, f) - if err != nil { - return nil, err - } - nftInfo, _ := value.(*commonAsset.NftInfo) - return nftInfo, nil -} - -func (m *model) SetLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error { - nftInfo, err := m.GetLatestNftInfoForRead(ctx, nftIndex) - if err != nil { - return err - } - if err := m.cache.Set(ctx, multcache.SpliceCacheKeyNftInfoByNftIndex(nftIndex), nftInfo, multcache.NftTtl); err != nil { - return err - } - return nil -} - -func (m *model) DeleteLatestNftInfoForReadInCache(ctx context.Context, nftIndex int64) error { - return m.cache.Delete(ctx, multcache.SpliceCacheKeyNftInfoByNftIndex(nftIndex)) -} diff --git a/service/rpc/globalRPC/internal/repo/failtx/api.go b/service/rpc/globalRPC/internal/repo/failtx/api.go deleted file mode 100644 index 84c222a13..000000000 --- a/service/rpc/globalRPC/internal/repo/failtx/api.go +++ /dev/null @@ -1,20 +0,0 @@ -package failtx - -import ( - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -//go:generate mockgen -source api.go -destination api_mock.go -package failtx - -type Model interface { - CreateFailTx(failTx *table.FailTx) error -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `fail_tx`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/rpc/globalRPC/internal/repo/failtx/api_mock.go b/service/rpc/globalRPC/internal/repo/failtx/api_mock.go deleted file mode 100644 index 355af7216..000000000 --- a/service/rpc/globalRPC/internal/repo/failtx/api_mock.go +++ /dev/null @@ -1,49 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: api.go - -// Package failtx is a generated GoMock package. -package failtx - -import ( - reflect "reflect" - - gomock "github.com/golang/mock/gomock" - tx "github.com/bnb-chain/zkbas/common/model/tx" -) - -// MockModel is a mock of Model interface. 
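The *WithCache variants above all follow the same read-through pattern: look the value up under a spliced cache key, and on a miss run a loader that rebuilds it from the database plus pending mempool state, then store it with a TTL. A minimal sketch of that pattern with an illustrative in-memory cache; the real multcache.MultCache interface differs in detail:

```go
package readthrough

import (
	"context"
	"sync"
	"time"
)

// memCache is an illustrative stand-in for multcache.MultCache.
type memCache struct {
	mu   sync.Mutex
	data map[string]interface{}
}

// GetWithSet returns the cached value for key, running loader on a miss and
// storing the result; GetLatestNftInfoForReadWithCache and the sysconfig
// lookup below are built on this shape.
func (c *memCache) GetWithSet(ctx context.Context, key string, ttl time.Duration,
	loader func() (interface{}, error)) (interface{}, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if v, ok := c.data[key]; ok {
		return v, nil // cache hit
	}
	v, err := loader() // miss: rebuild from DB plus pending mempool state
	if err != nil {
		return nil, err
	}
	if c.data == nil {
		c.data = make(map[string]interface{})
	}
	c.data[key] = v // a production cache would also honour ttl and eviction
	return v, nil
}
```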
-type MockModel struct { - ctrl *gomock.Controller - recorder *MockModelMockRecorder -} - -// MockModelMockRecorder is the mock recorder for MockModel. -type MockModelMockRecorder struct { - mock *MockModel -} - -// NewMockModel creates a new mock instance. -func NewMockModel(ctrl *gomock.Controller) *MockModel { - mock := &MockModel{ctrl: ctrl} - mock.recorder = &MockModelMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockModel) EXPECT() *MockModelMockRecorder { - return m.recorder -} - -// CreateFailTx mocks base method. -func (m *MockModel) CreateFailTx(failTx *tx.FailTx) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateFailTx", failTx) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateFailTx indicates an expected call of CreateFailTx. -func (mr *MockModelMockRecorder) CreateFailTx(failTx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateFailTx", reflect.TypeOf((*MockModel)(nil).CreateFailTx), failTx) -} diff --git a/service/rpc/globalRPC/internal/repo/failtx/failtx.go b/service/rpc/globalRPC/internal/repo/failtx/failtx.go deleted file mode 100644 index 4d5d1f657..000000000 --- a/service/rpc/globalRPC/internal/repo/failtx/failtx.go +++ /dev/null @@ -1,34 +0,0 @@ -package failtx - -import ( - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -/* - Func: CreateFailTx - Params: failTx *FailTx - Return: err error - Description: create fail txVerification -*/ -func (m *model) CreateFailTx(failTx *table.FailTx) error { - dbTx := m.db.Table(m.table).Create(failTx) - if dbTx.Error != nil { - logx.Errorf("fail to create failTx, error: %s", dbTx.Error.Error()) - return errorcode.DbErrSqlOperation - } - if dbTx.RowsAffected == 0 { - return errorcode.DbErrFailToCreateFailTx - } - return nil -} diff --git a/service/rpc/globalRPC/internal/repo/sysconf/api.go b/service/rpc/globalRPC/internal/repo/sysconf/api.go deleted file mode 100644 index 15cd2e5d8..000000000 --- a/service/rpc/globalRPC/internal/repo/sysconf/api.go +++ /dev/null @@ -1,20 +0,0 @@ -package sysconf - -import ( - "context" - - table "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type Model interface { - GetSysconfigByName(ctx context.Context, name string) (info *table.Sysconfig, err error) -} - -func New(svcCtx *svc.ServiceContext) Model { - return &model{ - table: `sys_config`, - db: svcCtx.GormPointer, - cache: svcCtx.Cache, - } -} diff --git a/service/rpc/globalRPC/internal/repo/sysconf/sysconf.go b/service/rpc/globalRPC/internal/repo/sysconf/sysconf.go deleted file mode 100644 index 0c671181c..000000000 --- a/service/rpc/globalRPC/internal/repo/sysconf/sysconf.go +++ /dev/null @@ -1,43 +0,0 @@ -package sysconf - -import ( - "context" - "fmt" - - "github.com/zeromicro/go-zero/core/logx" - "gorm.io/gorm" - - table "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/errorcode" - "github.com/bnb-chain/zkbas/pkg/multcache" -) - -type model struct { - table string - db *gorm.DB - cache multcache.MultCache -} - -func (m *model) GetSysconfigByName(ctx context.Context, name string) (*table.Sysconfig, error) { - f := func() (interface{}, error) 
{ - var config table.Sysconfig - dbTx := m.db.Table(m.table).Where("name = ?", name).Find(&config) - if dbTx.Error != nil { - logx.Errorf("fail to get sysconfig: %s, error: %s", name, dbTx.Error.Error()) - return nil, errorcode.DbErrSqlOperation - } else if dbTx.RowsAffected == 0 { - return nil, errorcode.DbErrNotFound - } - return &config, nil - } - var config table.Sysconfig - value, err := m.cache.GetWithSet(ctx, multcache.KeyGetSysconfigByName+name, &config, multcache.SysconfigTtl, f) - if err != nil { - return &config, err - } - config1, ok := value.(*table.Sysconfig) - if !ok { - return nil, fmt.Errorf("[GetSysconfigByName] ErrConvertFail") - } - return config1, nil -} diff --git a/service/rpc/globalRPC/internal/server/globalrpcserver.go b/service/rpc/globalRPC/internal/server/globalrpcserver.go deleted file mode 100644 index 10e54f7eb..000000000 --- a/service/rpc/globalRPC/internal/server/globalrpcserver.go +++ /dev/null @@ -1,118 +0,0 @@ -// Code generated by goctl. DO NOT EDIT! -// Source: globalRPC.proto - -package server - -import ( - "context" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/logic" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -type GlobalRPCServer struct { - svcCtx *svc.ServiceContext - globalRPCProto.UnimplementedGlobalRPCServer -} - -func NewGlobalRPCServer(svcCtx *svc.ServiceContext) *GlobalRPCServer { - return &GlobalRPCServer{ - svcCtx: svcCtx, - } -} - -func (s *GlobalRPCServer) GetLatestAssetsListByAccountIndex(ctx context.Context, in *globalRPCProto.ReqGetLatestAssetsListByAccountIndex) (*globalRPCProto.RespGetLatestAssetsListByAccountIndex, error) { - l := logic.NewGetLatestAssetsListByAccountIndexLogic(ctx, s.svcCtx) - return l.GetLatestAssetsListByAccountIndex(in) -} - -func (s *GlobalRPCServer) GetLatestAccountInfoByAccountIndex(ctx context.Context, in *globalRPCProto.ReqGetLatestAccountInfoByAccountIndex) (*globalRPCProto.RespGetLatestAccountInfoByAccountIndex, error) { - l := logic.NewGetLatestAccountInfoByAccountIndexLogic(ctx, s.svcCtx) - return l.GetLatestAccountInfoByAccountIndex(in) -} - -func (s *GlobalRPCServer) GetLatestPairInfo(ctx context.Context, in *globalRPCProto.ReqGetLatestPairInfo) (*globalRPCProto.RespGetLatestPairInfo, error) { - l := logic.NewGetLatestPairInfoLogic(ctx, s.svcCtx) - return l.GetLatestPairInfo(in) -} - -func (s *GlobalRPCServer) GetSwapAmount(ctx context.Context, in *globalRPCProto.ReqGetSwapAmount) (*globalRPCProto.RespGetSwapAmount, error) { - l := logic.NewGetSwapAmountLogic(ctx, s.svcCtx) - return l.GetSwapAmount(in) -} - -func (s *GlobalRPCServer) GetLpValue(ctx context.Context, in *globalRPCProto.ReqGetLpValue) (*globalRPCProto.RespGetLpValue, error) { - l := logic.NewGetLpValueLogic(ctx, s.svcCtx) - return l.GetLpValue(in) -} - -func (s *GlobalRPCServer) SendTx(ctx context.Context, in *globalRPCProto.ReqSendTx) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendTxLogic(ctx, s.svcCtx) - return l.SendTx(in) -} - -func (s *GlobalRPCServer) SendCreateCollectionTx(ctx context.Context, in *globalRPCProto.ReqSendCreateCollectionTx) (*globalRPCProto.RespSendCreateCollectionTx, error) { - l := logic.NewSendCreateCollectionTxLogic(ctx, s.svcCtx) - return l.SendCreateCollectionTx(in) -} - -func (s *GlobalRPCServer) SendMintNftTx(ctx context.Context, in *globalRPCProto.ReqSendMintNftTx) (*globalRPCProto.RespSendMintNftTx, error) { - l := logic.NewSendMintNftTxLogic(ctx, s.svcCtx) - return 
l.SendMintNftTx(in) -} - -func (s *GlobalRPCServer) GetNextNonce(ctx context.Context, in *globalRPCProto.ReqGetNextNonce) (*globalRPCProto.RespGetNextNonce, error) { - l := logic.NewGetNextNonceLogic(ctx, s.svcCtx) - return l.GetNextNonce(in) -} - -func (s *GlobalRPCServer) GetMaxOfferId(ctx context.Context, in *globalRPCProto.ReqGetMaxOfferId) (*globalRPCProto.RespGetMaxOfferId, error) { - l := logic.NewGetMaxOfferIdLogic(ctx, s.svcCtx) - return l.GetMaxOfferId(in) -} - -func (s *GlobalRPCServer) SendAddLiquidityTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendAddLiquidityTxLogic(ctx, s.svcCtx) - return l.SendAddLiquidityTx(in) -} - -func (s *GlobalRPCServer) SendAtomicMatchTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendAtomicMatchTxLogic(ctx, s.svcCtx) - return l.SendAtomicMatchTx(in) -} - -func (s *GlobalRPCServer) SendCancelOfferTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendCancelOfferTxLogic(ctx, s.svcCtx) - return l.SendCancelOfferTx(in) -} - -func (s *GlobalRPCServer) SendRemoveLiquidityTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendRemoveLiquidityTxLogic(ctx, s.svcCtx) - return l.SendRemoveLiquidityTx(in) -} - -func (s *GlobalRPCServer) SendSwapTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendSwapTxLogic(ctx, s.svcCtx) - return l.SendSwapTx(in) -} - -func (s *GlobalRPCServer) SendTransferNftTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendTransferNftTxLogic(ctx, s.svcCtx) - return l.SendTransferNftTx(in) -} - -func (s *GlobalRPCServer) SendTransferTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendTransferTxLogic(ctx, s.svcCtx) - return l.SendTransferTx(in) -} - -func (s *GlobalRPCServer) SendWithdrawNftTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendWithdrawNftTxLogic(ctx, s.svcCtx) - return l.SendWithdrawNftTx(in) -} - -func (s *GlobalRPCServer) SendWithdrawTx(ctx context.Context, in *globalRPCProto.ReqSendTxByRawInfo) (*globalRPCProto.RespSendTx, error) { - l := logic.NewSendWithdrawTxLogic(ctx, s.svcCtx) - return l.SendWithdrawTx(in) -} diff --git a/service/rpc/globalRPC/internal/svc/servicecontext.go b/service/rpc/globalRPC/internal/svc/servicecontext.go deleted file mode 100644 index 14df3b12b..000000000 --- a/service/rpc/globalRPC/internal/svc/servicecontext.go +++ /dev/null @@ -1,87 +0,0 @@ -package svc - -import ( - "github.com/zeromicro/go-zero/core/logx" - "github.com/zeromicro/go-zero/core/stores/redis" - "github.com/zeromicro/go-zero/core/stores/sqlx" - "gorm.io/driver/postgres" - "gorm.io/gorm" - - "github.com/bnb-chain/zkbas/common/model/account" - asset "github.com/bnb-chain/zkbas/common/model/assetInfo" - "github.com/bnb-chain/zkbas/common/model/block" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/mempool" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/sysconfig" - "github.com/bnb-chain/zkbas/common/model/tx" - "github.com/bnb-chain/zkbas/pkg/multcache" - 
"github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" -) - -type ServiceContext struct { - Config config.Config - MempoolModel mempool.MempoolModel - MempoolDetailModel mempool.MempoolTxDetailModel - AccountModel account.AccountModel - AccountHistoryModel account.AccountHistoryModel - TxModel tx.TxModel - TxDetailModel tx.TxDetailModel - FailTxModel tx.FailTxModel - LiquidityModel liquidity.LiquidityModel - LiquidityHistoryModel liquidity.LiquidityHistoryModel - BlockModel block.BlockModel - - NftModel nft.L2NftModel - CollectionModel nft.L2NftCollectionModel - OfferModel nft.OfferModel - - L2AssetModel asset.AssetInfoModel - - SysConfigModel sysconfig.SysconfigModel - - RedisConnection *redis.Redis - - GormPointer *gorm.DB - - Conn sqlx.SqlConn - Cache multcache.MultCache -} - -func WithRedis(redisType string, redisPass string) redis.Option { - return func(p *redis.Redis) { - p.Type = redisType - p.Pass = redisPass - } -} - -func NewServiceContext(c config.Config) *ServiceContext { - gormPointer, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) - if err != nil { - logx.Must(err) - } - conn := sqlx.NewSqlConn("postgres", c.Postgres.DataSource) - redisConn := redis.New(c.CacheRedis[0].Host, WithRedis(c.CacheRedis[0].Type, c.CacheRedis[0].Pass)) - return &ServiceContext{ - Config: c, - MempoolModel: mempool.NewMempoolModel(conn, c.CacheRedis, gormPointer), - MempoolDetailModel: mempool.NewMempoolDetailModel(conn, c.CacheRedis, gormPointer), - AccountModel: account.NewAccountModel(conn, c.CacheRedis, gormPointer), - AccountHistoryModel: account.NewAccountHistoryModel(conn, c.CacheRedis, gormPointer), - TxModel: tx.NewTxModel(conn, c.CacheRedis, gormPointer, redisConn), - TxDetailModel: tx.NewTxDetailModel(conn, c.CacheRedis, gormPointer), - FailTxModel: tx.NewFailTxModel(conn, c.CacheRedis, gormPointer), - LiquidityModel: liquidity.NewLiquidityModel(conn, c.CacheRedis, gormPointer), - LiquidityHistoryModel: liquidity.NewLiquidityHistoryModel(conn, c.CacheRedis, gormPointer), - BlockModel: block.NewBlockModel(conn, c.CacheRedis, gormPointer, redisConn), - NftModel: nft.NewL2NftModel(conn, c.CacheRedis, gormPointer), - CollectionModel: nft.NewL2NftCollectionModel(conn, c.CacheRedis, gormPointer), - OfferModel: nft.NewOfferModel(conn, c.CacheRedis, gormPointer), - L2AssetModel: asset.NewAssetInfoModel(conn, c.CacheRedis, gormPointer), - SysConfigModel: sysconfig.NewSysconfigModel(conn, c.CacheRedis, gormPointer), - RedisConnection: redisConn, - GormPointer: gormPointer, - Conn: conn, - Cache: multcache.NewRedisCache(c.CacheRedis[0].Host, c.CacheRedis[0].Pass, 10), - } -} diff --git a/service/rpc/globalRPC/test/getLatestAccountInfoByAccountIndex_test.go b/service/rpc/globalRPC/test/getLatestAccountInfoByAccountIndex_test.go deleted file mode 100644 index fef1ae92f..000000000 --- a/service/rpc/globalRPC/test/getLatestAccountInfoByAccountIndex_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "testing" - - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestGetLatestAccountInfoByAccountIndex(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - 
logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - srv := server.NewGlobalRPCServer(ctx) - resp, err := srv.GetLatestAccountInfoByAccountIndex( - context.Background(), - &globalRPCProto.ReqGetLatestAccountInfoByAccountIndex{ - AccountIndex: 10000, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} diff --git a/service/rpc/globalRPC/test/sendAddLiquidityTx_test.go b/service/rpc/globalRPC/test/sendAddLiquidityTx_test.go deleted file mode 100644 index d049d8bb2..000000000 --- a/service/rpc/globalRPC/test/sendAddLiquidityTx_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendAddLiquidityTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendAddLiquidityTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeAddLiquidity, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendAddLiquidityTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - assetAAmount := big.NewInt(100000) - assetBAmount := big.NewInt(100000) - lpAmount, err := util.ComputeEmptyLpAmount(assetAAmount, assetBAmount) - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.AddLiquidityTxInfo{ - FromAccountIndex: 2, - PairIndex: 0, - AssetAId: 0, - AssetAAmount: assetAAmount, - AssetBId: 2, - AssetBAmount: assetBAmount, - LpAmount: lpAmount, - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 3, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeAddLiquidityMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendAtomicMatchTx_test.go b/service/rpc/globalRPC/test/sendAtomicMatchTx_test.go deleted file 
mode 100644 index 81497a4ae..000000000 --- a/service/rpc/globalRPC/test/sendAtomicMatchTx_test.go +++ /dev/null @@ -1,149 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonAsset" - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendAtomicMatchTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendAtomicMatchTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeAtomicMatch, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendAtomicMatchTxInfo() string { - // from sher.legend to gavin.legend - sherSeed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - sherKey, err := curve.GenerateEddsaPrivateKey(sherSeed) - if err != nil { - panic(err) - } - gavinSeed := "17673b9a9fdec6dc90c7cc1eb1c939134dfb659d2f08edbe071e5c45f343d008" - gavinKey, err := curve.GenerateEddsaPrivateKey(gavinSeed) - if err != nil { - panic(err) - } - listedAt := time.Now().UnixMilli() - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - buyOffer := &commonTx.OfferTxInfo{ - Type: commonAsset.BuyOfferType, - OfferId: 0, - AccountIndex: 3, - NftIndex: 1, - AssetId: 0, - AssetAmount: big.NewInt(10000), - ListedAt: listedAt, - ExpiredAt: expiredAt, - TreasuryRate: 200, - Sig: nil, - } - hFunc := mimc.NewMiMC() - buyHash, err := legendTxTypes.ComputeOfferMsgHash(buyOffer, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - buySig, err := gavinKey.Sign(buyHash, hFunc) - if err != nil { - panic(err) - } - buyOffer.Sig = buySig - sellOffer := &commonTx.OfferTxInfo{ - Type: commonAsset.SellOfferType, - OfferId: 0, - AccountIndex: 2, - NftIndex: 1, - AssetId: 0, - AssetAmount: big.NewInt(10000), - ListedAt: listedAt, - ExpiredAt: expiredAt, - TreasuryRate: 200, - Sig: nil, - } - hFunc.Reset() - sellHash, err := legendTxTypes.ComputeOfferMsgHash(sellOffer, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - sellSig, err := sherKey.Sign(sellHash, hFunc) - if err != nil { - panic(err) - } - sellOffer.Sig = sellSig - txInfo := &commonTx.AtomicMatchTxInfo{ - AccountIndex: 2, - BuyOffer: buyOffer, - SellOffer: sellOffer, - GasAccountIndex: 1, - GasFeeAssetId: 0, - GasFeeAssetAmount: big.NewInt(5000), - Nonce: 8, - ExpiredAt: expiredAt, - Sig: nil, - } - hFunc.Reset() - msgHash, err := legendTxTypes.ComputeAtomicMatchMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - 
signature, err := sherKey.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendCancelOfferTx_test.go b/service/rpc/globalRPC/test/sendCancelOfferTx_test.go deleted file mode 100644 index 8ea2465c3..000000000 --- a/service/rpc/globalRPC/test/sendCancelOfferTx_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendCancelOfferTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendCancelOfferTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeCancelOffer, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendCancelOfferTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.CancelOfferTxInfo{ - AccountIndex: 2, - OfferId: 1, - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 9, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeCancelOfferMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendCreateCollectionTx_test.go b/service/rpc/globalRPC/test/sendCreateCollectionTx_test.go deleted file mode 100644 index 68af168d3..000000000 --- a/service/rpc/globalRPC/test/sendCreateCollectionTx_test.go +++ /dev/null @@ -1,96 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - 
"github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendCreateCollectionTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendCreateCollectionTxInfo() - resp, err := srv.SendCreateCollectionTx( - context.Background(), - &globalRPCProto.ReqSendCreateCollectionTx{ - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendCreateCollectionTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.CreateCollectionTxInfo{ - AccountIndex: 2, - CollectionId: 1, - Name: "Zkbas Collection", - Introduction: "Wonderful zkbas!", - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 3, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeCreateCollectionMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendMintNftTx_test.go b/service/rpc/globalRPC/test/sendMintNftTx_test.go deleted file mode 100644 index ab7871c1c..000000000 --- a/service/rpc/globalRPC/test/sendMintNftTx_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/ethereum/go-ethereum/common" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendMintNftTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendMintNftTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeMintNft, - TxInfo: 
txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendMintNftTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - nameHash, err := util.AccountNameHash("gavin.legend") - if err != nil { - panic(err) - } - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(util.RandomUUID())) - contentHash := hFunc.Sum(nil) - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.MintNftTxInfo{ - CreatorAccountIndex: 2, - ToAccountIndex: 3, - ToAccountNameHash: nameHash, - NftContentHash: common.Bytes2Hex(contentHash), - NftCollectionId: 1, - CreatorTreasuryRate: 0, - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 7, - Sig: nil, - } - hFunc.Reset() - msgHash, err := legendTxTypes.ComputeMintNftMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendRemoveLiquidityTx_test.go b/service/rpc/globalRPC/test/sendRemoveLiquidityTx_test.go deleted file mode 100644 index 3a924ea1e..000000000 --- a/service/rpc/globalRPC/test/sendRemoveLiquidityTx_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendRemoveLiquidityTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendRemoveLiquidityTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeRemoveLiquidity, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendRemoveLiquidityTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - assetAMinAmount := big.NewInt(98) - assetBMinAmount := big.NewInt(99) - lpAmount := big.NewInt(100) - expiredAt := 
time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.RemoveLiquidityTxInfo{ - FromAccountIndex: 2, - PairIndex: 0, - AssetAId: 0, - AssetAMinAmount: assetAMinAmount, - AssetBId: 2, - AssetBMinAmount: assetBMinAmount, - LpAmount: lpAmount, - AssetAAmountDelta: nil, - AssetBAmountDelta: nil, - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 5, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeRemoveLiquidityMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendSwapTx_test.go b/service/rpc/globalRPC/test/sendSwapTx_test.go deleted file mode 100644 index 3181d7e53..000000000 --- a/service/rpc/globalRPC/test/sendSwapTx_test.go +++ /dev/null @@ -1,102 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendSwapTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendSwapTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeSwap, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendSwapTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - assetAAmount := big.NewInt(100) - assetBAmount := big.NewInt(98) - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.SwapTxInfo{ - FromAccountIndex: 2, - PairIndex: 0, - AssetAId: 2, - AssetAAmount: assetAAmount, - AssetBId: 0, - AssetBMinAmount: assetBAmount, - AssetBAmountDelta: nil, - GasAccountIndex: 1, - GasFeeAssetId: 0, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 4, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeSwapMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git 
a/service/rpc/globalRPC/test/sendTransferNftTx_test.go b/service/rpc/globalRPC/test/sendTransferNftTx_test.go deleted file mode 100644 index ae41dbb2c..000000000 --- a/service/rpc/globalRPC/test/sendTransferNftTx_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendTransferNftTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendTransferNftTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeTransferNft, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendTransferNftTxInfo() string { - // from sher.legend to gavin.legend - //seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - seed := "17673b9a9fdec6dc90c7cc1eb1c939134dfb659d2f08edbe071e5c45f343d008" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - nameHash, err := util.AccountNameHash("sher.legend") - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.TransferNftTxInfo{ - FromAccountIndex: 3, - ToAccountIndex: 2, - ToAccountNameHash: nameHash, - NftIndex: 1, - GasAccountIndex: 1, - GasFeeAssetId: 0, - GasFeeAssetAmount: big.NewInt(5000), - CallData: "", - CallDataHash: nil, - ExpiredAt: expiredAt, - Nonce: 1, - Sig: nil, - } - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(txInfo.CallData)) - callDataHash := hFunc.Sum(nil) - txInfo.CallDataHash = callDataHash - hFunc.Reset() - msgHash, err := legendTxTypes.ComputeTransferNftMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendTransferTx_test.go b/service/rpc/globalRPC/test/sendTransferTx_test.go deleted file mode 100644 index 56268dbe3..000000000 --- a/service/rpc/globalRPC/test/sendTransferTx_test.go +++ /dev/null @@ -1,114 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - 
"github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/common/tree" - "github.com/bnb-chain/zkbas/common/util" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -var configFile = flag.String("f", - "../etc/globalrpc.yaml", "the config file") - -func TestSendTransferTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendTransferTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeTransfer, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendTransferTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - nameHash, err := util.AccountNameHash("gavin.legend") - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.TransferTxInfo{ - FromAccountIndex: 2, - ToAccountIndex: 3, - ToAccountNameHash: nameHash, - AssetId: 0, - AssetAmount: big.NewInt(100000), - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - Memo: "transfer", - CallData: "", - CallDataHash: tree.NilHash, - Nonce: 5, - ExpiredAt: expiredAt, - Sig: nil, - } - hFunc := mimc.NewMiMC() - hFunc.Write([]byte(txInfo.CallData)) - callDataHash := hFunc.Sum(nil) - txInfo.CallDataHash = callDataHash - hFunc.Reset() - msgHash, err := legendTxTypes.ComputeTransferMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendWithdrawNftTx_test.go b/service/rpc/globalRPC/test/sendWithdrawNftTx_test.go deleted file mode 100644 index 9078341d5..000000000 --- a/service/rpc/globalRPC/test/sendWithdrawNftTx_test.go +++ /dev/null @@ -1,97 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - 
"github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendWithdrawNftTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendWithdrawNftTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeWithdrawNft, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func constructSendWithdrawNftTxInfo() string { - // from sher.legend to gavin.legend - //seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - seed := "17673b9a9fdec6dc90c7cc1eb1c939134dfb659d2f08edbe071e5c45f343d008" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.WithdrawNftTxInfo{ - AccountIndex: 3, - NftIndex: 1, - ToAddress: "0xd5Aa3B56a2E2139DB315CdFE3b34149c8ed09171", - GasAccountIndex: 1, - GasFeeAssetId: 0, - GasFeeAssetAmount: big.NewInt(5000), - ExpiredAt: expiredAt, - Nonce: 2, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeWithdrawNftMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/rpc/globalRPC/test/sendWithdrawTx_test.go b/service/rpc/globalRPC/test/sendWithdrawTx_test.go deleted file mode 100644 index d27af5785..000000000 --- a/service/rpc/globalRPC/test/sendWithdrawTx_test.go +++ /dev/null @@ -1,97 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "testing" - "time" - - curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" - "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/zeromicro/go-zero/core/conf" - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/common/commonTx" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/globalRPCProto" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/config" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/server" - "github.com/bnb-chain/zkbas/service/rpc/globalRPC/internal/svc" -) - -func TestSendWithdrawTx(t *testing.T) { - flag.Parse() - - var c config.Config - conf.MustLoad(*configFile, &c) - logx.MustSetup(c.LogConf) - ctx := svc.NewServiceContext(c) - - /* - err := globalmapHandler.ReloadGlobalMap(ctx) - if err != nil { - logx.Error("[main] %s", err.Error()) - return - } - */ - - srv := server.NewGlobalRPCServer(ctx) - txInfo := constructSendWithdrawTxInfo() - resp, err := srv.SendTx( - context.Background(), - &globalRPCProto.ReqSendTx{ - TxType: commonTx.TxTypeWithdraw, - TxInfo: txInfo, - }, - ) - if err != nil { - t.Fatal(err) - } - respBytes, err := json.Marshal(resp) - if err != nil { - t.Fatal(err) - } - fmt.Println(string(respBytes)) - fmt.Printf("Starting rpc server at %s...\n", c.ListenOn) -} - -func 
constructSendWithdrawTxInfo() string { - // from sher.legend to gavin.legend - seed := "28e1a3762ff9944e9a4ad79477b756ef0aff3d2af76f0f40a0c3ec6ca76cf24b" - key, err := curve.GenerateEddsaPrivateKey(seed) - if err != nil { - panic(err) - } - expiredAt := time.Now().Add(time.Hour * 2).UnixMilli() - txInfo := &commonTx.WithdrawTxInfo{ - FromAccountIndex: 2, - AssetId: 0, - AssetAmount: big.NewInt(10000000), - GasAccountIndex: 1, - GasFeeAssetId: 2, - GasFeeAssetAmount: big.NewInt(5000), - ToAddress: "0x99AC8881834797ebC32f185ee27c2e96842e1a47", - Nonce: 2, - ExpiredAt: expiredAt, - Sig: nil, - } - hFunc := mimc.NewMiMC() - msgHash, err := legendTxTypes.ComputeWithdrawMsgHash(txInfo, hFunc) - if err != nil { - panic(err) - } - hFunc.Reset() - signature, err := key.Sign(msgHash, hFunc) - if err != nil { - panic(err) - } - txInfo.Sig = signature - txInfoBytes, err := json.Marshal(txInfo) - if err != nil { - panic(err) - } - return string(txInfoBytes) -} diff --git a/service/sender/config/config.go b/service/sender/config/config.go new file mode 100644 index 000000000..885a8e8e2 --- /dev/null +++ b/service/sender/config/config.go @@ -0,0 +1,20 @@ +package config + +import ( + "github.com/zeromicro/go-zero/core/logx" +) + +type Config struct { + Postgres struct { + DataSource string + } + ChainConfig struct { + NetworkRPCSysConfigName string + MaxWaitingTime int64 + MaxBlockCount int + ConfirmBlocksCount uint64 + Sk string + GasLimit uint64 + } + LogConf logx.LogConf +} diff --git a/configyaml/sender.yaml.example b/service/sender/etc/config.yaml.example similarity index 73% rename from configyaml/sender.yaml.example rename to service/sender/etc/config.yaml.example index 17bc02496..c7d21476f 100644 --- a/configyaml/sender.yaml.example +++ b/service/sender/etc/config.yaml.example @@ -1,4 +1,4 @@ -Name: sender.cronjob +Name: sender Prometheus: Host: 0.0.0.0 @@ -8,17 +8,18 @@ Prometheus: Postgres: DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable -CacheRedis: - - Host: redis:6379 - Type: node - ChainConfig: NetworkRPCSysConfigName: "BscTestNetworkRpc" #NetworkRPCSysConfigName: "LocalTestNetworkRpc" - ZkbasContractAddrSysConfigName: "ZkbasContract" MaxWaitingTime: 120 + ConfirmBlocksCount: 0 MaxBlockCount: 3 Sk: "107f9d2a50ce2d8337e0c5220574e9fcf2bf60002da5acf07718f4d531ea3faa" GasLimit: 20000000 - L1ChainId: "5777" +LogConf: + ServiceName: sender + Mode: console + Path: ./log/sender + StackCooldownMillis: 500 + Level: error diff --git a/service/sender/sender.go b/service/sender/sender.go new file mode 100644 index 000000000..cfff09568 --- /dev/null +++ b/service/sender/sender.go @@ -0,0 +1,65 @@ +package sender + +import ( + "github.com/robfig/cron/v3" + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/proc" + + "github.com/bnb-chain/zkbas/service/sender/config" + "github.com/bnb-chain/zkbas/service/sender/sender" +) + +func Run(configFile string) error { + var c config.Config + conf.MustLoad(configFile, &c) + s := sender.NewSender(c) + logx.MustSetup(c.LogConf) + logx.DisableStat() + proc.AddShutdownListener(func() { + logx.Close() + }) + + // new cron + cronJob := cron.New(cron.WithChain( + cron.SkipIfStillRunning(cron.DiscardLogger), + )) + + _, err := cronJob.AddFunc("@every 10s", func() { + logx.Info("========================= start commit task =========================") + err := s.CommitBlocks() + if err != nil { + logx.Errorf("failed to rollup block, %v", err) + } + }) + if err != 
nil { + panic(err) + } + + _, err = cronJob.AddFunc("@every 10s", func() { + logx.Info("========================= start verify task =========================") + err = s.VerifyAndExecuteBlocks() + if err != nil { + logx.Errorf("failed to send verify transaction, %v", err) + } + }) + if err != nil { + panic(err) + } + + _, err = cronJob.AddFunc("@every 10s", func() { + logx.Info("========================= start update txs task =========================") + err = s.UpdateSentTxs() + if err != nil { + logx.Errorf("failed to update sent tx status, %v", err) + } + }) + if err != nil { + panic(err) + } + + cronJob.Start() + + logx.Info("cronjob is starting......") + select {} +} diff --git a/service/sender/sender/sender.go b/service/sender/sender/sender.go new file mode 100644 index 000000000..834cf866d --- /dev/null +++ b/service/sender/sender/sender.go @@ -0,0 +1,336 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package sender + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "math/big" + "time" + + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas-eth-rpc/_rpc" + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/common/prove" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/compressedblock" + "github.com/bnb-chain/zkbas/dao/l1rolluptx" + "github.com/bnb-chain/zkbas/dao/proof" + "github.com/bnb-chain/zkbas/dao/sysconfig" + sconfig "github.com/bnb-chain/zkbas/service/sender/config" + "github.com/bnb-chain/zkbas/types" +) + +type Sender struct { + config sconfig.Config + + // Client + cli *_rpc.ProviderClient + authCli *_rpc.AuthClient + zkbasInstance *zkbas.Zkbas + + // Data access objects + blockModel block.BlockModel + compressedBlockModel compressedblock.CompressedBlockModel + l1RollupTxModel l1rolluptx.L1RollupTxModel + sysConfigModel sysconfig.SysConfigModel + proofModel proof.ProofModel +} + +func NewSender(c sconfig.Config) *Sender { + db, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) + if err != nil { + logx.Errorf("gorm connect db error, err = %v", err) + } + s := &Sender{ + config: c, + blockModel: block.NewBlockModel(db), + compressedBlockModel: compressedblock.NewCompressedBlockModel(db), + l1RollupTxModel: l1rolluptx.NewL1RollupTxModel(db), + sysConfigModel: sysconfig.NewSysConfigModel(db), + proofModel: proof.NewProofModel(db), + } + + l1RPCEndpoint, err := s.sysConfigModel.GetSysConfigByName(c.ChainConfig.NetworkRPCSysConfigName) + if err != nil { + logx.Severef("fatal error, cannot fetch l1RPCEndpoint from sysconfig, err: %v, SysConfigName: %s", + err, c.ChainConfig.NetworkRPCSysConfigName) + panic(err) + } + rollupAddress, err := s.sysConfigModel.GetSysConfigByName(types.ZkbasContract) + if err != nil { + logx.Severef("fatal error, cannot fetch rollupAddress from sysconfig, err: %v, SysConfigName: %s", + err, 
types.ZkbasContract) + panic(err) + } + + s.cli, err = _rpc.NewClient(l1RPCEndpoint.Value) + if err != nil { + panic(err) + } + chainId, err := s.cli.ChainID(context.Background()) + if err != nil { + panic(err) + } + s.authCli, err = _rpc.NewAuthClient(s.cli, c.ChainConfig.Sk, chainId) + if err != nil { + panic(err) + } + s.zkbasInstance, err = zkbas.LoadZkbasInstance(s.cli, rollupAddress.Value) + if err != nil { + panic(err) + } + return s +} + +func (s *Sender) CommitBlocks() (err error) { + var ( + cli = s.cli + authCli = s.authCli + zkbasInstance = s.zkbasInstance + ) + pendingTx, err := s.l1RollupTxModel.GetLatestPendingTx(l1rolluptx.TxTypeCommit) + if err != nil && err != types.DbErrNotFound { + return err + } + // No need to submit a new transaction if there are any pending commit txs. + if pendingTx != nil { + return nil + } + + lastHandledTx, err := s.l1RollupTxModel.GetLatestHandledTx(l1rolluptx.TxTypeCommit) + if err != nil && err != types.DbErrNotFound { + return err + } + start := int64(1) + if lastHandledTx != nil { + start = lastHandledTx.L2BlockHeight + 1 + } + // commit new blocks + blocks, err := s.compressedBlockModel.GetCompressedBlockBetween(start, + start+int64(s.config.ChainConfig.MaxBlockCount)) + if err != nil && err != types.DbErrNotFound { + return fmt.Errorf("failed to get compressed blocks, err: %v", err) + } + if len(blocks) == 0 { + return nil + } + pendingCommitBlocks, err := ConvertBlocksForCommitToCommitBlockInfos(blocks) + if err != nil { + return fmt.Errorf("failed to get commit block info, err: %v", err) + } + // get last block info + lastStoredBlockInfo := defaultBlockHeader() + if lastHandledTx != nil { + lastHandledBlockInfo, err := s.blockModel.GetBlockByHeight(lastHandledTx.L2BlockHeight) + if err != nil { + return fmt.Errorf("failed to get block info, err: %v", err) + } + // construct last stored block header + lastStoredBlockInfo = chain.ConstructStoredBlockInfo(lastHandledBlockInfo) + } + + gasPrice, err := s.cli.SuggestGasPrice(context.Background()) + if err != nil { + logx.Errorf("failed to fetch gas price: %v", err) + return err + } + // commit blocks on-chain + txHash, err := zkbas.CommitBlocks( + cli, authCli, + zkbasInstance, + lastStoredBlockInfo, + pendingCommitBlocks, + gasPrice, + s.config.ChainConfig.GasLimit) + if err != nil { + return fmt.Errorf("failed to send commit tx, err: %v", err) + } + newRollupTx := &l1rolluptx.L1RollupTx{ + L1TxHash: txHash, + TxStatus: l1rolluptx.StatusPending, + TxType: l1rolluptx.TxTypeCommit, + L2BlockHeight: int64(pendingCommitBlocks[len(pendingCommitBlocks)-1].BlockNumber), + } + err = s.l1RollupTxModel.CreateL1RollupTx(newRollupTx) + if err != nil { + return fmt.Errorf("failed to create tx in database, err: %v", err) + } + logx.Infof("new blocks have been committed (height): %v", newRollupTx.L2BlockHeight) + return nil +} + +func (s *Sender) UpdateSentTxs() (err error) { + pendingTxs, err := s.l1RollupTxModel.GetL1RollupTxsByStatus(l1rolluptx.StatusPending) + if err != nil { + if err == types.DbErrNotFound { + return nil + } + return fmt.Errorf("failed to get pending txs, err: %v", err) + } + + latestL1Height, err := s.cli.GetHeight() + if err != nil { + return fmt.Errorf("failed to get l1 block height, err: %v", err) + } + + var ( + pendingUpdateRxs []*l1rolluptx.L1RollupTx + pendingUpdateProofStatus = make(map[int64]int) + ) + for _, pendingTx := range pendingTxs { + txHash := pendingTx.L1TxHash + receipt, err := s.cli.GetTransactionReceipt(txHash) + if err != nil { + logx.Errorf("query transaction receipt 
%s failed, err: %v", txHash, err) + if time.Now().After(pendingTx.UpdatedAt.Add(time.Duration(s.config.ChainConfig.MaxWaitingTime) * time.Second)) { + // No need to check the response, do best effort. + //nolint:errcheck + s.l1RollupTxModel.DeleteL1RollupTx(pendingTx) + } + continue + } + if receipt.Status == 0 { + // A failed rollup transaction is critical; stop the sender immediately. + panic(fmt.Sprintf("unexpected failed tx: %v", txHash)) + } + + // not finalized yet + if latestL1Height < receipt.BlockNumber.Uint64()+s.config.ChainConfig.ConfirmBlocksCount { + continue + } + var validTx bool + for _, vlog := range receipt.Logs { + switch vlog.Topics[0].Hex() { + case zkbasLogBlockCommitSigHash.Hex(): + var event zkbas.ZkbasBlockCommit + if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockCommit, vlog.Data); err != nil { + return err + } + validTx = int64(event.BlockNumber) == pendingTx.L2BlockHeight + case zkbasLogBlockVerificationSigHash.Hex(): + var event zkbas.ZkbasBlockVerification + if err = ZkbasContractAbi.UnpackIntoInterface(&event, EventNameBlockVerification, vlog.Data); err != nil { + return err + } + validTx = int64(event.BlockNumber) == pendingTx.L2BlockHeight + pendingUpdateProofStatus[pendingTx.L2BlockHeight] = proof.Confirmed + case zkbasLogBlocksRevertSigHash.Hex(): + // TODO revert + default: + } + } + + if validTx { + pendingTx.TxStatus = l1rolluptx.StatusHandled + pendingUpdateRxs = append(pendingUpdateRxs, pendingTx) + } + } + + if err = s.l1RollupTxModel.UpdateL1RollupTxs(pendingUpdateRxs, + pendingUpdateProofStatus); err != nil { + return fmt.Errorf("failed to update rollup txs, err: %v", err) + } + return nil +} + +func (s *Sender) VerifyAndExecuteBlocks() (err error) { + var ( + cli = s.cli + authCli = s.authCli + zkbasInstance = s.zkbasInstance + ) + pendingTx, err := s.l1RollupTxModel.GetLatestPendingTx(l1rolluptx.TxTypeVerifyAndExecute) + if err != nil && err != types.DbErrNotFound { + return err + } + // No need to submit a new transaction if there are any pending verification txs. + if pendingTx != nil { + return nil + } + + lastHandledTx, err := s.l1RollupTxModel.GetLatestHandledTx(l1rolluptx.TxTypeVerifyAndExecute) + if err != nil && err != types.DbErrNotFound { + return err + } + + start := int64(1) + if lastHandledTx != nil { + start = lastHandledTx.L2BlockHeight + 1 + } + blocks, err := s.blockModel.GetCommittedBlocksBetween(start, + start+int64(s.config.ChainConfig.MaxBlockCount)) + if err != nil && err != types.DbErrNotFound { + return fmt.Errorf("unable to get blocks to prove, err: %v", err) + } + if len(blocks) == 0 { + return nil + } + pendingVerifyAndExecuteBlocks, err := ConvertBlocksToVerifyAndExecuteBlockInfos(blocks) + if err != nil { + return fmt.Errorf("unable to convert blocks to verify-and-execute block infos: %v", err) + } + + blockProofs, err := s.proofModel.GetProofsBetween(start, start+int64(len(blocks))-1) + if err != nil { + return fmt.Errorf("unable to get proofs, err: %v", err) + } + if len(blockProofs) != len(blocks) { + return errors.New("related proofs not ready") + } + var proofs []*big.Int + for _, bProof := range blockProofs { + var proofInfo *prove.FormattedProof + err = json.Unmarshal([]byte(bProof.ProofInfo), &proofInfo) + if err != nil { + return err + } + proofs = append(proofs, proofInfo.A[:]...) + proofs = append(proofs, proofInfo.B[0][0], proofInfo.B[0][1]) + proofs = append(proofs, proofInfo.B[1][0], proofInfo.B[1][1]) + proofs = append(proofs, proofInfo.C[:]...) 
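+ // flatten each proof into A, B, C point order before passing it to the on-chain verify call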
+ } + gasPrice, err := s.cli.SuggestGasPrice(context.Background()) + if err != nil { + return err + } + // Verify blocks on-chain + txHash, err := zkbas.VerifyAndExecuteBlocks(cli, authCli, zkbasInstance, + pendingVerifyAndExecuteBlocks, proofs, gasPrice, s.config.ChainConfig.GasLimit) + if err != nil { + return fmt.Errorf("failed to send verify tx: %v", err) + } + + newRollupTx := &l1rolluptx.L1RollupTx{ + L1TxHash: txHash, + TxStatus: l1rolluptx.StatusPending, + TxType: l1rolluptx.TxTypeVerifyAndExecute, + L2BlockHeight: int64(pendingVerifyAndExecuteBlocks[len(pendingVerifyAndExecuteBlocks)-1].BlockHeader.BlockNumber), + } + err = s.l1RollupTxModel.CreateL1RollupTx(newRollupTx) + if err != nil { + return fmt.Errorf(fmt.Sprintf("failed to create rollup tx in db %v", err)) + } + logx.Infof("new blocks have been verified and executed(height): %d", newRollupTx.L2BlockHeight) + return nil +} diff --git a/service/sender/sender/types.go b/service/sender/sender/types.go new file mode 100644 index 000000000..34396476b --- /dev/null +++ b/service/sender/sender/types.go @@ -0,0 +1,114 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package sender + +import ( + "encoding/json" + "math/big" + "strings" + + "github.com/ethereum/go-ethereum/accounts/abi" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/crypto" + "github.com/zeromicro/go-zero/core/logx" + + zkbas "github.com/bnb-chain/zkbas-eth-rpc/zkbas/core/legend" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/compressedblock" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +const ( + EventNameBlockCommit = "BlockCommit" + EventNameBlockVerification = "BlockVerification" +) + +var ( + ZkbasContractAbi, _ = abi.JSON(strings.NewReader(zkbas.ZkbasMetaData.ABI)) + + zkbasLogBlockCommitSig = []byte("BlockCommit(uint32)") + zkbasLogBlockVerificationSig = []byte("BlockVerification(uint32)") + zkbasLogBlocksRevertSig = []byte("BlocksRevert(uint32,uint32)") + + zkbasLogBlockCommitSigHash = crypto.Keccak256Hash(zkbasLogBlockCommitSig) + zkbasLogBlockVerificationSigHash = crypto.Keccak256Hash(zkbasLogBlockVerificationSig) + zkbasLogBlocksRevertSigHash = crypto.Keccak256Hash(zkbasLogBlocksRevertSig) +) + +func defaultBlockHeader() zkbas.StorageStoredBlockInfo { + var ( + pendingOnChainOperationsHash [32]byte + stateRoot [32]byte + commitment [32]byte + ) + copy(pendingOnChainOperationsHash[:], common.FromHex(types.EmptyStringKeccak)[:]) + copy(stateRoot[:], tree.NilStateRoot[:]) + copy(commitment[:], common.FromHex("0x0000000000000000000000000000000000000000000000000000000000000000")[:]) + return zkbas.StorageStoredBlockInfo{ + BlockSize: 0, + BlockNumber: 0, + PriorityOperations: 0, + PendingOnchainOperationsHash: pendingOnChainOperationsHash, + Timestamp: big.NewInt(0), + StateRoot: stateRoot, + Commitment: commitment, + } +} + +func 
ConvertBlocksForCommitToCommitBlockInfos(oBlocks []*compressedblock.CompressedBlock) (commitBlocks []zkbas.OldZkbasCommitBlockInfo, err error) { + for _, oBlock := range oBlocks { + var newStateRoot [32]byte + var pubDataOffsets []uint32 + copy(newStateRoot[:], common.FromHex(oBlock.StateRoot)[:]) + err = json.Unmarshal([]byte(oBlock.PublicDataOffsets), &pubDataOffsets) + if err != nil { + logx.Errorf("[ConvertBlocksForCommitToCommitBlockInfos] unable to unmarshal: %s", err.Error()) + return nil, err + } + commitBlock := zkbas.OldZkbasCommitBlockInfo{ + NewStateRoot: newStateRoot, + PublicData: common.FromHex(oBlock.PublicData), + Timestamp: big.NewInt(oBlock.Timestamp), + PublicDataOffsets: pubDataOffsets, + BlockNumber: uint32(oBlock.BlockHeight), + BlockSize: oBlock.BlockSize, + } + commitBlocks = append(commitBlocks, commitBlock) + } + return commitBlocks, nil +} + +func ConvertBlocksToVerifyAndExecuteBlockInfos(oBlocks []*block.Block) (verifyAndExecuteBlocks []zkbas.OldZkbasVerifyAndExecuteBlockInfo, err error) { + for _, oBlock := range oBlocks { + var pendingOnChainOpsPubData [][]byte + if oBlock.PendingOnChainOperationsPubData != "" { + err = json.Unmarshal([]byte(oBlock.PendingOnChainOperationsPubData), &pendingOnChainOpsPubData) + if err != nil { + logx.Errorf("[ConvertBlocksToVerifyAndExecuteBlockInfos] unable to unmarshal pending pub data: %s", err.Error()) + return nil, err + } + } + verifyAndExecuteBlock := zkbas.OldZkbasVerifyAndExecuteBlockInfo{ + BlockHeader: chain.ConstructStoredBlockInfo(oBlock), + PendingOnchainOpsPubData: pendingOnChainOpsPubData, + } + verifyAndExecuteBlocks = append(verifyAndExecuteBlocks, verifyAndExecuteBlock) + } + return verifyAndExecuteBlocks, nil +} diff --git a/service/witness/config/config.go b/service/witness/config/config.go new file mode 100644 index 000000000..d33d354ad --- /dev/null +++ b/service/witness/config/config.go @@ -0,0 +1,21 @@ +package config + +import ( + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas/tree" +) + +type Config struct { + Postgres struct { + DataSource string + } + TreeDB struct { + Driver tree.Driver + //nolint:staticcheck + LevelDBOption tree.LevelDBOption `json:",optional"` + //nolint:staticcheck + RedisDBOption tree.RedisDBOption `json:",optional"` + } + LogConf logx.LogConf +} diff --git a/service/witness/etc/config.yaml.example b/service/witness/etc/config.yaml.example new file mode 100644 index 000000000..91bdfd33d --- /dev/null +++ b/service/witness/etc/config.yaml.example @@ -0,0 +1,14 @@ +Name: witness + +Postgres: + DataSource: host=127.0.0.1 user=postgres password=pw dbname=zkbas port=5432 sslmode=disable + +TreeDB: + Driver: memorydb + +LogConf: + ServiceName: witness + Mode: console + Path: ./log/witness + StackCooldownMillis: 500 + Level: error \ No newline at end of file diff --git a/service/witness/witness.go b/service/witness/witness.go new file mode 100644 index 000000000..06d5ba91a --- /dev/null +++ b/service/witness/witness.go @@ -0,0 +1,44 @@ +package witness + +import ( + "github.com/robfig/cron/v3" + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/proc" + + "github.com/bnb-chain/zkbas/service/witness/config" + "github.com/bnb-chain/zkbas/service/witness/witness" +) + +func Run(configFile string) error { + var c config.Config + conf.MustLoad(configFile, &c) + w, err := witness.NewWitness(c) + if err != nil { + panic(err) + } + logx.MustSetup(c.LogConf) + logx.DisableStat() + 
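// close the logger on shutdown so buffered log entries are flushed + 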
proc.AddShutdownListener(func() { + logx.Close() + }) + + cronJob := cron.New(cron.WithChain( + cron.SkipIfStillRunning(cron.DiscardLogger), + )) + _, err = cronJob.AddFunc("@every 2s", func() { + logx.Info("==========start generate block witness==========") + err := w.GenerateBlockWitness() + if err != nil { + logx.Errorf("failed to generate block witness, %v", err) + } + w.RescheduleBlockWitness() + }) + if err != nil { + panic(err) + } + cronJob.Start() + + logx.Info("witness cronjob is starting......") + select {} +} diff --git a/common/model/info/constant.go b/service/witness/witness/types.go similarity index 91% rename from common/model/info/constant.go rename to service/witness/witness/types.go index 8b442f57a..fce8dbe39 100644 --- a/common/model/info/constant.go +++ b/service/witness/witness/types.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,7 +12,6 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ -package info +package witness diff --git a/service/witness/witness/witness.go b/service/witness/witness/witness.go new file mode 100644 index 000000000..00e8f3f30 --- /dev/null +++ b/service/witness/witness/witness.go @@ -0,0 +1,260 @@ +package witness + +import ( + "encoding/json" + "errors" + "fmt" + "time" + + "github.com/ethereum/go-ethereum/common" + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + cryptoBlock "github.com/bnb-chain/zkbas-crypto/legend/circuit/bn254/block" + smt "github.com/bnb-chain/zkbas-smt" + utils "github.com/bnb-chain/zkbas/common/prove" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/blockwitness" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/proof" + "github.com/bnb-chain/zkbas/service/witness/config" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +const ( + UnprovedBlockWitnessTimeout = 10 * time.Minute + + BlockProcessDelta = 10 +) + +type Witness struct { + // config + config config.Config + helper *utils.WitnessHelper + + // Trees + treeCtx *tree.Context + accountTree smt.SparseMerkleTree + assetTrees []smt.SparseMerkleTree + liquidityTree smt.SparseMerkleTree + nftTree smt.SparseMerkleTree + + // The data access object + blockModel block.BlockModel + accountModel account.AccountModel + accountHistoryModel account.AccountHistoryModel + liquidityHistoryModel liquidity.LiquidityHistoryModel + nftHistoryModel nft.L2NftHistoryModel + proofModel proof.ProofModel + blockWitnessModel blockwitness.BlockWitnessModel +} + +func NewWitness(c config.Config) (*Witness, error) { + datasource := c.Postgres.DataSource + db, err := gorm.Open(postgres.Open(datasource)) + if err != nil { + return nil, fmt.Errorf("gorm connect db error, err: %v", err) + } + + w := &Witness{ + config: c, + blockModel: block.NewBlockModel(db), + blockWitnessModel: blockwitness.NewBlockWitnessModel(db), + accountModel: account.NewAccountModel(db), + accountHistoryModel: account.NewAccountHistoryModel(db), + liquidityHistoryModel: liquidity.NewLiquidityHistoryModel(db), + nftHistoryModel: nft.NewL2NftHistoryModel(db), + proofModel: proof.NewProofModel(db), + } + 
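// initState rebuilds the account, liquidity and nft trees up to the height of the latest confirmed proof + 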
err = w.initState() + return w, err +} + +func (w *Witness) initState() error { + p, err := w.proofModel.GetLatestConfirmedProof() + if err != nil { + if err != types.DbErrNotFound { + return fmt.Errorf("GetLatestConfirmedProof error: %v", err) + } else { + p = &proof.Proof{ + BlockNumber: 0, + } + } + } + // initialize the tree database + treeCtx := &tree.Context{ + Name: "witness", + Driver: w.config.TreeDB.Driver, + LevelDBOption: &w.config.TreeDB.LevelDBOption, + RedisDBOption: &w.config.TreeDB.RedisDBOption, + } + err = tree.SetupTreeDB(treeCtx) + if err != nil { + return fmt.Errorf("init tree database failed %v", err) + } + w.treeCtx = treeCtx + + // initialize accountTree and accountStateTrees + // the initial block number uses the latest sent block + w.accountTree, w.assetTrees, err = tree.InitAccountTree( + w.accountModel, + w.accountHistoryModel, + p.BlockNumber, + treeCtx, + ) + // the blockHeight depends on the proof start position + if err != nil { + return fmt.Errorf("initMerkleTree error: %v", err) + } + + w.liquidityTree, err = tree.InitLiquidityTree(w.liquidityHistoryModel, p.BlockNumber, + treeCtx) + if err != nil { + return fmt.Errorf("initLiquidityTree error: %v", err) + } + w.nftTree, err = tree.InitNftTree(w.nftHistoryModel, p.BlockNumber, + treeCtx) + if err != nil { + return fmt.Errorf("initNftTree error: %v", err) + } + w.helper = utils.NewWitnessHelper(w.treeCtx, w.accountTree, w.liquidityTree, w.nftTree, &w.assetTrees, w.accountModel) + return nil +} + +func (w *Witness) GenerateBlockWitness() (err error) { + var latestWitnessHeight int64 + latestWitnessHeight, err = w.blockWitnessModel.GetLatestBlockWitnessHeight() + if err != nil && err != types.DbErrNotFound { + return err + } + // get next batch of blocks + blocks, err := w.blockModel.GetBlocksBetween(latestWitnessHeight+1, latestWitnessHeight+BlockProcessDelta) + if err != nil { + if err != types.DbErrNotFound { + return err + } + return nil + } + // get latestVerifiedBlockNr + latestVerifiedBlockNr, err := w.blockModel.GetLatestVerifiedHeight() + if err != nil { + return err + } + + // scan each block + for _, block := range blocks { + // Step1: construct witness + blockWitness, err := w.constructBlockWitness(block, latestVerifiedBlockNr) + if err != nil { + return fmt.Errorf("failed to construct block witness, err: %v", err) + } + // Step2: commit trees for witness + err = tree.CommitTrees(uint64(latestVerifiedBlockNr), w.accountTree, &w.assetTrees, w.liquidityTree, w.nftTree) + if err != nil { + return fmt.Errorf("unable to commit trees after txs are executed, error: %v", err) + } + // Step3: insert witness into database + err = w.blockWitnessModel.CreateBlockWitness(blockWitness) + if err != nil { + // rollback trees + rollBackErr := tree.RollBackTrees(uint64(block.BlockHeight)-1, w.accountTree, &w.assetTrees, w.liquidityTree, w.nftTree) + if rollBackErr != nil { + logx.Errorf("unable to rollback trees %v", rollBackErr) + } + return fmt.Errorf("create unproved crypto block error, err: %v", err) + } + } + return nil +} + +func (w *Witness) RescheduleBlockWitness() { + latestConfirmedProof, err := w.proofModel.GetLatestConfirmedProof() + if err != nil && err != types.DbErrNotFound { + logx.Errorf("failed to get latest confirmed proof, err: %v", err) + return + } + + var nextBlockNumber int64 = 1 + if err != types.DbErrNotFound { + nextBlockNumber = latestConfirmedProof.BlockNumber + 1 + } + + nextBlockWitness, err := w.blockWitnessModel.GetBlockWitnessByNumber(nextBlockNumber) + if err != nil { +
logx.Errorf("failed to get latest block witness, err: %v", err) + return + } + + // skip if next block is not processed + if nextBlockWitness.Status == blockwitness.StatusPublished { + return + } + + // skip if the next block proof exists + // if the proof is not submitted and verified in L1, there should be another alerts + _, err = w.proofModel.GetProofByBlockNumber(nextBlockNumber) + if err == nil { + return + } + + // update block status to Published if it's timeout + if time.Now().After(nextBlockWitness.UpdatedAt.Add(UnprovedBlockWitnessTimeout)) { + err := w.blockWitnessModel.UpdateBlockWitnessStatus(nextBlockWitness, blockwitness.StatusPublished) + if err != nil { + logx.Errorf("update unproved block status error, err: %v", err) + return + } + } +} + +func (w *Witness) constructBlockWitness(block *block.Block, latestVerifiedBlockNr int64) (*blockwitness.BlockWitness, error) { + var oldStateRoot, newStateRoot []byte + txsWitness := make([]*utils.TxWitness, 0, block.BlockSize) + // scan each transaction + for idx, tx := range block.Txs { + txWitness, err := w.helper.ConstructTxWitness(tx, uint64(latestVerifiedBlockNr)) + if err != nil { + return nil, err + } + txsWitness = append(txsWitness, txWitness) + // if it is the first tx of the block + if idx == 0 { + oldStateRoot = txWitness.StateRootBefore + } + // if it is the last tx of the block + if idx == len(block.Txs)-1 { + newStateRoot = txWitness.StateRootAfter + } + } + + emptyTxCount := int(block.BlockSize) - len(block.Txs) + for i := 0; i < emptyTxCount; i++ { + txsWitness = append(txsWitness, cryptoBlock.EmptyTx()) + } + if common.Bytes2Hex(newStateRoot) != block.StateRoot { + return nil, errors.New("state root doesn't match") + } + + b := &cryptoBlock.Block{ + BlockNumber: block.BlockHeight, + CreatedAt: block.CreatedAt.UnixMilli(), + OldStateRoot: oldStateRoot, + NewStateRoot: newStateRoot, + BlockCommitment: common.FromHex(block.BlockCommitment), + Txs: txsWitness, + } + bz, err := json.Marshal(b) + if err != nil { + return nil, err + } + blockWitness := blockwitness.BlockWitness{ + Height: block.BlockHeight, + WitnessData: string(bz), + Status: blockwitness.StatusPublished, + } + return &blockWitness, nil +} diff --git a/common/model/init/contractaddr.yaml b/tools/dbinitializer/contractaddr.yaml similarity index 100% rename from common/model/init/contractaddr.yaml rename to tools/dbinitializer/contractaddr.yaml diff --git a/common/model/init/contractaddr.yaml.example b/tools/dbinitializer/contractaddr.yaml.example similarity index 100% rename from common/model/init/contractaddr.yaml.example rename to tools/dbinitializer/contractaddr.yaml.example diff --git a/tools/dbinitializer/main.go b/tools/dbinitializer/main.go new file mode 100644 index 000000000..7d230c034 --- /dev/null +++ b/tools/dbinitializer/main.go @@ -0,0 +1,264 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package dbinitializer + +import ( + "encoding/json" + + "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/assert" + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/asset" + "github.com/bnb-chain/zkbas/dao/block" + "github.com/bnb-chain/zkbas/dao/blockwitness" + "github.com/bnb-chain/zkbas/dao/compressedblock" + "github.com/bnb-chain/zkbas/dao/l1rolluptx" + "github.com/bnb-chain/zkbas/dao/l1syncedblock" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/mempool" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/priorityrequest" + "github.com/bnb-chain/zkbas/dao/proof" + "github.com/bnb-chain/zkbas/dao/sysconfig" + "github.com/bnb-chain/zkbas/dao/tx" + "github.com/bnb-chain/zkbas/tree" + "github.com/bnb-chain/zkbas/types" +) + +type contractAddr struct { + Governance string + AssetGovernance string + VerifierProxy string + ZnsControllerProxy string + ZnsResolverProxy string + ZkbasProxy string + UpgradeGateKeeper string + LEGToken string + REYToken string + ERC721 string + ZnsPriceOracle string +} + +type dao struct { + sysConfigModel sysconfig.SysConfigModel + accountModel account.AccountModel + accountHistoryModel account.AccountHistoryModel + assetModel asset.AssetModel + mempoolModel mempool.MempoolModel + failTxModel tx.FailTxModel + txDetailModel tx.TxDetailModel + txModel tx.TxModel + blockModel block.BlockModel + compressedBlockModel compressedblock.CompressedBlockModel + blockWitnessModel blockwitness.BlockWitnessModel + proofModel proof.ProofModel + l1SyncedBlockModel l1syncedblock.L1SyncedBlockModel + priorityRequestModel priorityrequest.PriorityRequestModel + l1RollupTModel l1rolluptx.L1RollupTxModel + liquidityModel liquidity.LiquidityModel + liquidityHistoryModel liquidity.LiquidityHistoryModel + nftModel nft.L2NftModel + nftHistoryModel nft.L2NftHistoryModel +} + +func Initialize( + dsn string, + contractAddrFile string, + bscTestNetworkRPC, localTestNetworkRPC string, +) error { + db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{}) + if err != nil { + return err + } + var svrConf contractAddr + conf.MustLoad(contractAddrFile, &svrConf) + + unmarshal, _ := json.Marshal(svrConf) + logx.Infof("init configs: %s", string(unmarshal)) + + dao := &dao{ + sysConfigModel: sysconfig.NewSysConfigModel(db), + accountModel: account.NewAccountModel(db), + accountHistoryModel: account.NewAccountHistoryModel(db), + assetModel: asset.NewAssetModel(db), + mempoolModel: mempool.NewMempoolModel(db), + failTxModel: tx.NewFailTxModel(db), + txDetailModel: tx.NewTxDetailModel(db), + txModel: tx.NewTxModel(db), + blockModel: block.NewBlockModel(db), + compressedBlockModel: compressedblock.NewCompressedBlockModel(db), + blockWitnessModel: blockwitness.NewBlockWitnessModel(db), + proofModel: proof.NewProofModel(db), + l1SyncedBlockModel: l1syncedblock.NewL1SyncedBlockModel(db), + priorityRequestModel: priorityrequest.NewPriorityRequestModel(db), + l1RollupTModel: l1rolluptx.NewL1RollupTxModel(db), + liquidityModel: liquidity.NewLiquidityModel(db), + liquidityHistoryModel: liquidity.NewLiquidityHistoryModel(db), + nftModel: nft.NewL2NftModel(db), + nftHistoryModel: nft.NewL2NftHistoryModel(db), + } + + dropTables(dao, bscTestNetworkRPC, localTestNetworkRPC) + initTable(dao, &svrConf, bscTestNetworkRPC, localTestNetworkRPC) + + return nil +} + +func 
initSysConfig(svrConf *contractAddr, bscTestNetworkRPC, localTestNetworkRPC string) []*sysconfig.SysConfig { + return []*sysconfig.SysConfig{ + { + Name: types.SysGasFee, + Value: "100000000000000", + ValueType: "string", + Comment: "based on BNB", + }, + { + Name: types.TreasuryAccountIndex, + Value: "0", + ValueType: "int", + Comment: "treasury index", + }, + { + Name: types.GasAccountIndex, + Value: "1", + ValueType: "int", + Comment: "gas index", + }, + { + Name: types.ZkbasContract, + Value: svrConf.ZkbasProxy, + ValueType: "string", + Comment: "Zkbas contract on BSC", + }, + // Governance Contract + { + Name: types.GovernanceContract, + Value: svrConf.Governance, + ValueType: "string", + Comment: "Governance contract on BSC", + }, + // network rpc + { + Name: types.BscTestNetworkRpc, + Value: bscTestNetworkRPC, + ValueType: "string", + Comment: "BSC network rpc", + }, + { + Name: types.LocalTestNetworkRpc, + Value: localTestNetworkRPC, + ValueType: "string", + Comment: "Local network rpc", + }, + { + Name: types.ZnsPriceOracle, + Value: svrConf.ZnsPriceOracle, + ValueType: "string", + Comment: "Zns Price Oracle", + }, + } +} + +func initAssetsInfo() []*asset.Asset { + return []*asset.Asset{ + { + AssetId: 0, + L1Address: "0x00", + AssetName: "BNB", + AssetSymbol: "BNB", + Decimals: 18, + Status: 0, + IsGasAsset: asset.IsGasAsset, + }, + } +} + +func dropTables( + dao *dao, bscTestNetworkRPC, localTestNetworkRPC string) { + assert.Nil(nil, dao.sysConfigModel.DropSysConfigTable()) + assert.Nil(nil, dao.accountModel.DropAccountTable()) + assert.Nil(nil, dao.accountHistoryModel.DropAccountHistoryTable()) + assert.Nil(nil, dao.assetModel.DropAssetTable()) + assert.Nil(nil, dao.mempoolModel.DropMempoolTxTable()) + assert.Nil(nil, dao.failTxModel.DropFailTxTable()) + assert.Nil(nil, dao.txDetailModel.DropTxDetailTable()) + assert.Nil(nil, dao.txModel.DropTxTable()) + assert.Nil(nil, dao.blockModel.DropBlockTable()) + assert.Nil(nil, dao.compressedBlockModel.DropCompressedBlockTable()) + assert.Nil(nil, dao.blockWitnessModel.DropBlockWitnessTable()) + assert.Nil(nil, dao.proofModel.DropProofTable()) + assert.Nil(nil, dao.l1SyncedBlockModel.DropL1SyncedBlockTable()) + assert.Nil(nil, dao.priorityRequestModel.DropPriorityRequestTable()) + assert.Nil(nil, dao.l1RollupTModel.DropL1RollupTxTable()) + assert.Nil(nil, dao.liquidityModel.DropLiquidityTable()) + assert.Nil(nil, dao.liquidityHistoryModel.DropLiquidityHistoryTable()) + assert.Nil(nil, dao.nftModel.DropL2NftTable()) + assert.Nil(nil, dao.nftHistoryModel.DropL2NftHistoryTable()) +} + +func initTable(dao *dao, svrConf *contractAddr, bscTestNetworkRPC, localTestNetworkRPC string) { + assert.Nil(nil, dao.sysConfigModel.CreateSysConfigTable()) + assert.Nil(nil, dao.accountModel.CreateAccountTable()) + assert.Nil(nil, dao.accountHistoryModel.CreateAccountHistoryTable()) + assert.Nil(nil, dao.assetModel.CreateAssetTable()) + assert.Nil(nil, dao.mempoolModel.CreateMempoolTxTable()) + assert.Nil(nil, dao.failTxModel.CreateFailTxTable()) + assert.Nil(nil, dao.blockModel.CreateBlockTable()) + assert.Nil(nil, dao.txModel.CreateTxTable()) + assert.Nil(nil, dao.txDetailModel.CreateTxDetailTable()) + assert.Nil(nil, dao.compressedBlockModel.CreateCompressedBlockTable()) + assert.Nil(nil, dao.blockWitnessModel.CreateBlockWitnessTable()) + assert.Nil(nil, dao.proofModel.CreateProofTable()) + assert.Nil(nil, dao.l1SyncedBlockModel.CreateL1SyncedBlockTable()) + assert.Nil(nil, dao.priorityRequestModel.CreatePriorityRequestTable()) + assert.Nil(nil, 
dao.l1RollupTModel.CreateL1RollupTxTable()) + assert.Nil(nil, dao.liquidityModel.CreateLiquidityTable()) + assert.Nil(nil, dao.liquidityHistoryModel.CreateLiquidityHistoryTable()) + assert.Nil(nil, dao.nftModel.CreateL2NftTable()) + assert.Nil(nil, dao.nftHistoryModel.CreateL2NftHistoryTable()) + rowsAffected, err := dao.assetModel.CreateAssetsInBatch(initAssetsInfo()) + if err != nil { + panic(err) + } + logx.Infof("l2 assets info rows affected: %d", rowsAffected) + rowsAffected, err = dao.sysConfigModel.CreateSysConfigInBatches(initSysConfig(svrConf, bscTestNetworkRPC, localTestNetworkRPC)) + if err != nil { + panic(err) + } + logx.Infof("sys config rows affected: %d", rowsAffected) + err = dao.blockModel.CreateGenesisBlock(&block.Block{ + BlockCommitment: "0000000000000000000000000000000000000000000000000000000000000000", + BlockHeight: 0, + StateRoot: common.Bytes2Hex(tree.NilStateRoot), + PriorityOperations: 0, + PendingOnChainOperationsHash: "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", + CommittedTxHash: "", + CommittedAt: 0, + VerifiedTxHash: "", + VerifiedAt: 0, + BlockStatus: block.StatusVerifiedAndExecuted, + }) + if err != nil { + panic(err) + } +} diff --git a/tools/recovery/etc/config.yaml.example b/tools/recovery/etc/config.yaml.example new file mode 100644 index 000000000..c94aac51d --- /dev/null +++ b/tools/recovery/etc/config.yaml.example @@ -0,0 +1,10 @@ +Postgres: + DataSource: host=127.0.0.1 user=postgres password=Zkbas@123 dbname=zkbas port=5432 sslmode=disable + +CacheRedis: + - Host: 127.0.0.1:6379 + # Pass: myredis + Type: node + +TreeDB: + Driver: memorydb \ No newline at end of file diff --git a/tools/recovery/internal/config/config.go b/tools/recovery/internal/config/config.go new file mode 100644 index 000000000..5cfc2f3ff --- /dev/null +++ b/tools/recovery/internal/config/config.go @@ -0,0 +1,23 @@ +package config + +import ( + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/stores/cache" + + "github.com/bnb-chain/zkbas/tree" +) + +type Config struct { + Postgres struct { + DataSource string + } + CacheRedis cache.CacheConf + TreeDB struct { + Driver tree.Driver + //nolint:staticcheck + LevelDBOption tree.LevelDBOption `json:",optional"` + //nolint:staticcheck + RedisDBOption tree.RedisDBOption `json:",optional"` + } + LogConf logx.LogConf +} diff --git a/tools/recovery/internal/svc/servicecontext.go b/tools/recovery/internal/svc/servicecontext.go new file mode 100644 index 000000000..f270d5941 --- /dev/null +++ b/tools/recovery/internal/svc/servicecontext.go @@ -0,0 +1,35 @@ +package svc + +import ( + "github.com/zeromicro/go-zero/core/logx" + "gorm.io/driver/postgres" + "gorm.io/gorm" + + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/tools/recovery/internal/config" +) + +type ServiceContext struct { + Config config.Config + + AccountModel account.AccountModel + AccountHistoryModel account.AccountHistoryModel + LiquidityHistoryModel liquidity.LiquidityHistoryModel + NftHistoryModel nft.L2NftHistoryModel +} + +func NewServiceContext(c config.Config) *ServiceContext { + db, err := gorm.Open(postgres.Open(c.Postgres.DataSource)) + if err != nil { + logx.Errorf("gorm connect db error, err = %s", err.Error()) + } + return &ServiceContext{ + Config: c, + AccountModel: account.NewAccountModel(db), + AccountHistoryModel: account.NewAccountHistoryModel(db), + LiquidityHistoryModel: 
liquidity.NewLiquidityHistoryModel(db), + NftHistoryModel: nft.NewL2NftHistoryModel(db), + } +} diff --git a/tools/recovery/recovery.go b/tools/recovery/recovery.go new file mode 100644 index 000000000..eb7d2094f --- /dev/null +++ b/tools/recovery/recovery.go @@ -0,0 +1,74 @@ +package recovery + +import ( + "github.com/zeromicro/go-zero/core/conf" + "github.com/zeromicro/go-zero/core/logx" + "github.com/zeromicro/go-zero/core/proc" + + bsmt "github.com/bnb-chain/zkbas-smt" + "github.com/bnb-chain/zkbas/tools/recovery/internal/config" + "github.com/bnb-chain/zkbas/tools/recovery/internal/svc" + "github.com/bnb-chain/zkbas/tree" +) + +func RecoveryTreeDB( + configFile string, + blockHeight int64, + serviceName string, + batchSize int, +) { + var c config.Config + conf.MustLoad(configFile, &c) + ctx := svc.NewServiceContext(c) + logx.MustSetup(c.LogConf) + logx.DisableStat() + proc.AddShutdownListener(func() { + logx.Close() + }) + + // initialize the tree database + treeCtx := &tree.Context{ + Name: serviceName, + Driver: c.TreeDB.Driver, + LevelDBOption: &c.TreeDB.LevelDBOption, + RedisDBOption: &c.TreeDB.RedisDBOption, + Reload: true, + } + treeCtx.SetOptions(bsmt.InitializeVersion(bsmt.Version(blockHeight) - 1)) + treeCtx.SetBatchReloadSize(batchSize) + err := tree.SetupTreeDB(treeCtx) + if err != nil { + logx.Errorf("Init tree database failed: %s", err) + return + } + + // initialize accountTree and accountStateTrees + _, _, err = tree.InitAccountTree( + ctx.AccountModel, + ctx.AccountHistoryModel, + blockHeight, + treeCtx, + ) + if err != nil { + logx.Error("InitMerkleTree error:", err) + return + } + // initialize liquidityTree + _, err = tree.InitLiquidityTree( + ctx.LiquidityHistoryModel, + blockHeight, + treeCtx) + if err != nil { + logx.Errorf("InitLiquidityTree error: %s", err.Error()) + return + } + // initialize nftTree + _, err = tree.InitNftTree( + ctx.NftHistoryModel, + blockHeight, + treeCtx) + if err != nil { + logx.Errorf("InitNftTree error: %s", err.Error()) + return + } +} diff --git a/tree/account_tree.go b/tree/account_tree.go new file mode 100644 index 000000000..b46c3eb10 --- /dev/null +++ b/tree/account_tree.go @@ -0,0 +1,273 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * + */ + +package tree + +import ( + "errors" + "strconv" + + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" + bsmt "github.com/bnb-chain/zkbas-smt" + "github.com/bnb-chain/zkbas-smt/database/memory" + "github.com/bnb-chain/zkbas/common/chain" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/types" +) + +func accountAssetNamespace(index int64) string { + return AccountAssetPrefix + strconv.Itoa(int(index)) + ":" +} + +func InitAccountTree( + accountModel AccountModel, + accountHistoryModel AccountHistoryModel, + blockHeight int64, + ctx *Context, +) ( + accountTree bsmt.SparseMerkleTree, accountAssetTrees []bsmt.SparseMerkleTree, err error, +) { + accountNums, err := accountHistoryModel.GetValidAccountCount(blockHeight) + if err != nil { + logx.Errorf("unable to get all accountNums") + return nil, nil, err + } + + opts := ctx.Options(blockHeight) + + // init account state trees + accountAssetTrees = make([]bsmt.SparseMerkleTree, accountNums) + for index := int64(0); index < accountNums; index++ { + // create account assets tree + accountAssetTrees[index], err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), + SetNamespace(ctx, accountAssetNamespace(index)), AssetTreeHeight, NilAccountAssetNodeHash, + opts...) + if err != nil { + logx.Errorf("unable to create new tree by assets: %s", err.Error()) + return nil, nil, err + } + } + accountTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), + SetNamespace(ctx, AccountPrefix), AccountTreeHeight, NilAccountNodeHash, + opts...) + if err != nil { + logx.Errorf("unable to create new account tree: %s", err.Error()) + return nil, nil, err + } + + if accountNums == 0 { + return accountTree, accountAssetTrees, nil + } + + if ctx.IsLoad() { + for i := 0; i < int(accountNums); i += ctx.BatchReloadSize() { + err := reloadAccountTreeFromRDB( + accountModel, accountHistoryModel, blockHeight, + i, i+ctx.BatchReloadSize(), + accountTree, accountAssetTrees) + if err != nil { + return nil, nil, err + } + } + + for i := range accountAssetTrees { + _, err := accountAssetTrees[i].Commit(nil) + if err != nil { + logx.Errorf("unable to set asset to tree: %s", err.Error()) + return nil, nil, err + } + } + + _, err = accountTree.Commit(nil) + if err != nil { + logx.Errorf("unable to commit account tree: %s", err.Error()) + return nil, nil, err + } + return accountTree, accountAssetTrees, nil + } + + // It's not loading from RDB, need to check tree version + if accountTree.LatestVersion() > bsmt.Version(blockHeight) && !accountTree.IsEmpty() { + logx.Infof("account tree version [%d] is higher than block, rollback to %d", accountTree.LatestVersion(), blockHeight) + err := accountTree.Rollback(bsmt.Version(blockHeight)) + if err != nil { + logx.Errorf("unable to rollback account tree: %s, version: %d", err.Error(), blockHeight) + return nil, nil, err + } + } + + for i := range accountAssetTrees { + if accountAssetTrees[i].LatestVersion() > bsmt.Version(blockHeight) && !accountAssetTrees[i].IsEmpty() { + logx.Infof("asset tree %d version [%d] is higher than block, rollback to %d", i, accountAssetTrees[i].LatestVersion(), blockHeight) + err := accountAssetTrees[i].Rollback(bsmt.Version(blockHeight)) + if err != nil { + logx.Errorf("unable to rollback asset [%d] tree: %s, version: %d", i, err.Error(), blockHeight) + return nil, nil, err + } + } + } + + return accountTree, accountAssetTrees, nil +} + +func reloadAccountTreeFromRDB( + accountModel AccountModel, + 
accountHistoryModel AccountHistoryModel, + blockHeight int64, + offset, limit int, + accountTree bsmt.SparseMerkleTree, + accountAssetTrees []bsmt.SparseMerkleTree, +) error { + _, accountHistories, err := accountHistoryModel.GetValidAccounts(blockHeight, + limit, offset) + if err != nil { + logx.Errorf("unable to get all accountHistories") + return err + } + + var ( + accountInfoMap = make(map[int64]*account.Account) + ) + + for _, accountHistory := range accountHistories { + if accountInfoMap[accountHistory.AccountIndex] == nil { + accountInfo, err := accountModel.GetAccountByIndex(accountHistory.AccountIndex) + if err != nil { + logx.Errorf("unable to get account by account index: %s", err.Error()) + return err + } + accountInfoMap[accountHistory.AccountIndex] = &account.Account{ + AccountIndex: accountInfo.AccountIndex, + AccountName: accountInfo.AccountName, + PublicKey: accountInfo.PublicKey, + AccountNameHash: accountInfo.AccountNameHash, + L1Address: accountInfo.L1Address, + Nonce: 0, + CollectionNonce: 0, + Status: account.AccountStatusConfirmed, + } + } + if accountHistory.Nonce != types.NilNonce { + accountInfoMap[accountHistory.AccountIndex].Nonce = accountHistory.Nonce + } + if accountHistory.CollectionNonce != types.NilNonce { + accountInfoMap[accountHistory.AccountIndex].CollectionNonce = accountHistory.CollectionNonce + } + accountInfoMap[accountHistory.AccountIndex].AssetInfo = accountHistory.AssetInfo + accountInfoMap[accountHistory.AccountIndex].AssetRoot = accountHistory.AssetRoot + } + + // get related account info + for i := int64(0); i < int64(len(accountHistories)); i++ { + accountIndex := accountHistories[i].AccountIndex + if accountInfoMap[accountIndex] == nil { + logx.Errorf("invalid account index") + return errors.New("invalid account index") + } + oAccountInfo := accountInfoMap[accountIndex] + accountInfo, err := chain.ToFormatAccountInfo(oAccountInfo) + if err != nil { + logx.Errorf("unable to convert to format account info: %s", err.Error()) + return err + } + // create account assets node + for assetId, assetInfo := range accountInfo.AssetInfo { + hashVal, err := AssetToNode( + assetInfo.Balance.String(), + assetInfo.LpAmount.String(), + assetInfo.OfferCanceledOrFinalized.String(), + ) + if err != nil { + logx.Errorf("unable to convert asset to node: %s", err.Error()) + return err + } + err = accountAssetTrees[accountIndex].Set(uint64(assetId), hashVal) + if err != nil { + logx.Errorf("unable to set asset to tree: %s", err.Error()) + return err + } + } + accountHashVal, err := AccountToNode( + accountInfoMap[accountIndex].AccountNameHash, + accountInfoMap[accountIndex].PublicKey, + accountInfoMap[accountIndex].Nonce, + accountInfoMap[accountIndex].CollectionNonce, + accountAssetTrees[accountIndex].Root(), + ) + if err != nil { + logx.Errorf("unable to convert account to node: %s", err.Error()) + return err + } + err = accountTree.Set(uint64(accountIndex), accountHashVal) + if err != nil { + logx.Errorf("unable to set account to tree: %s", err.Error()) + return err + } + } + + return nil +} + +func AssetToNode(balance string, lpAmount string, offerCanceledOrFinalized string) (hashVal []byte, err error) { + hashVal, err = ComputeAccountAssetLeafHash(balance, lpAmount, offerCanceledOrFinalized) + if err != nil { + logx.Errorf("unable to compute asset leaf hash: %s", err.Error()) + return nil, err + } + + return hashVal, nil +} + +func AccountToNode( + accountNameHash string, + publicKey string, + nonce int64, + collectionNonce int64, + assetRoot []byte, +) 
(hashVal []byte, err error) { + hashVal, err = ComputeAccountLeafHash( + accountNameHash, + publicKey, + nonce, + collectionNonce, + assetRoot) + if err != nil { + logx.Errorf("unable to compute account leaf hash: %s", err.Error()) + return nil, err + } + + return hashVal, nil +} + +func NewEmptyAccountAssetTree( + ctx *Context, + index int64, + blockHeight uint64, +) (tree bsmt.SparseMerkleTree, err error) { + return bsmt.NewBASSparseMerkleTree( + bsmt.NewHasher(zmimc.Hmimc), + SetNamespace(ctx, accountAssetNamespace(index)), + AssetTreeHeight, NilAccountAssetNodeHash, + ctx.Options(int64(blockHeight))...) +} + +func NewMemAccountAssetTree() (tree bsmt.SparseMerkleTree, err error) { + return bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), + memory.NewMemoryDB(), AssetTreeHeight, NilAccountAssetNodeHash) +} diff --git a/tree/liquidity_tree.go b/tree/liquidity_tree.go new file mode 100644 index 000000000..c689af288 --- /dev/null +++ b/tree/liquidity_tree.go @@ -0,0 +1,139 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package tree + +import ( + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" + bsmt "github.com/bnb-chain/zkbas-smt" + "github.com/bnb-chain/zkbas/types" +) + +func InitLiquidityTree( + liquidityHistoryModel LiquidityHistoryModel, + blockHeight int64, + ctx *Context, +) ( + liquidityTree bsmt.SparseMerkleTree, err error, +) { + + liquidityTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), + SetNamespace(ctx, LiquidityPrefix), LiquidityTreeHeight, NilLiquidityNodeHash, + ctx.Options(blockHeight)...) 
+ if err != nil { + logx.Errorf("unable to create tree from db: %s", err.Error()) + return nil, err + } + + if ctx.IsLoad() { + nums, err := liquidityHistoryModel.GetLatestLiquidityCountByBlockHeight(blockHeight) + if err != nil { + logx.Errorf("unable to get latest liquidity assets: %s", err.Error()) + return nil, err + } + for i := 0; i < int(nums); i += ctx.BatchReloadSize() { + err := loadLiquidityTreeFromRDB( + liquidityHistoryModel, blockHeight, + i, i+ctx.BatchReloadSize(), liquidityTree) + if err != nil { + return nil, err + } + _, err = liquidityTree.Commit(nil) + if err != nil { + logx.Errorf("unable to commit liquidity tree: %s", err.Error()) + return nil, err + } + } + + return liquidityTree, nil + } + + // It's not loading from RDB, need to check tree version + if liquidityTree.LatestVersion() > bsmt.Version(blockHeight) && !liquidityTree.IsEmpty() { + logx.Infof("liquidity tree version [%d] is higher than block, rollback to %d", liquidityTree.LatestVersion(), blockHeight) + err := liquidityTree.Rollback(bsmt.Version(blockHeight)) + if err != nil { + logx.Errorf("unable to rollback liquidity tree: %s, version: %d", err.Error(), blockHeight) + return nil, err + } + } + + return liquidityTree, nil +} + +func loadLiquidityTreeFromRDB( + liquidityHistoryModel LiquidityHistoryModel, + blockHeight int64, + offset, limit int, + liquidityTree bsmt.SparseMerkleTree, +) error { + liquidityAssets, err := liquidityHistoryModel.GetLatestLiquidityByBlockHeight(blockHeight, + limit, offset) + if err != nil { + if err != types.DbErrNotFound { + logx.Errorf("unable to get latest liquidity assets: %s", err.Error()) + return err + } + } + for _, liquidityAsset := range liquidityAssets { + pairIndex := liquidityAsset.PairIndex + hashVal, err := LiquidityAssetToNode( + liquidityAsset.AssetAId, liquidityAsset.AssetA, + liquidityAsset.AssetBId, liquidityAsset.AssetB, + liquidityAsset.LpAmount, liquidityAsset.KLast, + liquidityAsset.FeeRate, liquidityAsset.TreasuryAccountIndex, liquidityAsset.TreasuryRate) + if err != nil { + logx.Errorf("unable to convert liquidity asset to node: %s", err.Error()) + return err + } + err = liquidityTree.Set(uint64(pairIndex), hashVal) + if err != nil { + logx.Errorf("unable to write liquidity asset to tree: %s", err.Error()) + return err + } + } + return nil +} + +func LiquidityAssetToNode( + assetAId int64, + assetA string, + assetBId int64, + assetB string, + lpAmount string, + kLast string, + feeRate int64, + treasuryAccountIndex int64, + treasuryFeeRate int64, +) (hashVal []byte, err error) { + hashVal, err = ComputeLiquidityAssetLeafHash( + assetAId, assetA, + assetBId, assetB, + lpAmount, + kLast, + feeRate, + treasuryAccountIndex, + treasuryFeeRate, + ) + if err != nil { + logx.Errorf("unable to compute liquidity asset leaf hash: %s", err.Error()) + return nil, err + } + return hashVal, nil +} diff --git a/tree/nft_tree.go b/tree/nft_tree.go new file mode 100644 index 000000000..ac73f502a --- /dev/null +++ b/tree/nft_tree.go @@ -0,0 +1,120 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package tree + +import ( + "github.com/zeromicro/go-zero/core/logx" + + "github.com/bnb-chain/zkbas-crypto/hash/bn254/zmimc" + bsmt "github.com/bnb-chain/zkbas-smt" +) + +func InitNftTree( + nftHistoryModel L2NftHistoryModel, + blockHeight int64, + ctx *Context, +) ( + nftTree bsmt.SparseMerkleTree, err error, +) { + nftTree, err = bsmt.NewBASSparseMerkleTree(bsmt.NewHasher(zmimc.Hmimc), + SetNamespace(ctx, NFTPrefix), NftTreeHeight, NilNftNodeHash, + ctx.Options(blockHeight)...) + if err != nil { + logx.Errorf("unable to create tree from db: %s", err.Error()) + return nil, err + } + + if ctx.IsLoad() { + nums, err := nftHistoryModel.GetLatestNftAssetCountByBlockHeight(blockHeight) + if err != nil { + logx.Errorf("unable to get latest nft assets: %s", err.Error()) + return nil, err + } + for i := 0; i < int(nums); i += ctx.BatchReloadSize() { + err := loadNftTreeFromRDB( + nftHistoryModel, blockHeight, + i, i+ctx.BatchReloadSize(), nftTree) + if err != nil { + return nil, err + } + } + _, err = nftTree.Commit(nil) + if err != nil { + logx.Errorf("unable to commit nft tree: %s", err.Error()) + return nil, err + } + return nftTree, nil + } + + // It's not loading from RDB, need to check tree version + if nftTree.LatestVersion() > bsmt.Version(blockHeight) && !nftTree.IsEmpty() { + logx.Infof("nft tree version [%d] is higher than block, rollback to %d", nftTree.LatestVersion(), blockHeight) + err := nftTree.Rollback(bsmt.Version(blockHeight)) + if err != nil { + logx.Errorf("unable to rollback nft tree: %s, version: %d", err.Error(), blockHeight) + return nil, err + } + } + return nftTree, nil +} + +func loadNftTreeFromRDB( + nftHistoryModel L2NftHistoryModel, + blockHeight int64, + offset, limit int, + nftTree bsmt.SparseMerkleTree, +) error { + _, nftAssets, err := nftHistoryModel.GetLatestNftAssetsByBlockHeight(blockHeight, + limit, offset) + if err != nil { + logx.Errorf("unable to get latest nft assets: %s", err.Error()) + return err + } + for _, nftAsset := range nftAssets { + nftIndex := nftAsset.NftIndex + hashVal, err := NftAssetToNode(nftAsset) + if err != nil { + logx.Errorf("unable to convert nft asset to node: %s", err.Error()) + return err + } + + err = nftTree.Set(uint64(nftIndex), hashVal) + if err != nil { + logx.Errorf("unable to write nft asset to tree: %s", err.Error()) + return err + } + } + return nil +} + +func NftAssetToNode(nftAsset *AccountL2NftHistory) (hashVal []byte, err error) { + hashVal, err = ComputeNftAssetLeafHash( + nftAsset.CreatorAccountIndex, + nftAsset.OwnerAccountIndex, + nftAsset.NftContentHash, + nftAsset.NftL1Address, + nftAsset.NftL1TokenId, + nftAsset.CreatorTreasuryRate, + nftAsset.CollectionId, + ) + if err != nil { + logx.Errorf("unable to compute nft asset leaf hash: %s", err.Error()) + return nil, err + } + return hashVal, nil +} diff --git a/pkg/treedb/treedb.go b/tree/treedb.go similarity index 69% rename from pkg/treedb/treedb.go rename to tree/treedb.go index 008ba3daf..84d134345 100644 --- a/pkg/treedb/treedb.go +++ b/tree/treedb.go @@ -1,105 +1,129 @@ -package treedb +package tree import ( "encoding/json" + "errors" "strings" "time" - "github.com/bnb-chain/bas-smt/database" - "github.com/bnb-chain/bas-smt/database/leveldb" - "github.com/bnb-chain/bas-smt/database/memory" - "github.com/bnb-chain/bas-smt/database/redis" + bsmt "github.com/bnb-chain/zkbas-smt" + "github.com/bnb-chain/zkbas-smt/database" + 
"github.com/bnb-chain/zkbas-smt/database/leveldb" + "github.com/bnb-chain/zkbas-smt/database/memory" + "github.com/bnb-chain/zkbas-smt/database/redis" ) -const ( - NFTPrefix = "nft:" - LiquidityPrefix = "liquidity:" - AccountPrefix = "account:" - AccountAssetPrefix = "account_asset:" +const defaultBatchReloadSize = 1000 + +var ( + ErrUnsupportedDriver = errors.New("unsupported db driver") ) type Driver string type LevelDBOption struct { - File string - Cache int `json:",optional"` + File string + //nolint:staticcheck + Cache int `json:",optional"` + //nolint:staticcheck Handles int `json:",optional"` } type RedisDBOption struct { + //nolint:staticcheck ClusterAddr []string `json:",optional"` - Addr string `json:",optional"` + //nolint:staticcheck + Addr string `json:",optional"` // Use the specified Username to authenticate the current connection // with one of the connections defined in the ACL list when connecting // to a Redis 6.0 instance, or greater, that is using the Redis ACL system. + //nolint:staticcheck Username string `json:",optional"` // Optional password. Must match the password specified in the // requirepass server configuration option (if connecting to a Redis 5.0 instance, or lower), // or the User Password when connecting to a Redis 6.0 instance, or greater, // that is using the Redis ACL system. + //nolint:staticcheck Password string `json:",optional"` // The maximum number of retries before giving up. Command is retried // on network errors and MOVED/ASK redirects. // Default is 3 retries. + //nolint:staticcheck MaxRedirects int `json:",optional"` // Enables read-only commands on slave nodes. + //nolint:staticcheck ReadOnly bool `json:",optional"` // Allows routing read-only commands to the closest master or slave node. // It automatically enables ReadOnly. + //nolint:staticcheck RouteByLatency bool `json:",optional"` // Allows routing read-only commands to the random master or slave node. // It automatically enables ReadOnly. + //nolint:staticcheck RouteRandomly bool `json:",optional"` // Maximum number of retries before giving up. // Default is 3 retries; -1 (not 0) disables retries. + //nolint:staticcheck MaxRetries int `json:",optional"` // Minimum backoff between each retry. // Default is 8 milliseconds; -1 disables backoff. + //nolint:staticcheck MinRetryBackoff time.Duration `json:",optional"` // Maximum backoff between each retry. // Default is 512 milliseconds; -1 disables backoff. + //nolint:staticcheck MaxRetryBackoff time.Duration `json:",optional"` // Dial timeout for establishing new connections. // Default is 5 seconds. + //nolint:staticcheck DialTimeout time.Duration `json:",optional"` // Timeout for socket reads. If reached, commands will fail // with a timeout instead of blocking. Use value -1 for no timeout and 0 for default. // Default is 3 seconds. + //nolint:staticcheck ReadTimeout time.Duration `json:",optional"` // Timeout for socket writes. If reached, commands will fail // with a timeout instead of blocking. // Default is ReadTimeout. + //nolint:staticcheck WriteTimeout time.Duration `json:",optional"` // Type of connection pool. // true for FIFO pool, false for LIFO pool. // Note that fifo has higher overhead compared to lifo. + //nolint:staticcheck PoolFIFO bool `json:",optional"` // Maximum number of socket connections. // Default is 10 connections per every available CPU as reported by runtime.GOMAXPROCS. 
+ //nolint:staticcheck PoolSize int `json:",optional"` // Minimum number of idle connections which is useful when establishing // new connection is slow. + //nolint:staticcheck MinIdleConns int `json:",optional"` // Connection age at which client retires (closes) the connection. // Default is to not close aged connections. + //nolint:staticcheck MaxConnAge time.Duration `json:",optional"` // Amount of time client waits for connection if all connections // are busy before returning an error. // Default is ReadTimeout + 1 second. + //nolint:staticcheck PoolTimeout time.Duration `json:",optional"` // Amount of time after which client closes idle connections. // Should be less than server's timeout. // Default is 5 minutes. -1 disables idle timeout check. + //nolint:staticcheck IdleTimeout time.Duration `json:",optional"` - // Frequency of idle checks made by idle connections reaper. - // Default is 1 minute. -1 disables idle connections reaper, + // Frequency of idle checks made by idle connection reaper. + // Default is 1 minute. -1 disables idle connection reaper, // but idle connections are still discarded by the client // if IdleTimeout is set. + //nolint:staticcheck IdleCheckFrequency time.Duration `json:",optional"` } @@ -157,3 +181,49 @@ func SetNamespace( } return context.TreeDB } + +type Context struct { + Name string + Driver Driver + LevelDBOption *LevelDBOption + RedisDBOption *RedisDBOption + + TreeDB database.TreeDB + defaultOptions []bsmt.Option + Reload bool + batchReloadSize int +} + +func (ctx *Context) IsLoad() bool { + if ctx.Reload { + return true + } + return ctx.Driver == MemoryDB +} + +func (ctx *Context) Options(blockHeight int64) []bsmt.Option { + var opts []bsmt.Option + for i := range ctx.defaultOptions { + opts = append(opts, ctx.defaultOptions[i]) + } + if ctx.Driver == MemoryDB { + opts = append(opts, bsmt.InitializeVersion(bsmt.Version(blockHeight))) + } + return opts +} + +func (ctx *Context) SetOptions(opts ...bsmt.Option) { + ctx.defaultOptions = append(ctx.defaultOptions, opts...) +} + +func (ctx *Context) BatchReloadSize() int { + if ctx.batchReloadSize <= 0 { + return defaultBatchReloadSize // default + } + + return ctx.batchReloadSize +} + +func (ctx *Context) SetBatchReloadSize(size int) { + ctx.batchReloadSize = size +} diff --git a/common/tree/constant.go b/tree/types.go similarity index 80% rename from common/tree/constant.go rename to tree/types.go index 1e6c56ac7..2f25853a6 100644 --- a/common/tree/constant.go +++ b/tree/types.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,17 +18,17 @@ package tree import ( - "github.com/bnb-chain/zkbas-crypto/accumulators/merkleTree" "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" - "github.com/bnb-chain/zkbas/common/model/account" - "github.com/bnb-chain/zkbas/common/model/liquidity" - "github.com/bnb-chain/zkbas/common/model/nft" - "github.com/bnb-chain/zkbas/common/model/sysconfig" + "github.com/bnb-chain/zkbas-crypto/accumulators/merkleTree" + "github.com/bnb-chain/zkbas/dao/account" + "github.com/bnb-chain/zkbas/dao/liquidity" + "github.com/bnb-chain/zkbas/dao/nft" + "github.com/bnb-chain/zkbas/dao/sysconfig" ) type ( - SysconfigModel = sysconfig.SysconfigModel + SysconfigModel = sysconfig.SysConfigModel AccountModel = account.AccountModel AccountHistoryModel = account.AccountHistoryModel L2NftHistoryModel = nft.L2NftHistoryModel @@ -53,9 +53,16 @@ const ( ) var ( - NilHash = merkleTree.NilHash - NilAccountAssetRoot, NilStateRoot, NilAccountRoot, NilLiquidityRoot, NilNftRoot []byte - NilAccountAssetNodeHash, NilAccountNodeHash, NilLiquidityNodeHash, NilNftNodeHash []byte + NilHash = merkleTree.NilHash + NilStateRoot []byte + NilAccountRoot []byte + NilLiquidityRoot []byte + NilNftRoot []byte + NilAccountAssetRoot []byte + NilAccountNodeHash []byte + NilLiquidityNodeHash []byte + NilNftNodeHash []byte + NilAccountAssetNodeHash []byte ) func init() { diff --git a/common/tree/util.go b/tree/util.go similarity index 55% rename from common/tree/util.go rename to tree/util.go index d3e2c4818..22aaed672 100644 --- a/common/tree/util.go +++ b/tree/util.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,10 +18,15 @@ package tree import ( + "bytes" "math/big" - bsmt "github.com/bnb-chain/bas-smt" + curve "github.com/bnb-chain/zkbas-crypto/ecc/ztwistededwards/tebn254" + "github.com/bnb-chain/zkbas-crypto/ffmath" + bsmt "github.com/bnb-chain/zkbas-smt" + common2 "github.com/bnb-chain/zkbas/common" "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" + "github.com/ethereum/go-ethereum/common" "github.com/pkg/errors" ) @@ -121,12 +126,12 @@ func CommitTrees(version uint64, if err != nil { return errors.Wrapf(err, "unable to commit account tree, tree ver: %d, prune ver: %d", ver, accPrunedVersion) } - for idx, assetTree := range *assetTrees { + for idx := range *assetTrees { assetPrunedVersion := bsmt.Version(version) - if assetTree.LatestVersion() < assetPrunedVersion { - assetPrunedVersion = assetTree.LatestVersion() + if (*assetTrees)[idx].LatestVersion() < assetPrunedVersion { + assetPrunedVersion = (*assetTrees)[idx].LatestVersion() } - ver, err := assetTree.Commit(&assetPrunedVersion) + ver, err := (*assetTrees)[idx].Commit(&assetPrunedVersion) if err != nil { return errors.Wrapf(err, "unable to commit asset tree [%d], tree ver: %d, prune ver: %d", idx, ver, assetPrunedVersion) } @@ -188,3 +193,129 @@ func RollBackTrees(version uint64, } return nil } + +func ComputeAccountLeafHash( + accountNameHash string, + pk string, + nonce int64, + collectionNonce int64, + assetRoot []byte, +) (hashVal []byte, err error) { + hFunc := mimc.NewMiMC() + var buf bytes.Buffer + buf.Write(common.FromHex(accountNameHash)) + err = common2.PaddingPkIntoBuf(&buf, pk) + if err != nil { + return nil, err + } + common2.PaddingInt64IntoBuf(&buf, nonce) + common2.PaddingInt64IntoBuf(&buf, collectionNonce) + buf.Write(assetRoot) + hFunc.Reset() + hFunc.Write(buf.Bytes()) + hashVal = hFunc.Sum(nil) + return hashVal, nil +} + +func ComputeAccountAssetLeafHash( + balance string, + lpAmount string, + offerCanceledOrFinalized string, +) (hashVal []byte, err error) { + hFunc := mimc.NewMiMC() + var buf bytes.Buffer + err = common2.PaddingStringBigIntIntoBuf(&buf, balance) + if err != nil { + return nil, err + } + err = common2.PaddingStringBigIntIntoBuf(&buf, lpAmount) + if err != nil { + return nil, err + } + err = common2.PaddingStringBigIntIntoBuf(&buf, offerCanceledOrFinalized) + if err != nil { + return nil, err + } + hFunc.Write(buf.Bytes()) + return hFunc.Sum(nil), nil +} + +func ComputeLiquidityAssetLeafHash( + assetAId int64, + assetA string, + assetBId int64, + assetB string, + lpAmount string, + kLast string, + feeRate int64, + treasuryAccountIndex int64, + treasuryRate int64, +) (hashVal []byte, err error) { + hFunc := mimc.NewMiMC() + var buf bytes.Buffer + common2.PaddingInt64IntoBuf(&buf, assetAId) + err = common2.PaddingStringBigIntIntoBuf(&buf, assetA) + if err != nil { + return nil, err + } + common2.PaddingInt64IntoBuf(&buf, assetBId) + err = common2.PaddingStringBigIntIntoBuf(&buf, assetB) + if err != nil { + return nil, err + } + err = common2.PaddingStringBigIntIntoBuf(&buf, lpAmount) + if err != nil { + return nil, err + } + err = common2.PaddingStringBigIntIntoBuf(&buf, kLast) + if err != nil { + return nil, err + } + common2.PaddingInt64IntoBuf(&buf, feeRate) + common2.PaddingInt64IntoBuf(&buf, treasuryAccountIndex) + common2.PaddingInt64IntoBuf(&buf, treasuryRate) + hFunc.Write(buf.Bytes()) + hashVal = hFunc.Sum(nil) + return hashVal, nil +} + +func ComputeNftAssetLeafHash( + creatorAccountIndex int64, + ownerAccountIndex int64, + nftContentHash string, + nftL1Address string, + 
nftL1TokenId string, + creatorTreasuryRate int64, + collectionId int64, +) (hashVal []byte, err error) { + hFunc := mimc.NewMiMC() + var buf bytes.Buffer + common2.PaddingInt64IntoBuf(&buf, creatorAccountIndex) + common2.PaddingInt64IntoBuf(&buf, ownerAccountIndex) + buf.Write(ffmath.Mod(new(big.Int).SetBytes(common.FromHex(nftContentHash)), curve.Modulus).FillBytes(make([]byte, 32))) + err = common2.PaddingAddressIntoBuf(&buf, nftL1Address) + if err != nil { + return nil, err + } + err = common2.PaddingStringBigIntIntoBuf(&buf, nftL1TokenId) + if err != nil { + return nil, err + } + common2.PaddingInt64IntoBuf(&buf, creatorTreasuryRate) + common2.PaddingInt64IntoBuf(&buf, collectionId) + hFunc.Write(buf.Bytes()) + hashVal = hFunc.Sum(nil) + return hashVal, nil +} + +func ComputeStateRootHash( + accountRoot []byte, + liquidityRoot []byte, + nftRoot []byte, +) []byte { + hFunc := mimc.NewMiMC() + hFunc.Write(accountRoot) + hFunc.Write(liquidityRoot) + hFunc.Write(nftRoot) + return hFunc.Sum(nil) +} diff --git a/types/account.go b/types/account.go new file mode 100644 index 000000000..2dd2875ac --- /dev/null +++ b/types/account.go @@ -0,0 +1,108 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package types + +import ( + "encoding/json" + "math/big" +) + +const ( + FungibleAssetType = 1 + LiquidityAssetType = 2 + NftAssetType = 3 + CollectionNonceAssetType = 4 + + BuyOfferType = 0 + SellOfferType = 1 +) + +type AccountAsset struct { + AssetId int64 + Balance *big.Int + LpAmount *big.Int + OfferCanceledOrFinalized *big.Int +} + +func (asset *AccountAsset) DeepCopy() *AccountAsset { + return &AccountAsset{ + AssetId: asset.AssetId, + Balance: big.NewInt(0).Set(asset.Balance), + LpAmount: big.NewInt(0).Set(asset.LpAmount), + OfferCanceledOrFinalized: big.NewInt(0).Set(asset.OfferCanceledOrFinalized), + } +} + +func ConstructAccountAsset(assetId int64, balance *big.Int, lpAmount *big.Int, offerCanceledOrFinalized *big.Int) *AccountAsset { + return &AccountAsset{ + assetId, + balance, + lpAmount, + offerCanceledOrFinalized, + } +} + +func ParseAccountAsset(balance string) (asset *AccountAsset, err error) { + err = json.Unmarshal([]byte(balance), &asset) + if err != nil { + return nil, JsonErrUnmarshal + } + return asset, nil +} + +func (asset *AccountAsset) String() (info string) { + infoBytes, _ := json.Marshal(asset) + return string(infoBytes) +} + +type AccountInfo struct { + AccountId uint + AccountIndex int64 + AccountName string + PublicKey string + AccountNameHash string + L1Address string + Nonce int64 + CollectionNonce int64 + // map[int64]*AccountAsset + AssetInfo map[int64]*AccountAsset // key: index, value: balance + AssetRoot string + Status int +} + +func (ai *AccountInfo) DeepCopy() (*AccountInfo, error) { + assetInfo := make(map[int64]*AccountAsset) + for assetId, asset := range ai.AssetInfo { + assetInfo[assetId] = asset.DeepCopy() + } + + newAccountInfo := &AccountInfo{ + AccountId: ai.AccountId, + AccountIndex: 
ai.AccountIndex, + AccountName: ai.AccountName, + PublicKey: ai.PublicKey, + AccountNameHash: ai.AccountNameHash, + L1Address: ai.L1Address, + Nonce: ai.Nonce, + CollectionNonce: ai.CollectionNonce, + AssetInfo: assetInfo, + AssetRoot: ai.AssetRoot, + Status: ai.Status, + } + return newAccountInfo, nil +} diff --git a/common/commonConstant/asset.go b/types/asset.go similarity index 90% rename from common/commonConstant/asset.go rename to types/asset.go index e945275da..299177f44 100644 --- a/common/commonConstant/asset.go +++ b/types/asset.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,9 +15,10 @@ * */ -package commonConstant +package types import ( + "math" "math/big" ) @@ -30,7 +31,7 @@ const ( NilAssetInfo = "{}" NilAccountName = "" NilAccountOrder = -1 - NilExpiredAt = 0 + NilExpiredAt = math.MaxInt64 NilCollectionId = int64(0) NilAccountIndex = int64(0) NilTxNftIndex = int64(-1) @@ -41,6 +42,7 @@ const ( var ( NilAssetAmountStr = "0" NilNftContentHash = "0" + NilAccountNameHash = "0" NilL1TokenId = "0" NilL1Address = "0" NilOfferCanceledOrFinalized = big.NewInt(0) diff --git a/types/code.go b/types/code.go new file mode 100644 index 000000000..5c722ff60 --- /dev/null +++ b/types/code.go @@ -0,0 +1,38 @@ +package types + +import ( + "errors" + + "github.com/zeromicro/go-zero/core/stores/sqlx" +) + +// For internal errors, `Code` is not needed in the current implementation. +// For external errors (app & globalRPC), we can define codes; however, the current framework also +// does not use the codes. We can leave the codes for future enhancement. + +var ( + DbErrNotFound = sqlx.ErrNotFound + DbErrSqlOperation = errors.New("unknown sql operation error") + DbErrDuplicatedCollectionIndex = errors.New("duplicated collection index") + DbErrFailToCreateBlock = errors.New("fail to create block") + DbErrFailToCreateMempoolTx = errors.New("fail to create mempool tx") + DbErrFailToCreateProof = errors.New("fail to create proof") + DbErrFailToCreateFailTx = errors.New("fail to create fail tx") + DbErrFailToCreateSysconfig = errors.New("fail to create system config") + + JsonErrUnmarshal = errors.New("json.Unmarshal err") + JsonErrMarshal = errors.New("json.Marshal err") + + HttpErrFailToRequest = errors.New("http.NewRequest err") + HttpErrClientDo = errors.New("http.Client.Do err") + + IoErrFailToRead = errors.New("ioutil.ReadAll err") + + AppErrInvalidParam = New(20001, "invalid param: ") + AppErrInvalidTx = New(20002, "invalid tx: cannot parse tx") + AppErrInvalidTxType = New(20003, "invalid tx type") + AppErrInvalidTxField = New(20004, "invalid tx field: ") + AppErrInvalidGasAsset = New(25005, "invalid gas asset") + AppErrNotFound = New(29404, "not found") + AppErrInternal = New(29500, "internal server error") +) diff --git a/types/error.go b/types/error.go new file mode 100644 index 000000000..4ccc41bde --- /dev/null +++ b/types/error.go @@ -0,0 +1,39 @@ +package types + +import ( + "fmt" +) + +type Error interface { + Error() string + Code() int32 + RefineError(err ...interface{}) Error +} + +func New(code int32, msg string) Error { + return newError(code, msg) +} + +type commonError struct { + code int32 + message string +} + +func (e *commonError) Error() string { + return fmt.Sprintf("%d: %s", e.code, e.message) +} + +func (e *commonError) Code() int32 { + return e.code +} + +func (e *commonError)
RefineError(err ...interface{}) Error { + return newError(e.Code(), e.message+fmt.Sprint(err...)) +} + +func newError(code int32, msg string) Error { + return &commonError{ + code: code, + message: msg, + } +} diff --git a/common/commonAsset/poolHelper.go b/types/liquidity.go similarity index 87% rename from common/commonAsset/poolHelper.go rename to types/liquidity.go index 82cf1ab12..53173f77a 100644 --- a/common/commonAsset/poolHelper.go +++ b/types/liquidity.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,16 +15,12 @@ * */ -package commonAsset +package types import ( "encoding/json" "errors" "math/big" - - "github.com/zeromicro/go-zero/core/logx" - - "github.com/bnb-chain/zkbas/errorcode" ) type LiquidityInfo struct { @@ -64,22 +60,18 @@ func ConstructLiquidityInfo(pairIndex int64, assetAId int64, assetAAmount string lpAmount string, kLast string, feeRate int64, treasuryAccountIndex int64, treasuryRate int64) (info *LiquidityInfo, err error) { assetA, isValid := new(big.Int).SetString(assetAAmount, 10) if !isValid { - logx.Errorf("[ConstructLiquidityInfo] invalid big int") return nil, errors.New("[ConstructLiquidityInfo] invalid bit int") } assetB, isValid := new(big.Int).SetString(assetBAmount, 10) if !isValid { - logx.Errorf("[ConstructLiquidityInfo] invalid big int") return nil, errors.New("[ConstructLiquidityInfo] invalid bit int") } lp, isValid := new(big.Int).SetString(lpAmount, 10) if !isValid { - logx.Errorf("[ConstructLiquidityInfo] invalid big int") return nil, errors.New("[ConstructLiquidityInfo] invalid bit int") } kLastInt, isValid := new(big.Int).SetString(kLast, 10) if !isValid { - logx.Errorf("[ConstructLiquidityInfo] invalid big int") return nil, errors.New("[ConstructLiquidityInfo] invalid bit int") } info = &LiquidityInfo{ @@ -100,7 +92,7 @@ func ConstructLiquidityInfo(pairIndex int64, assetAId int64, assetAAmount string func ParseLiquidityInfo(infoStr string) (info *LiquidityInfo, err error) { err = json.Unmarshal([]byte(infoStr), &info) if err != nil { - return nil, errorcode.JsonErrUnmarshal + return nil, JsonErrUnmarshal } return info, nil } diff --git a/common/commonAsset/nftHelper.go b/types/nft.go similarity index 79% rename from common/commonAsset/nftHelper.go rename to types/nft.go index 3f2f88d15..55c6512d6 100644 --- a/common/commonAsset/nftHelper.go +++ b/types/nft.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,13 +15,10 @@ * */ -package commonAsset +package types import ( "encoding/json" - - "github.com/bnb-chain/zkbas/common/commonConstant" - "github.com/bnb-chain/zkbas/errorcode" ) type NftInfo struct { @@ -43,7 +40,7 @@ func (info *NftInfo) String() string { func ParseNftInfo(infoStr string) (info *NftInfo, err error) { err = json.Unmarshal([]byte(infoStr), &info) if err != nil { - return nil, errorcode.JsonErrUnmarshal + return nil, JsonErrUnmarshal } return info, nil } @@ -51,11 +48,11 @@ func ParseNftInfo(infoStr string) (info *NftInfo, err error) { func EmptyNftInfo(nftIndex int64) (info *NftInfo) { return &NftInfo{ NftIndex: nftIndex, - CreatorAccountIndex: commonConstant.NilAccountIndex, - OwnerAccountIndex: commonConstant.NilAccountIndex, - NftContentHash: commonConstant.NilNftContentHash, - NftL1TokenId: commonConstant.NilL1TokenId, - NftL1Address: commonConstant.NilL1Address, + CreatorAccountIndex: NilAccountIndex, + OwnerAccountIndex: NilAccountIndex, + NftContentHash: NilNftContentHash, + NftL1TokenId: NilL1TokenId, + NftL1Address: NilL1Address, CreatorTreasuryRate: 0, CollectionId: 0, } diff --git a/common/sysconfigName/constant.go b/types/system.go similarity index 85% rename from common/sysconfigName/constant.go rename to types/system.go index 1433ecf72..23eb554eb 100644 --- a/common/sysconfigName/constant.go +++ b/types/system.go @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Zkbas Protocol + * Copyright © 2021 ZkBAS Protocol * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,7 +15,11 @@ * */ -package sysconfigName +package types + +import ( + "math/big" +) const ( TreasuryAccountIndex = "TreasuryAccountIndex" @@ -30,4 +34,11 @@ const ( Governor = "Governor" ZnsPriceOracle = "ZnsPriceOracle" + + AccountNameSuffix = ".legend" +) + +var ( + ZeroBigInt = big.NewInt(0) + ZeroBigIntString = "0" ) diff --git a/types/tx.go b/types/tx.go new file mode 100644 index 000000000..ad199748e --- /dev/null +++ b/types/tx.go @@ -0,0 +1,291 @@ +/* + * Copyright © 2021 ZkBAS Protocol + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package types + +import ( + "encoding/json" + + "github.com/bnb-chain/zkbas-crypto/wasm/legend/legendTxTypes" +) + +const ( + TxTypeEmpty = iota + TxTypeRegisterZns + TxTypeCreatePair + TxTypeUpdatePairRate + TxTypeDeposit + TxTypeDepositNft + TxTypeTransfer + TxTypeSwap + TxTypeAddLiquidity + TxTypeRemoveLiquidity + TxTypeWithdraw + TxTypeCreateCollection + TxTypeMintNft + TxTypeTransferNft + TxTypeAtomicMatch + TxTypeCancelOffer + TxTypeWithdrawNft + TxTypeFullExit + TxTypeFullExitNft + TxTypeOffer +) + +func IsL2Tx(txType int64) bool { + if txType == TxTypeTransfer || + txType == TxTypeSwap || + txType == TxTypeAddLiquidity || + txType == TxTypeRemoveLiquidity || + txType == TxTypeWithdraw || + txType == TxTypeCreateCollection || + txType == TxTypeMintNft || + txType == TxTypeTransferNft || + txType == TxTypeAtomicMatch || + txType == TxTypeCancelOffer || + txType == TxTypeWithdrawNft { + return true + } + return false +} + +type ( + RegisterZnsTxInfo = legendTxTypes.RegisterZnsTxInfo + CreatePairTxInfo = legendTxTypes.CreatePairTxInfo + UpdatePairRateTxInfo = legendTxTypes.UpdatePairRateTxInfo + DepositTxInfo = legendTxTypes.DepositTxInfo + DepositNftTxInfo = legendTxTypes.DepositNftTxInfo + FullExitTxInfo = legendTxTypes.FullExitTxInfo + FullExitNftTxInfo = legendTxTypes.FullExitNftTxInfo + + TransferTxInfo = legendTxTypes.TransferTxInfo + SwapTxInfo = legendTxTypes.SwapTxInfo + AddLiquidityTxInfo = legendTxTypes.AddLiquidityTxInfo + RemoveLiquidityTxInfo = legendTxTypes.RemoveLiquidityTxInfo + WithdrawTxInfo = legendTxTypes.WithdrawTxInfo + CreateCollectionTxInfo = legendTxTypes.CreateCollectionTxInfo + MintNftTxInfo = legendTxTypes.MintNftTxInfo + TransferNftTxInfo = legendTxTypes.TransferNftTxInfo + OfferTxInfo = legendTxTypes.OfferTxInfo + AtomicMatchTxInfo = legendTxTypes.AtomicMatchTxInfo + CancelOfferTxInfo = legendTxTypes.CancelOfferTxInfo + WithdrawNftTxInfo = legendTxTypes.WithdrawNftTxInfo +) + +const ( + TxTypeBytesSize = 1 + AddressBytesSize = 20 + AccountIndexBytesSize = 4 + AccountNameBytesSize = 32 + AccountNameHashBytesSize = 32 + PubkeyBytesSize = 32 + AssetIdBytesSize = 2 + PairIndexBytesSize = 2 + StateAmountBytesSize = 16 + NftIndexBytesSize = 5 + NftTokenIdBytesSize = 32 + NftContentHashBytesSize = 32 + FeeRateBytesSize = 2 + CollectionIdBytesSize = 2 + + RegisterZnsPubDataSize = TxTypeBytesSize + AccountIndexBytesSize + AccountNameBytesSize + + AccountNameHashBytesSize + PubkeyBytesSize + PubkeyBytesSize + CreatePairPubDataSize = TxTypeBytesSize + PairIndexBytesSize + + AssetIdBytesSize + AssetIdBytesSize + FeeRateBytesSize + AccountIndexBytesSize + FeeRateBytesSize + UpdatePairRatePubdataSize = TxTypeBytesSize + PairIndexBytesSize + + FeeRateBytesSize + AccountIndexBytesSize + FeeRateBytesSize + DepositPubDataSize = TxTypeBytesSize + AccountIndexBytesSize + + AccountNameHashBytesSize + AssetIdBytesSize + StateAmountBytesSize + DepositNftPubDataSize = TxTypeBytesSize + AccountIndexBytesSize + NftIndexBytesSize + AddressBytesSize + + AccountIndexBytesSize + FeeRateBytesSize + NftContentHashBytesSize + NftTokenIdBytesSize + + AccountNameHashBytesSize + CollectionIdBytesSize + FullExitPubDataSize = TxTypeBytesSize + AccountIndexBytesSize + + AccountNameHashBytesSize + AssetIdBytesSize + StateAmountBytesSize + FullExitNftPubDataSize = TxTypeBytesSize + AccountIndexBytesSize + AccountIndexBytesSize + FeeRateBytesSize + + NftIndexBytesSize + CollectionIdBytesSize + AddressBytesSize + + AccountNameHashBytesSize + AccountNameHashBytesSize + + 
NftContentHashBytesSize + NftTokenIdBytesSize +) + +const ( + TypeAccountIndex = iota + TypeAssetId + TypeAccountName + TypeAccountNameOmitSpace + TypeAccountPk + TypePairIndex + TypeLimit + TypeOffset + TypeHash + TypeBlockHeight + TypeTxType + TypeChainId + TypeLPAmount + TypeAssetAmount + TypeBoolean + TypeGasFee +) + +const ( + AddressSize = 20 + FeeRateBase = 10000 + EmptyStringKeccak = "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" +) + +func ParseRegisterZnsTxInfo(txInfoStr string) (txInfo *legendTxTypes.RegisterZnsTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseCreatePairTxInfo(txInfoStr string) (txInfo *legendTxTypes.CreatePairTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseUpdatePairRateTxInfo(txInfoStr string) (txInfo *legendTxTypes.UpdatePairRateTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseDepositTxInfo(txInfoStr string) (txInfo *legendTxTypes.DepositTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseDepositNftTxInfo(txInfoStr string) (txInfo *legendTxTypes.DepositNftTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseFullExitTxInfo(txInfoStr string) (txInfo *legendTxTypes.FullExitTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseFullExitNftTxInfo(txInfoStr string) (txInfo *legendTxTypes.FullExitNftTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseCreateCollectionTxInfo(txInfoStr string) (txInfo *legendTxTypes.CreateCollectionTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseTransferTxInfo(txInfoStr string) (txInfo *legendTxTypes.TransferTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseSwapTxInfo(txInfoStr string) (txInfo *legendTxTypes.SwapTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseAddLiquidityTxInfo(txInfoStr string) (txInfo *legendTxTypes.AddLiquidityTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseRemoveLiquidityTxInfo(txInfoStr string) (txInfo *legendTxTypes.RemoveLiquidityTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseMintNftTxInfo(txInfoStr string) (txInfo *legendTxTypes.MintNftTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseTransferNftTxInfo(txInfoStr string) (txInfo *legendTxTypes.TransferNftTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseAtomicMatchTxInfo(txInfoStr 
string) (txInfo *legendTxTypes.AtomicMatchTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseCancelOfferTxInfo(txInfoStr string) (txInfo *legendTxTypes.CancelOfferTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseWithdrawTxInfo(txInfoStr string) (txInfo *legendTxTypes.WithdrawTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +} + +func ParseWithdrawNftTxInfo(txInfoStr string) (txInfo *legendTxTypes.WithdrawNftTxInfo, err error) { + err = json.Unmarshal([]byte(txInfoStr), &txInfo) + if err != nil { + return nil, err + } + return txInfo, nil +}
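
A minimal usage sketch of the new types package introduced above (not part of this diff): the fetchAccountName helper and the demo wiring are hypothetical, and the import path assumes the repository module github.com/bnb-chain/zkbas. It mirrors the split described in types/code.go: DB/JSON/HTTP sentinels stay code-less for internal use, while App* errors expose codes for the API layer.

package main

import (
	"errors"
	"fmt"

	"github.com/bnb-chain/zkbas/types"
)

// fetchAccountName stands in for a real storage query; it returns the
// internal sentinel types.DbErrNotFound when no account matches.
func fetchAccountName(accountIndex int64) (string, error) {
	if accountIndex != 0 {
		return "", types.DbErrNotFound
	}
	return "treasury" + types.AccountNameSuffix, nil
}

func main() {
	// External errors carry a numeric code and can be refined with context.
	err := types.AppErrInvalidParam.RefineError("account_index")
	fmt.Println(err.Code(), err.Error()) // prints: 20001 20001: invalid param: account_index

	// Internal errors stay plain sentinels and are matched directly,
	// then translated to a coded external error at the API boundary.
	if _, dbErr := fetchAccountName(42); errors.Is(dbErr, types.DbErrNotFound) {
		fmt.Println(types.AppErrNotFound.Error()) // prints: 29404: not found
	}

	// Tx helpers: classify a tx type and parse a JSON payload.
	fmt.Println(types.IsL2Tx(types.TxTypeTransfer)) // prints: true
	if _, parseErr := types.ParseTransferTxInfo(`{}`); parseErr == nil {
		fmt.Println("parsed empty TransferTxInfo")
	}
}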